Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 18:11:28 +00:00
Compare commits
14 Commits
test-codec...arky/add-p
| Author | SHA1 | Date |
|---|---|---|
| | b29709b4d7 | |
| | 23b16ad6d2 | |
| | fdeccfaf24 | |
| | fecde23da5 | |
| | b1d931337e | |
| | 39542336b8 | |
| | 799588cada | |
| | f392add4b8 | |
| | 49560bf2a2 | |
| | 44b3ed5ae9 | |
| | 6235145641 | |
| | ff5cb7ba51 | |
| | 1e2b9ae962 | |
| | 8cab58d248 | |
.changes/unreleased/Dependencies-20230621-005752.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump mypy from 1.3.0 to 1.4.0"
+time: 2023-06-21T00:57:52.00000Z
+custom:
+  Author: dependabot[bot]
+  PR: 7912
.changes/unreleased/Fixes-20230625-142731.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fixed double-underline
+time: 2023-06-25T14:27:31.231253719+08:00
+custom:
+  Author: lllong33
+  Issue: "5301"
.changes/unreleased/Fixes-20230720-161513.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
+time: 2023-07-20T16:15:13.761813-07:00
+custom:
+  Author: QMalcolm
+  Issue: "7694"
.changes/unreleased/Fixes-20230720-170112.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Stop detecting materialization macros based on macro name
+time: 2023-07-20T17:01:12.496238-07:00
+custom:
+  Author: QMalcolm
+  Issue: "6231"
.changes/unreleased/Fixes-20230720-172422.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
+time: 2023-07-20T17:24:22.969951-07:00
+custom:
+  Author: QMalcolm
+  Issue: "6653"
.changes/unreleased/Fixes-20230726-104448.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Improve handling of CTE injection with ephemeral models
+time: 2023-07-26T10:44:48.888451-04:00
+custom:
+  Author: gshank
+  Issue: "8213"
.changes/unreleased/Under the Hood-20230724-150654.yaml (new file, +6)
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: A way to control maxBytes for a single dbt.log file
+time: 2023-07-24T15:06:54.263822-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8199"
.changes/unreleased/Under the Hood-20230725-102609.yaml (new file, +7)
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Ref expressions with version can now be processed by the latest version of the
+  high-performance dbt-extractor library.
+time: 2023-07-25T10:26:09.902878-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "7688"
.github/ISSUE_TEMPLATE/implementation-ticket.yml (new vendored file, +40)
@@ -0,0 +1,40 @@
+name: 🛠️ Implementation
+description: This is an implementation ticket intended for use by the maintainers of dbt-core
+title: "[<project>] <title>"
+labels: ["user_docs"]
+body:
+  - type: markdown
+    attributes:
+      value: This is an implementation ticket intended for use by the maintainers of dbt-core
+  - type: checkboxes
+    attributes:
+      label: Housekeeping
+      description: >
+        A couple friendly reminders:
+        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
+        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
+      options:
+        - label: I am a maintainer of dbt-core
+          required: true
+  - type: textarea
+    attributes:
+      label: Short description
+      description: |
+        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Acceptance criteria
+      description: |
+        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Context
+      description: |
+        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
+    validations:
+      required: false
.github/workflows/main.yml (vendored, 66 changes)
@@ -33,6 +33,11 @@ defaults:
   run:
     shell: bash

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+
 jobs:
   code-quality:
     name: code-quality
@@ -106,23 +111,55 @@ jobs:
     env:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
     env:
       TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
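For orientation, here is a minimal Python sketch (not part of the diff) of the JSON the two `generate` steps above emit, assuming `PYTHON_INTEGRATION_TEST_WORKERS` resolves to 3; the variable name `workers` and the printed shapes are illustrative only:

```python
import json

# Assumption: mirrors the bash above with PYTHON_INTEGRATION_TEST_WORKERS=3.
workers = 3

# "generate split-groups": a JSON array of group ids as strings.
split_groups = [str(b) for b in range(1, workers + 1)]
print(json.dumps(split_groups))  # ["1", "2", "3"]

# "generate include": every extra os/python combo crossed with every group.
extra_combos = [
    {"python-version": "3.8", "os": "windows-latest"},
    {"python-version": "3.8", "os": "macos-latest"},
]
include = [
    {"split-group": str(g), **combo}
    for combo in extra_combos
    for g in range(1, workers + 1)
]
print(json.dumps(include))
# [{"split-group": "1", "python-version": "3.8", "os": "windows-latest"}, ...]
```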
@@ -165,6 +202,8 @@ jobs:

     - name: Run tests
       run: tox -- --ddtrace
+      env:
+        PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

     - name: Get current date
       if: always()
@@ -185,6 +224,15 @@ jobs:
     env:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

+  integration-report:
+    name: integration test suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "[Notification] Integration test suite passes"
+        run: |
+          echo "::notice title="Integration test suite passes""
+
   build:
     name: build packages
@@ -18,11 +18,41 @@ on:

 permissions: read-all

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: ${{ vars.PYTHON_INTEGRATION_TEST_WORKERS }}
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
     timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"
@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
@@ -37,7 +37,7 @@ repos:
       alias: flake8-check
       stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.3.0
+  rev: v1.4.0
   hooks:
     - id: mypy
       # N.B.: Mypy is... a bit fragile.
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )

+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",
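As a sanity check on how such an option behaves, a self-contained click sketch (assuming the `click` package is installed; the command itself is hypothetical, while the option parameters mirror the diff above):

```python
import click

# Hidden option with an env-var fallback, as in the diff: CLI flag wins,
# then DBT_LOG_FILE_MAX_BYTES, then the 10 MiB default.
@click.command()
@click.option(
    "--log-file-max-bytes",
    envvar="DBT_LOG_FILE_MAX_BYTES",
    default=10 * 1024 * 1024,  # 10mb
    type=click.INT,
    hidden=True,
)
def cli(log_file_max_bytes: int) -> None:
    click.echo(f"max bytes per log file: {log_file_max_bytes}")

if __name__ == "__main__":
    cli()  # e.g. DBT_LOG_FILE_MAX_BYTES=1048576 python cli.py -> 1048576
```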
@@ -4,7 +4,6 @@ import json
 import networkx as nx  # type: ignore
 import os
 import pickle
-import sqlparse

 from collections import defaultdict
 from typing import List, Dict, Any, Tuple, Optional

@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
 import dbt.task.list as list_task
+import sqlparse

 graph_file_name = "graph.gpickle"
@@ -378,16 +378,16 @@ class Compiler:

             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        injected_sql = inject_ctes_into_sql(
-            model.compiled_code,
-            prepended_ctes,
-        )
         # Check again before updating for multi-threading
         if not model.extra_ctes_injected:
+            injected_sql = inject_ctes_into_sql(
+                model.compiled_code,
+                prepended_ctes,
+            )
+            model.extra_ctes_injected = True
             model._pre_injected_sql = model.compiled_code
             model.compiled_code = injected_sql
             model.extra_ctes = prepended_ctes
-            model.extra_ctes_injected = True

         # if model.extra_ctes is not set to prepended ctes, something went wrong
         return model, model.extra_ctes
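In miniature, the check-then-act shape this hunk moves to: compute and publish the injected SQL only inside the re-check, so a thread that lost the race no longer overwrites already-injected code. A minimal sketch, not dbt's exact code; `Model` and `inject` are hypothetical stand-ins:

```python
class Model:
    def __init__(self, compiled_code: str):
        self.compiled_code = compiled_code
        self.extra_ctes_injected = False
        self._pre_injected_sql = None

def inject(model: Model, ctes_sql: str) -> None:
    # Re-check right before mutating, and flip the flag before swapping code.
    if not model.extra_ctes_injected:
        injected = ctes_sql + model.compiled_code
        model.extra_ctes_injected = True
        model._pre_injected_sql = model.compiled_code
        model.compiled_code = injected

m = Model("select * from orders")
inject(m, "with o as (select 1) ")
inject(m, "with o as (select 1) ")  # second call is a no-op
print(m.compiled_code)
```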
@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
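A standalone sketch of the same guard: sqlparse 0.4.4 made the Lexer a singleton that should be initialized up front, while older sqlparse versions lack `get_default_instance`, hence the `hasattr` check:

```python
from sqlparse.lexer import Lexer  # type: ignore

# No-op on sqlparse < 0.4.4; initializes the singleton on 0.4.4+.
if hasattr(Lexer, "get_default_instance"):
    Lexer.get_default_instance()

import sqlparse
print(sqlparse.format("select 1 as id", reindent=True))
```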
@@ -80,6 +80,7 @@ class LoggerConfig:
     use_colors: bool = False
     output_stream: Optional[TextIO] = None
     output_file_name: Optional[str] = None
+    output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
     logger: Optional[Any] = None

@@ -100,7 +101,7 @@ class _Logger:
         file_handler = RotatingFileHandler(
             filename=str(config.output_file_name),
             encoding="utf8",
-            maxBytes=10 * 1024 * 1024,  # 10 mb
+            maxBytes=config.output_file_max_bytes,  # type: ignore
             backupCount=5,
         )
         self._python_logger = self._get_python_log_for_handler(file_handler)
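For context, a minimal sketch of the stdlib rollover behavior the diff makes configurable: once the file reaches `maxBytes` it rotates to `dbt.log.1` ... `dbt.log.5`. The file path and logger name here are illustrative:

```python
import logging
from logging.handlers import RotatingFileHandler

handler = RotatingFileHandler(
    filename="dbt.log",
    encoding="utf8",
    maxBytes=10 * 1024 * 1024,  # the 10mb default; 0 disables rollover
    backupCount=5,
)
logger = logging.getLogger("rollover-demo")
logger.addHandler(handler)
logger.warning("this record is written to dbt.log and subject to rollover")
```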
@@ -68,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
     log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
     EVENT_MANAGER.add_logger(
         _get_logfile_config(
-            log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
+            log_file,
+            flags.USE_COLORS_FILE,
+            log_file_format,
+            log_level_file,
+            flags.LOG_FILE_MAX_BYTES,
         )
     )

@@ -117,7 +121,11 @@ def _stdout_filter(


 def _get_logfile_config(
-    log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
+    log_path: str,
+    use_colors: bool,
+    line_format: LineFormat,
+    level: EventLevel,
+    log_file_max_bytes: int,
 ) -> LoggerConfig:
     return LoggerConfig(
         name="file_log",

@@ -127,6 +135,7 @@ def _get_logfile_config(
         scrubber=env_scrubber,
         filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
         output_file_name=log_path,
+        output_file_max_bytes=log_file_max_bytes,
     )
@@ -63,3 +63,12 @@
     {{ exceptions.raise_not_implemented(
         'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
 {% endmacro %}
+
+{% macro get_relations() %}
+    {{ return(adapter.dispatch('get_relations', 'dbt')()) }}
+{% endmacro %}
+
+{% macro default__get_relations() %}
+    {{ exceptions.raise_not_implemented(
+        'get_relations macro not implemented for adapter '+adapter.type()) }}
+{% endmacro %}
@@ -32,9 +32,14 @@
     {%- endcall %}

     -- cleanup
     {% if existing_relation is not none %}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
             {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}

     {{ adapter.rename_relation(intermediate_relation, target_relation) }}
@@ -44,9 +44,14 @@

     -- cleanup
     -- move the existing view out of the way
     {% if existing_relation is not none %}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
             {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}

     {{ adapter.rename_relation(intermediate_relation, target_relation) }}

     {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
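The same "reload before renaming" idea in plain Python terms; this is a loose analogy, not dbt code, with `cache` standing in for the adapter's relation cache:

```python
# Re-resolve the possibly-stale handle, and only rename when it still exists.
def rename_if_exists(cache: dict, name: str, backup_name: str) -> None:
    relation = cache.get(name)  # equivalent of load_cached_relation
    if relation is not None:
        cache[backup_name] = cache.pop(name)

cache = {"my_view": object()}
rename_if_exists(cache, "my_view", "my_view__dbt_backup")
rename_if_exists(cache, "my_view", "my_view__dbt_backup")  # second call: no-op
print(list(cache))  # ['my_view__dbt_backup']
```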
@@ -81,7 +81,7 @@ class MacroParser(BaseParser[Macro]):
         name: str = macro.name.replace(MACRO_PREFIX, "")
         node = self.parse_macro(block, base_node, name)
         # get supported_languages for materialization macro
-        if "materialization" in name:
+        if block.block_type_name == "materialization":
             node.supported_languages = jinja.get_supported_languages(macro)
         yield node
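The old check in miniature: substring matching on the macro name also caught ordinary macros that merely contain the word, like the `materialization_macro` fixture added later in this changeset:

```python
name = "materialization_macro"   # a regular macro, not a materialization
block_type_name = "macro"        # real materializations parse as "materialization"

old_check = "materialization" in name              # True  -> false positive
new_check = block_type_name == "materialization"   # False -> correctly skipped
assert old_check and not new_check
```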
@@ -497,12 +497,10 @@ class ModelParser(SimpleSQLParser[ModelNode]):
         # set refs and sources on the node object
         refs: List[RefArgs] = []
         for ref in statically_parsed["refs"]:
-            if len(ref) == 1:
-                package, name = None, ref[0]
-            else:
-                package, name = ref
-
-            refs.append(RefArgs(package=package, name=name))
+            name = ref.get("name")
+            package = ref.get("package")
+            version = ref.get("version")
+            refs.append(RefArgs(name, package, version))

         node.refs += refs
         node.sources += statically_parsed["sources"]
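A runnable sketch of the shape change: the newer dbt-extractor returns refs as dicts that may carry a version, instead of 1- or 2-element lists. The `RefArgs` dataclass below is a simplified stand-in for dbt's real class:

```python
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class RefArgs:  # stand-in: name, package, version in positional order
    name: str
    package: Optional[str] = None
    version: Optional[str] = None

statically_parsed = {"refs": [{"name": "orders", "package": None, "version": "2"}]}
refs: List[RefArgs] = []
for ref in statically_parsed["refs"]:
    refs.append(RefArgs(ref.get("name"), ref.get("package"), ref.get("version")))
print(refs)  # [RefArgs(name='orders', package=None, version='2')]
```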
core/dbt/tests/fixtures/project.py (vendored, 1 change)
@@ -502,6 +502,7 @@ def project(
         DEBUG=False,
         LOG_CACHE_EVENTS=False,
         QUIET=False,
+        LOG_FILE_MAX_BYTES=1000000,
     )
     setup_event_logger(log_flags)
     orig_cwd = os.getcwd()
@@ -17,6 +17,7 @@ from pathlib import PosixPath, WindowsPath

 from contextlib import contextmanager
 from dbt.events.types import RetryExternalCall, RecordRetryException
+from dbt.helper_types import WarnErrorOptions
 from dbt import flags
 from enum import Enum
 from typing_extensions import Protocol

@@ -601,6 +602,7 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
     except (
         requests.exceptions.RequestException,
         ReadError,
+        EOFError,
     ) as exc:
         if attempt <= max_attempts - 1:
             dbt.events.functions.fire_event(RecordRetryException(exc=str(exc)))
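A standalone sketch of the retry shape above (not dbt's full implementation): `EOFError`, e.g. from a truncated package download, now joins the retryable exceptions alongside connection errors:

```python
import time

def connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
    try:
        return fn()
    except (ConnectionError, EOFError) as exc:  # trimmed exception list
        if attempt <= max_attempts - 1:
            time.sleep(1)
            return connection_exception_retry(fn, max_attempts, attempt + 1)
        raise exc

attempts = {"n": 0}
def flaky():
    attempts["n"] += 1
    if attempts["n"] < 2:
        raise EOFError  # first call fails like a truncated download
    return "ok"

print(connection_exception_retry(flaky, 5))  # retries once, then "ok"
```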
@@ -654,6 +656,9 @@ def args_to_dict(args):
         # this was required for a test case
         if isinstance(var_args[key], PosixPath) or isinstance(var_args[key], WindowsPath):
             var_args[key] = str(var_args[key])
+        if isinstance(var_args[key], WarnErrorOptions):
+            var_args[key] = var_args[key].to_dict()

         dict_args[key] = var_args[key]
     return dict_args
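In miniature, the serialization the new branch performs; `DummyOptions` is a stand-in for `WarnErrorOptions`, whose `to_dict` the diff itself calls:

```python
class DummyOptions:
    def __init__(self, include, exclude):
        self.include, self.exclude = include, exclude

    def to_dict(self):
        return {"include": self.include, "exclude": self.exclude}

var_args = {"warn_error_options": DummyOptions([], [])}
for key, value in list(var_args.items()):
    if isinstance(value, DummyOptions):  # not JSON-serializable as-is
        var_args[key] = value.to_dict()
print(var_args)  # {'warn_error_options': {'include': [], 'exclude': []}}
```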
@@ -73,7 +73,7 @@ setup(
         "sqlparse>=0.2.3",
         # ----
         # These are major-version-0 packages also maintained by dbt-labs. Accept patches.
-        "dbt-extractor~=0.4.1",
+        "dbt-extractor~=0.5.0",
         "hologram~=0.0.16",  # includes transitive dependencies on python-dateutil and jsonschema
         "minimal-snowplow-tracker~=0.0.2",
         # DSI is under active development, so we're pinning to specific dev versions for now.
@@ -6,7 +6,7 @@ flake8
 flaky
 freezegun==0.3.12
 ipdb
-mypy==1.3.0
+mypy==1.4.0
 pip-tools
 pre-commit
 protobuf>=4.0.0

@@ -16,7 +16,9 @@ pytest-csv
 pytest-dotenv
 pytest-logbook
 pytest-mock
+pytest-split
 pytest-xdist
 python-dev-tools
 sphinx
 tox>=3.13
 twine
@@ -20,8 +20,7 @@ from dbt.exceptions import (
 import dbt.utils


-# note that this isn't an adapter macro, so just a single underscore
-GET_RELATIONS_MACRO_NAME = "postgres_get_relations"
+GET_RELATIONS_MACRO_NAME = "postgres__get_relations"


 @dataclass
@@ -1,4 +1,4 @@
-{% macro postgres_get_relations () -%}
+{% macro postgres__get_relations() -%}

 {#
      -- in pg_depend, objid is the dependent, refobjid is the referenced object

@@ -74,3 +74,7 @@

     {{ return(load_result('relations').table) }}
 {% endmacro %}
+
+{% macro postgres_get_relations() %}
+    {{ return(postgres__get_relations()) }}
+{% endmacro %}
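The rename matters on the Python side because the adapter resolves the macro by name (the `GET_RELATIONS_MACRO_NAME` change above). A runnable stand-in, where `StubAdapter` and its macro table are invented for illustration; only the lookup-by-name contract is the point:

```python
GET_RELATIONS_MACRO_NAME = "postgres__get_relations"  # double underscore: dispatch-style name

class StubAdapter:
    # Macros are looked up by name, so the constant and the macro
    # definition must stay in sync.
    _macros = {"postgres__get_relations": lambda: [("public", "orders")]}

    def execute_macro(self, name):
        return self._macros[name]()

print(StubAdapter().execute_macro(GET_RELATIONS_MACRO_NAME))  # [('public', 'orders')]
```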
@@ -4,6 +4,12 @@ models__dep_macro = """
 }}
 """

+models__materialization_macro = """
+{{
+    materialization_macro()
+}}
+"""
+
 models__with_undefined_macro = """
 {{ dispatch_to_nowhere() }}
 select 1 as id

@@ -75,6 +81,12 @@ macros__my_macros = """
 {% endmacro %}
 """

+macros__named_materialization = """
+{% macro materialization_macro() %}
+    select 1 as foo
+{% endmacro %}
+"""
+
 macros__no_default_macros = """
 {% macro do_something2(foo2, bar2) %}
@@ -20,12 +20,14 @@ from tests.functional.macros.fixtures import (
     models__override_get_columns_macros,
     models__deprecated_adapter_macro_model,
     models__incorrect_dispatch,
+    models__materialization_macro,
     macros__my_macros,
     macros__no_default_macros,
     macros__override_get_columns_macros,
     macros__package_override_get_columns_macros,
     macros__deprecated_adapter_macro,
     macros__incorrect_dispatch,
+    macros__named_materialization,
 )

@@ -78,6 +80,21 @@ class TestMacros:
         check_relations_equal(project.adapter, ["expected_local_macro", "local_macro"])


+class TestMacrosNamedMaterialization:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_materialization_macro.sql": models__materialization_macro,
+        }
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        return {"macros_named_materialization.sql": macros__named_materialization}
+
+    def test_macro_with_materialization_in_name_works(self, project):
+        run_dbt(expect_pass=True)
+
+
 class TestInvalidMacros:
     @pytest.fixture(scope="class")
     def models(self):
@@ -57,6 +57,11 @@ class TestFlags:
         assert hasattr(flags, "LOG_PATH")
         assert getattr(flags, "LOG_PATH") == Path("logs")

+    def test_log_file_max_size_default(self, run_context):
+        flags = Flags(run_context)
+        assert hasattr(flags, "LOG_FILE_MAX_BYTES")
+        assert getattr(flags, "LOG_FILE_MAX_BYTES") == 10 * 1024 * 1024
+
     @pytest.mark.parametrize(
         "set_stats_param,do_not_track,expected_anonymous_usage_stats",
         [
@@ -424,6 +424,9 @@ def test_invocation_args_to_dict_in_macro_runtime_context(
     # Comes from unit/utils.py config_from_parts_or_dicts method
     assert ctx["invocation_args_dict"]["profile_dir"] == "/dev/null"

+    assert isinstance(ctx["invocation_args_dict"]["warn_error_options"], Dict)
+    assert ctx["invocation_args_dict"]["warn_error_options"] == {"include": [], "exclude": []}
+

 def test_model_parse_context(config_postgres, manifest_fx, get_adapter, get_include_paths):
     ctx = providers.generate_parser_model_context(
@@ -28,6 +28,11 @@ class TestCoreDbtUtils(unittest.TestCase):
         connection_exception_retry(lambda: Counter._add_with_untar_exception(), 5)
         self.assertEqual(2, counter)  # 2 = original attempt returned ReadError, plus 1 retry

+    def test_connection_exception_retry_success_failed_eofexception(self):
+        Counter._reset()
+        connection_exception_retry(lambda: Counter._add_with_eof_exception(), 5)
+        self.assertEqual(2, counter)  # 2 = original attempt returned EOFError, plus 1 retry
+

 counter: int = 0

@@ -57,6 +62,12 @@ class Counter:
         if counter < 2:
             raise tarfile.ReadError

+    def _add_with_eof_exception():
+        global counter
+        counter += 1
+        if counter < 2:
+            raise EOFError
+
     def _reset():
         global counter
         counter = 0
@@ -2,7 +2,7 @@ from argparse import Namespace
 import pytest

 import dbt.flags as flags
-from dbt.events.functions import msg_to_dict, warn_or_error
+from dbt.events.functions import msg_to_dict, warn_or_error, setup_event_logger
 from dbt.events.types import InfoLevel, NoNodesForSelectionCriteria
 from dbt.exceptions import EventCompilationError

@@ -59,3 +59,13 @@ def test_msg_to_dict_handles_exceptions_gracefully():
     assert (
         False
     ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}"
+
+
+def test_setup_event_logger_specify_max_bytes(mocker):
+    patched_file_handler = mocker.patch("dbt.events.eventmgr.RotatingFileHandler")
+    args = Namespace(log_file_max_bytes=1234567)
+    flags.set_from_args(args, {})
+    setup_event_logger(flags.get_flags())
+    patched_file_handler.assert_called_once_with(
+        filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5
+    )
@@ -18,6 +18,7 @@ from dbt import tracking
 from dbt.contracts.files import SourceFile, FileHash, FilePath
 from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck
 from dbt.graph import NodeSelector, parse_difference
+from dbt.events.functions import setup_event_logger

 try:
     from queue import Empty

@@ -140,6 +141,7 @@ class GraphTest(unittest.TestCase):

         config = config_from_parts_or_dicts(project=cfg, profile=self.profile)
         dbt.flags.set_from_args(Namespace(), config)
+        setup_event_logger(dbt.flags.get_flags())
         object.__setattr__(dbt.flags.get_flags(), "PARTIAL_PARSE", False)
         return config