mirror of https://github.com/dbt-labs/dbt-core
synced 2025-12-20 04:01:27 +00:00

Compare commits: jerco/upda… ... split-inte… (35 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ca385c7058 | |
| | 95d217d106 | |
| | 991618dfc1 | |
| | 1af489b1cd | |
| | a433c31d6e | |
| | 5814928e38 | |
| | 6130a6e1d0 | |
| | 7872f6a670 | |
| | f230e418aa | |
| | 518eb73f88 | |
| | 5b6d21d7da | |
| | 410506f448 | |
| | 3cb44d37c0 | |
| | f977ed7471 | |
| | 3f5617b569 | |
| | fe9c875d32 | |
| | 23b16ad6d2 | |
| | fdeccfaf24 | |
| | fecde23da5 | |
| | b1d931337e | |
| | 39542336b8 | |
| | 799588cada | |
| | f392add4b8 | |
| | 49560bf2a2 | |
| | 44b3ed5ae9 | |
| | 6235145641 | |
| | ff5cb7ba51 | |
| | 1e2b9ae962 | |
| | 8cab58d248 | |
| | 0d645c227f | |
| | fb6c349677 | |
| | eeb057085c | |
| | 121371f4a4 | |
| | a32713198b | |
| | a1b067c683 | |
@@ -0,0 +1,7 @@
+kind: Breaking Changes
+body: Removed the FirstRunResultError and AfterFirstRunResultError event types, using
+  the existing RunResultError in their place.
+time: 2023-07-25T17:13:59.441682-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "7963"

6 .changes/unreleased/Dependencies-20230621-005752.yaml Normal file
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump mypy from 1.3.0 to 1.4.0"
+time: 2023-06-21T00:57:52.00000Z
+custom:
+  Author: dependabot[bot]
+  PR: 7912

6 .changes/unreleased/Dependencies-20230726-201740.yaml Normal file
@@ -0,0 +1,6 @@
+kind: "Dependencies"
+body: "Bump mypy from 1.4.0 to 1.4.1"
+time: 2023-07-26T20:17:40.00000Z
+custom:
+  Author: dependabot[bot]
+  PR: 8219

6 .changes/unreleased/Dependencies-20230727-145703.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Update pin for click<9
+time: 2023-07-27T14:57:03.180458-05:00
+custom:
+  Author: emmyoop
+  PR: "8232"

6 .changes/unreleased/Dependencies-20230727-145726.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Add upper bound to sqlparse pin of <0.5
+time: 2023-07-27T14:57:26.40416-05:00
+custom:
+  Author: emmyoop
+  PR: "8236"

6 .changes/unreleased/Dependencies-20230728-135227.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Dependencies
+body: Support dbt-semantic-interfaces 0.2.0
+time: 2023-07-28T13:52:27.207241-07:00
+custom:
+  Author: QMalcolm
+  PR: "8250"

@@ -1,6 +0,0 @@
-kind: Docs
-body: Fix for column tests not rendering on quoted columns
-time: 2023-05-31T11:54:19.687363-04:00
-custom:
-  Author: drewbanin
-  Issue: "201"

6 .changes/unreleased/Docs-20230715-200907.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Docs
+body: Corrected spelling of "Partiton"
+time: 2023-07-15T20:09:07.057361092+02:00
+custom:
+  Author: pgoslatara
+  Issue: "8100"

6 .changes/unreleased/Features-20230714-202445.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Features
+body: Added support to configure a delimiter for a seed file, defaults to comma
+time: 2023-07-14T20:24:45.513847165+02:00
+custom:
+  Author: ramonvermeulen
+  Issue: "3990"

6 .changes/unreleased/Fixes-20230625-142731.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fixed double-underline
+time: 2023-06-25T14:27:31.231253719+08:00
+custom:
+  Author: lllong33
+  Issue: "5301"

6 .changes/unreleased/Fixes-20230717-160652.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Copy target_schema from config into snapshot node
+time: 2023-07-17T16:06:52.957724-04:00
+custom:
+  Author: gshank
+  Issue: "6745"

6 .changes/unreleased/Fixes-20230720-122723.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Add status to Parse Inline Error
+time: 2023-07-20T12:27:23.085084-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8173"

6 .changes/unreleased/Fixes-20230720-161513.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Ensure `warn_error_options` get serialized in `invocation_args_dict`
+time: 2023-07-20T16:15:13.761813-07:00
+custom:
+  Author: QMalcolm
+  Issue: "7694"

6 .changes/unreleased/Fixes-20230720-170112.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Stop detecting materialization macros based on macro name
+time: 2023-07-20T17:01:12.496238-07:00
+custom:
+  Author: QMalcolm
+  Issue: "6231"

6 .changes/unreleased/Fixes-20230720-172422.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Update `dbt deps` download retry logic to handle `EOFError` exceptions
+time: 2023-07-20T17:24:22.969951-07:00
+custom:
+  Author: QMalcolm
+  Issue: "6653"

6 .changes/unreleased/Fixes-20230726-104448.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Improve handling of CTE injection with ephemeral models
+time: 2023-07-26T10:44:48.888451-04:00
+custom:
+  Author: gshank
+  Issue: "8213"

6 .changes/unreleased/Fixes-20230727-125830.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure`
+time: 2023-07-27T12:58:30.673803-07:00
+custom:
+  Author: QMalcolm
+  Issue: "8230"

7 .changes/unreleased/Fixes-20230728-115620.yaml Normal file
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run,
+  etc)
+time: 2023-07-28T11:56:20.863718-04:00
+custom:
+  Author: michelleark
+  Issue: "8166"

6 .changes/unreleased/Fixes-20230802-141556.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Fixes
+body: Fix retry not working with log-file-max-bytes
+time: 2023-08-02T14:15:56.306027-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8297"

6 .changes/unreleased/Under the Hood-20230719-124611.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: Refactor flaky test pp_versioned_models
+time: 2023-07-19T12:46:11.972481-04:00
+custom:
+  Author: gshank
+  Issue: "7781"

6 .changes/unreleased/Under the Hood-20230719-163334.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: format exception from dbtPlugin.initialize
+time: 2023-07-19T16:33:34.586377-04:00
+custom:
+  Author: michelleark
+  Issue: "8152"

6 .changes/unreleased/Under the Hood-20230724-150654.yaml Normal file
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: A way to control maxBytes for a single dbt.log file
+time: 2023-07-24T15:06:54.263822-07:00
+custom:
+  Author: ChenyuLInx
+  Issue: "8199"

7 .changes/unreleased/Under the Hood-20230725-102609.yaml Normal file
@@ -0,0 +1,7 @@
+kind: Under the Hood
+body: Ref expressions with version can now be processed by the latest version of the
+  high-performance dbt-extractor library.
+time: 2023-07-25T10:26:09.902878-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "7688"
40 .github/ISSUE_TEMPLATE/implementation-ticket.yml vendored Normal file
@@ -0,0 +1,40 @@
+name: 🛠️ Implementation
+description: This is an implementation ticket intended for use by the maintainers of dbt-core
+title: "[<project>] <title>"
+labels: ["user_docs"]
+body:
+  - type: markdown
+    attributes:
+      value: This is an implementation ticket intended for use by the maintainers of dbt-core
+  - type: checkboxes
+    attributes:
+      label: Housekeeping
+      description: >
+        A couple friendly reminders:
+        1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes
+        2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project.
+      options:
+        - label: I am a maintainer of dbt-core
+          required: true
+  - type: textarea
+    attributes:
+      label: Short description
+      description: |
+        Describe the scope of the ticket, a high-level implementation approach and any tradeoffs to consider
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Acceptance criteria
+      description: |
+        What is the definition of done for this ticket? Include any relevant edge cases and/or test cases
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Context
+      description: |
+        Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate
+    validations:
+      required: false
69 .github/workflows/main.yml vendored
@@ -33,6 +33,11 @@ defaults:
 run:
   shell: bash

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python integration testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
   code-quality:
     name: code-quality

@@ -106,25 +111,57 @@ jobs:
       env:
         CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+      include: ${{ steps.generate-include.outputs.include }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
+      - name: generate include
+        id: generate-include
+        run: |
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' )
+          INCLUDE_GROUPS="["
+          for include in ${INCLUDE[@]}; do
+            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+              INCLUDE_GROUPS+=$(sed 's/$/, /' <<< "{\"split-group\":\"${group}\",${include}}")
+            done
+          done
+          INCLUDE_GROUPS=$(echo $INCLUDE_GROUPS | sed 's/,*$//g')
+          INCLUDE_GROUPS+="]"
+          echo "include=${INCLUDE_GROUPS}"
+          echo "include=${INCLUDE_GROUPS}" >> $GITHUB_OUTPUT
+
   integration:
-    name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+    name: (${{ matrix.split-group }}) ${{ matrix.tox-env }} test / python ${{ matrix.python-version }} / ${{ matrix.os }}
+
     runs-on: ${{ matrix.os }}
-    timeout-minutes: 60
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
     strategy:
       fail-fast: false
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
         os: [ubuntu-20.04]
-        include:
-          - python-version: 3.8
-            os: windows-latest
-          - python-version: 3.8
-            os: macos-latest
+        tox-env: [integration, adapter]
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
+      include: ${{ fromJson(needs.integration-metadata.outputs.include) }}

     env:
-      TOXENV: integration
       DBT_INVOCATION_ENV: github-actions
       DBT_TEST_USER_1: dbt_test_user_1
       DBT_TEST_USER_2: dbt_test_user_2

@@ -165,6 +202,9 @@ jobs:
       - name: Run tests
         run: tox -- --ddtrace
+        env:
+          TOXENV: ${{ matrix.tox-env }}
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}

       - name: Get current date
         if: always()

@@ -185,6 +225,15 @@ jobs:
       env:
         CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

+  integration-report:
+    name: Integration Test Suite
+    runs-on: ubuntu-latest
+    needs: integration
+    steps:
+      - name: "[Notification] Integration test suite passes"
+        run: |
+          echo "::notice title="Integration test suite passes""
+
   build:
     name: build packages
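The two metadata steps above exist only to emit JSON strings for `fromJson`. As a sanity check, here is a minimal Python sketch of the values they produce; the worker count of 5 and the include entries are taken from the workflow itself, and the script is illustrative, not part of the change:

```python
# Rough Python equivalent of the "generate split-groups" and "generate include" steps.
import json

PYTHON_INTEGRATION_TEST_WORKERS = 5

# split-groups: a JSON list of group numbers as strings, e.g. ["1", "2", "3", "4", "5"]
split_groups = json.dumps([str(n) for n in range(1, PYTHON_INTEGRATION_TEST_WORKERS + 1)])
print(f"split-groups={split_groups}")  # what the step appends to $GITHUB_OUTPUT

# include: each extra OS/python combination cross-joined with every split group
include = [
    {"python-version": "3.8", "os": "windows-latest"},
    {"python-version": "3.8", "os": "macos-latest"},
]
include_groups = [
    {"split-group": str(group), **entry}
    for entry in include
    for group in range(1, PYTHON_INTEGRATION_TEST_WORKERS + 1)
]
print(f"include={json.dumps(include_groups)}")
```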
@@ -18,11 +18,41 @@ on:

 permissions: read-all

+# top-level adjustments can be made here
+env:
+  # number of parallel processes to spawn for python testing
+  PYTHON_INTEGRATION_TEST_WORKERS: 5
+
 jobs:
+  integration-metadata:
+    name: integration test metadata generation
+    runs-on: ubuntu-latest
+    outputs:
+      split-groups: ${{ steps.generate-split-groups.outputs.split-groups }}
+
+    steps:
+      - name: generate split-groups
+        id: generate-split-groups
+        run: |
+          MATRIX_JSON="["
+          for B in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
+            MATRIX_JSON+=$(sed 's/^/"/;s/$/"/' <<< "${B}")
+          done
+          MATRIX_JSON="${MATRIX_JSON//\"\"/\", \"}"
+          MATRIX_JSON+="]"
+          echo "split-groups=${MATRIX_JSON}" >> $GITHUB_OUTPUT
+
   # run the performance measurements on the current or default branch
   test-schema:
     name: Test Log Schema
     runs-on: ubuntu-20.04
+    timeout-minutes: 30
+    needs:
+      - integration-metadata
+    strategy:
+      fail-fast: false
+      matrix:
+        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
     env:
       # turns warnings into errors
       RUSTFLAGS: "-D warnings"

@@ -65,3 +95,14 @@ jobs:
       # we actually care if these pass, because the normal test run doesn't usually include many json log outputs
       - name: Run integration tests
         run: tox -e integration -- -nauto
+        env:
+          PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }}
+
+  test-schema-report:
+    name: Log Schema Test Suite
+    runs-on: ubuntu-latest
+    needs: test-schema
+    steps:
+      - name: "[Notification] Log test suite passes"
+        run: |
+          echo "::notice title="Log test suite passes""
@@ -37,7 +37,7 @@ repos:
     alias: flake8-check
     stages: [manual]
 - repo: https://github.com/pre-commit/mirrors-mypy
-  rev: v1.3.0
+  rev: v1.4.1
   hooks:
   - id: mypy
     # N.B.: Mypy is... a bit fragile.
@@ -61,7 +61,6 @@ def args_to_context(args: List[str]) -> Context:
     if len(args) == 1 and "," in args[0]:
         args = args[0].split(",")
     sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args)
-
     # Handle source and docs group.
     if isinstance(sub_command, Group):
         sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args)

@@ -319,7 +318,6 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
     for k, v in args_dict.items():
         k = k.lower()
-
         # if a "which" value exists in the args dict, it should match the command provided
         if k == WHICH_KEY:
             if v != command.value:

@@ -344,7 +342,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar
         if k == "macro" and command == CliCommand.RUN_OPERATION:
             add_fn(v)
-        elif v in (None, False):
+        # None is a Singleton, False is a Flyweight, only one instance of each.
+        elif v is None or v is False:
             add_fn(f"--no-{spinal_cased}")
         elif v is True:
             add_fn(f"--{spinal_cased}")
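The switch to identity checks is subtle but real: `in` tests with equality, and in Python `0 == False`, so the old membership check would have routed a legitimate value of `0` down the `--no-...` branch. A quick demonstration:

```python
# Why `v in (None, False)` is wrong for falsy-but-valid values like 0:
v = 0
print(v in (None, False))        # True  -- 0 compares equal to False
print(v is None or v is False)   # False -- identity distinguishes 0 from False
```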
@@ -132,6 +132,7 @@ class dbtRunner:
 @p.enable_legacy_logger
 @p.fail_fast
 @p.log_cache_events
+@p.log_file_max_bytes
 @p.log_format
 @p.log_format_file
 @p.log_level
@@ -171,6 +171,15 @@ use_colors_file = click.option(
     default=True,
 )

+log_file_max_bytes = click.option(
+    "--log-file-max-bytes",
+    envvar="DBT_LOG_FILE_MAX_BYTES",
+    help="Configure the max file size in bytes for a single dbt.log file, before rolling over. 0 means no limit.",
+    default=10 * 1024 * 1024,  # 10mb
+    type=click.INT,
+    hidden=True,
+)
+
 log_path = click.option(
     "--log-path",
     envvar="DBT_LOG_PATH",

@@ -380,9 +389,9 @@ inline = click.option(
 # Most CLI arguments should use the combined `select` option that aliases `--models` to `--select`.
 # However, if you need to split out these separators (like `dbt ls`), use the `models` and `raw_select` options instead.
 # See https://github.com/dbt-labs/dbt-core/pull/6774#issuecomment-1408476095 for more info.
-models = click.option(*model_decls, **select_attrs)
-raw_select = click.option(*select_decls, **select_attrs)
-select = click.option(*select_decls, *model_decls, **select_attrs)
+models = click.option(*model_decls, **select_attrs)  # type: ignore[arg-type]
+raw_select = click.option(*select_decls, **select_attrs)  # type: ignore[arg-type]
+select = click.option(*select_decls, *model_decls, **select_attrs)  # type: ignore[arg-type]

 selector = click.option(
     "--selector",
@@ -135,12 +135,12 @@ def as_matrix(table):
     return [r.values() for r in table.rows.values()]


-def from_csv(abspath, text_columns):
+def from_csv(abspath, text_columns, delimiter=","):
     type_tester = build_type_tester(text_columns=text_columns)
     with open(abspath, encoding="utf-8") as fp:
         if fp.read(1) != BOM:
             fp.seek(0)
-        return agate.Table.from_csv(fp, column_types=type_tester)
+        return agate.Table.from_csv(fp, column_types=type_tester, delimiter=delimiter)


 class _NullMarker:
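For reference, agate forwards extra keyword arguments such as `delimiter` to the underlying CSV reader, which is what makes the one-line change sufficient. A minimal sketch of reading a semicolon-delimited seed file the way the patched helper does (file path illustrative, column typing omitted):

```python
import agate

# agate passes unknown keyword arguments straight through to csv.reader,
# so delimiter=";" parses a semicolon-separated seed file.
table = agate.Table.from_csv("seeds/my_seed.csv", delimiter=";")  # path is illustrative
print(table.column_names)
```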
@@ -4,7 +4,6 @@ import json
 import networkx as nx  # type: ignore
 import os
 import pickle
-import sqlparse

 from collections import defaultdict
 from typing import List, Dict, Any, Tuple, Optional

@@ -36,6 +35,7 @@ from dbt.node_types import NodeType, ModelLanguage
 from dbt.events.format import pluralize
 import dbt.tracking
 import dbt.task.list as list_task
+import sqlparse

 graph_file_name = "graph.gpickle"

@@ -378,16 +378,16 @@ class Compiler:
             _add_prepended_cte(prepended_ctes, InjectedCTE(id=cte.id, sql=sql))

-        injected_sql = inject_ctes_into_sql(
-            model.compiled_code,
-            prepended_ctes,
-        )
         # Check again before updating for multi-threading
         if not model.extra_ctes_injected:
+            injected_sql = inject_ctes_into_sql(
+                model.compiled_code,
+                prepended_ctes,
+            )
+            model.extra_ctes_injected = True
             model._pre_injected_sql = model.compiled_code
             model.compiled_code = injected_sql
             model.extra_ctes = prepended_ctes
-            model.extra_ctes_injected = True

         # if model.extra_ctes is not set to prepended ctes, something went wrong
         return model, model.extra_ctes

@@ -523,6 +523,12 @@ class Compiler:
         the node's raw_code into compiled_code, and then calls the
         recursive method to "prepend" the ctes.
         """
+        # Make sure Lexer for sqlparse 0.4.4 is initialized
+        from sqlparse.lexer import Lexer  # type: ignore
+
+        if hasattr(Lexer, "get_default_instance"):
+            Lexer.get_default_instance()
+
         node = self._compile_code(node, manifest, extra_context)

         node, _ = self._recursively_prepend_ctes(node, manifest, extra_context)
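The reordering above is a double-check idiom for ephemeral models compiled concurrently: the injection work now happens only after the second `extra_ctes_injected` test, and the flag flips before the compiled code is swapped. A distilled sketch of the pattern, with names following the diff and `inject` standing in for `inject_ctes_into_sql`:

```python
def inject_once(model, prepended_ctes, inject):
    """Inject CTEs at most once even when several threads reach this point."""
    if not model.extra_ctes_injected:        # re-check under concurrency
        injected_sql = inject(model.compiled_code, prepended_ctes)
        model.extra_ctes_injected = True     # mark first, so peers skip the work
        model._pre_injected_sql = model.compiled_code
        model.compiled_code = injected_sql
        model.extra_ctes = prepended_ctes
    return model, model.extra_ctes
```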
@@ -865,8 +865,9 @@ class ProviderContext(ManifestContext):
         assert self.model.root_path
         path = os.path.join(self.model.root_path, self.model.original_file_path)
         column_types = self.model.config.column_types
+        delimiter = self.model.config.delimiter
         try:
-            table = agate_helper.from_csv(path, text_columns=column_types)
+            table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter)
         except ValueError as e:
             raise LoadAgateTableValueError(e, node=self.model)
         table.original_abspath = os.path.abspath(path)
@@ -544,6 +544,7 @@ class NodeConfig(NodeAndTestConfig):
 @dataclass
 class SeedConfig(NodeConfig):
     materialized: str = "seed"
+    delimiter: str = ","
     quote_columns: Optional[bool] = None

     @classmethod

@@ -619,6 +620,8 @@ class SnapshotConfig(EmptySnapshotConfig):
     @classmethod
     def validate(cls, data):
         super().validate(data)
+        # Note: currently you can't just set these keys in schema.yml because this validation
+        # will fail when parsing the snapshot node.
         if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
             raise ValidationError(
                 "Snapshots must be configured with a 'strategy', 'unique_key', "

@@ -649,6 +652,7 @@ class SnapshotConfig(EmptySnapshotConfig):
         if data.get("materialized") and data.get("materialized") != "snapshot":
             raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

+    # Called by "calculate_node_config_dict" in ContextConfigGenerator
     def finalize_and_validate(self):
         data = self.to_dict(omit_none=True)
         self.validate(data)
@@ -50,6 +50,7 @@ from dbt.flags import get_flags
 from dbt.node_types import ModelLanguage, NodeType, AccessType
 from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
 from dbt_semantic_interfaces.references import (
+    EntityReference,
     MeasureReference,
     LinkableElementReference,
     SemanticModelReference,

@@ -1498,6 +1499,7 @@ class SemanticModel(GraphNode):
     refs: List[RefArgs] = field(default_factory=list)
     created_at: float = field(default_factory=lambda: time.time())
     config: SemanticModelConfig = field(default_factory=SemanticModelConfig)
+    primary_entity: Optional[str] = None

     @property
     def entity_references(self) -> List[LinkableElementReference]:

@@ -1568,17 +1570,26 @@ class SemanticModel(GraphNode):
             measure is not None
         ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})"

-        if self.defaults is not None:
-            default_agg_time_dimesion = self.defaults.agg_time_dimension
+        default_agg_time_dimension = (
+            self.defaults.agg_time_dimension if self.defaults is not None else None
+        )

-        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimesion
+        agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
         assert agg_time_dimension_name is not None, (
-            f"Aggregation time dimension for measure {measure.name} is not set! This should either be set directly on "
-            f"the measure specification in the model, or else defaulted to the primary time dimension in the data "
-            f"source containing the measure."
+            f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! "
+            "To fix this either specify a default `agg_time_dimension` for the semantic model or define an "
+            "`agg_time_dimension` on the measure directly."
         )
         return TimeDimensionReference(element_name=agg_time_dimension_name)

+    @property
+    def primary_entity_reference(self) -> Optional[EntityReference]:
+        return (
+            EntityReference(element_name=self.primary_entity)
+            if self.primary_entity is not None
+            else None
+        )
+

 # ====================================
 # Patches
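The rewritten lookup also fixes the unbound-local bug tracked in the Fixes-20230727-125830 entry: when `self.defaults` was `None`, the old branch never assigned `default_agg_time_dimesion` at all. Reduced to stand-in classes, the new resolution order looks like this (sample values are illustrative):

```python
class Defaults:
    agg_time_dimension = "ds"

class Measure:
    agg_time_dimension = None  # not set on the measure itself

defaults = Defaults()
measure = Measure()

# Model-level default is computed unconditionally, so it is always bound:
default_agg_time_dimension = defaults.agg_time_dimension if defaults is not None else None
# The measure-level setting wins; otherwise fall back to the model default:
agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension
assert agg_time_dimension_name is not None
print(agg_time_dimension_name)  # "ds" -- falls back to the model default
```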
@@ -728,6 +728,7 @@ class UnparsedSemanticModel(dbtClassMixin):
     entities: List[UnparsedEntity] = field(default_factory=list)
     measures: List[UnparsedMeasure] = field(default_factory=list)
     dimensions: List[UnparsedDimension] = field(default_factory=list)
+    primary_entity: Optional[str] = None


 def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
@@ -8,12 +8,12 @@ import logging
 from logging.handlers import RotatingFileHandler
 import threading
 import traceback
-from typing import Any, Callable, List, Optional, TextIO
+from typing import Any, Callable, List, Optional, TextIO, Protocol
 from uuid import uuid4

 from dbt.events.format import timestamp_to_datetime_string
-
 from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg
+import dbt.utils

 # A Filter is a function which takes a BaseEvent and returns True if the event
 # should be logged, False otherwise.

@@ -80,6 +80,7 @@ class LoggerConfig:
     use_colors: bool = False
     output_stream: Optional[TextIO] = None
     output_file_name: Optional[str] = None
+    output_file_max_bytes: Optional[int] = 10 * 1024 * 1024  # 10 mb
     logger: Optional[Any] = None

@@ -100,7 +101,7 @@ class _Logger:
             file_handler = RotatingFileHandler(
                 filename=str(config.output_file_name),
                 encoding="utf8",
-                maxBytes=10 * 1024 * 1024,  # 10 mb
+                maxBytes=config.output_file_max_bytes,  # type: ignore
                 backupCount=5,
             )
             self._python_logger = self._get_python_log_for_handler(file_handler)

@@ -175,7 +176,7 @@ class _JsonLogger(_Logger):
         from dbt.events.functions import msg_to_dict

         msg_dict = msg_to_dict(msg)
-        raw_log_line = json.dumps(msg_dict, sort_keys=True)
+        raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
         line = self.scrubber(raw_log_line)  # type: ignore
         return line

@@ -205,7 +206,7 @@ class EventManager:
         for callback in self.callbacks:
             callback(msg)

-    def add_logger(self, config: LoggerConfig):
+    def add_logger(self, config: LoggerConfig) -> None:
         logger = (
             _JsonLogger(self, config)
             if config.line_format == LineFormat.Json

@@ -217,3 +218,25 @@ class EventManager:
     def flush(self):
         for logger in self.loggers:
             logger.flush()
+
+
+class IEventManager(Protocol):
+    callbacks: List[Callable[[EventMsg], None]]
+    invocation_id: str
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        ...
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        ...
+
+
+class TestEventManager(IEventManager):
+    def __init__(self):
+        self.event_history = []
+
+    def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None:
+        self.event_history.append((e, level))
+
+    def add_logger(self, config: LoggerConfig) -> None:
+        raise NotImplementedError()
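With `maxBytes` now sourced from `LoggerConfig`, the rollover behavior itself is plain stdlib `RotatingFileHandler`: once the file exceeds `maxBytes` it is renamed to `dbt.log.1` and so on, keeping at most `backupCount` old files. A self-contained sketch with a deliberately tiny limit:

```python
import logging
from logging.handlers import RotatingFileHandler

# Tiny maxBytes to force rollover quickly; dbt's default is 10 * 1024 * 1024.
handler = RotatingFileHandler(
    filename="dbt.log",
    encoding="utf8",
    maxBytes=1024,   # illustrative; triggers dbt.log -> dbt.log.1 rotation fast
    backupCount=5,   # keep at most five rolled-over files
)
logger = logging.getLogger("rollover-demo")
logger.addHandler(handler)
for i in range(100):
    logger.warning("line %d: some log output that will trigger rollover", i)
```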
@@ -1,6 +1,6 @@
 from dbt.constants import METADATA_ENV_PREFIX
 from dbt.events.base_types import BaseEvent, EventLevel, EventMsg
-from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter
+from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter, IEventManager
 from dbt.events.helpers import env_secrets, scrub_secrets
 from dbt.events.types import Formatting, Note
 from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER

@@ -13,6 +13,7 @@ from typing import Callable, Dict, List, Optional, TextIO
 import uuid
 from google.protobuf.json_format import MessageToDict

+import dbt.utils

 LOG_VERSION = 3
 metadata_vars: Optional[Dict[str, str]] = None

@@ -67,7 +68,11 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
     log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE)
     EVENT_MANAGER.add_logger(
         _get_logfile_config(
-            log_file, flags.USE_COLORS_FILE, log_file_format, log_level_file
+            log_file,
+            flags.USE_COLORS_FILE,
+            log_file_format,
+            log_level_file,
+            flags.LOG_FILE_MAX_BYTES,
         )
     )

@@ -116,7 +121,11 @@ def _stdout_filter(
 def _get_logfile_config(
-    log_path: str, use_colors: bool, line_format: LineFormat, level: EventLevel
+    log_path: str,
+    use_colors: bool,
+    line_format: LineFormat,
+    level: EventLevel,
+    log_file_max_bytes: int,
 ) -> LoggerConfig:
     return LoggerConfig(
         name="file_log",

@@ -126,6 +135,7 @@ def _get_logfile_config(
         scrubber=env_scrubber,
         filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format),
         output_file_name=log_path,
+        output_file_max_bytes=log_file_max_bytes,
     )

@@ -172,7 +182,7 @@ def cleanup_event_logger():
 # Since dbt-rpc does not do its own log setup, and since some events can
 # currently fire before logs can be configured by setup_event_logger(), we
 # create a default configuration with default settings and no file output.
-EVENT_MANAGER: EventManager = EventManager()
+EVENT_MANAGER: IEventManager = EventManager()
 EVENT_MANAGER.add_logger(
     _get_logbook_log_config(False, True, False, False)  # type: ignore
     if ENABLE_LEGACY_LOGGER

@@ -200,7 +210,7 @@ def stop_capture_stdout_logs():
 # the message may contain secrets which must be scrubbed at the usage site.
 def msg_to_json(msg: EventMsg) -> str:
     msg_dict = msg_to_dict(msg)
-    raw_log_line = json.dumps(msg_dict, sort_keys=True)
+    raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder)
     return raw_log_line

@@ -285,3 +295,8 @@ def set_invocation_id() -> None:
     # This is primarily for setting the invocation_id for separate
     # commands in the dbt servers. It shouldn't be necessary for the CLI.
     EVENT_MANAGER.invocation_id = str(uuid.uuid4())
+
+
+def ctx_set_event_manager(event_manager: IEventManager):
+    global EVENT_MANAGER
+    EVENT_MANAGER = event_manager
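`IEventManager` being a `typing.Protocol` means any object with the right attributes and methods satisfies it structurally; no inheritance from `EventManager` is needed, which is what makes `ctx_set_event_manager` useful for swapping in test doubles. A minimal sketch of the idea with simplified signatures:

```python
from typing import Optional, Protocol

class IEventManager(Protocol):
    invocation_id: str

    def fire_event(self, e: object, level: Optional[str] = None) -> None: ...

class StubEventManager:  # no base class, yet a structurally valid IEventManager
    invocation_id = "test-invocation"

    def __init__(self) -> None:
        self.event_history: list = []

    def fire_event(self, e: object, level: Optional[str] = None) -> None:
        self.event_history.append((e, level))

manager: IEventManager = StubEventManager()  # accepted by static type checkers
manager.fire_event("some-event")
print(manager.event_history)
```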
@@ -2245,25 +2245,7 @@ message CheckNodeTestFailureMsg {
   CheckNodeTestFailure data = 2;
 }

-// Z028
-message FirstRunResultError {
-  string msg = 1;
-}
-
-message FirstRunResultErrorMsg {
-  EventInfo info = 1;
-  FirstRunResultError data = 2;
-}
-
-// Z029
-message AfterFirstRunResultError {
-  string msg = 1;
-}
-
-message AfterFirstRunResultErrorMsg {
-  EventInfo info = 1;
-  AfterFirstRunResultError data = 2;
-}
+// Skipped Z028, Z029

 // Z030
 message EndOfRunSummary {
@@ -2171,25 +2171,7 @@ class CheckNodeTestFailure(InfoLevel):
         return f" See test failures:\n {border}\n {msg}\n {border}"


-# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result
-# object into multiple log lines
-# TODO: is this reallly needed? See printer.py
-
-
-class FirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z028"
-
-    def message(self) -> str:
-        return yellow(self.msg)
-
-
-class AfterFirstRunResultError(ErrorLevel):
-    def code(self):
-        return "Z029"
-
-    def message(self) -> str:
-        return self.msg
-
+# Skipped Z028, Z029

 class EndOfRunSummary(InfoLevel):
@@ -486,7 +486,7 @@ class InvalidConnectionError(DbtRuntimeError):
         self.thread_id = thread_id
         self.known = known
         super().__init__(
-            msg="connection never acquired for thread {self.thread_id}, have {self.known}"
+            msg=f"connection never acquired for thread {self.thread_id}, have {self.known}"
        )
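The exceptions fix above is a single missing `f` prefix; without it the braces are emitted literally instead of interpolated:

```python
# Same shape as the fixed message, with local stand-ins for self.* attributes:
thread_id, known = "Thread-1", ["conn_a"]
print("connection never acquired for thread {thread_id}, have {known}")
# -> connection never acquired for thread {thread_id}, have {known}
print(f"connection never acquired for thread {thread_id}, have {known}")
# -> connection never acquired for thread Thread-1, have ['conn_a']
```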
@@ -63,3 +63,12 @@
   {{ exceptions.raise_not_implemented(
     'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}
 {% endmacro %}
+
+{% macro get_relations() %}
+  {{ return(adapter.dispatch('get_relations', 'dbt')()) }}
+{% endmacro %}
+
+{% macro default__get_relations() %}
+  {{ exceptions.raise_not_implemented(
+    'get_relations macro not implemented for adapter '+adapter.type()) }}
+{% endmacro %}
@@ -19,7 +19,7 @@
     {% set day_count = (end_date - start_date).days %}
     {% if day_count < 0 %}
         {% set msg -%}
-            Partiton start date is after the end date ({{ start_date }}, {{ end_date }})
+            Partition start date is after the end date ({{ start_date }}, {{ end_date }})
         {%- endset %}

         {{ exceptions.raise_compiler_error(msg, model) }}
@@ -33,7 +33,12 @@
     -- cleanup
     {% if existing_relation is not none %}
-        {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
+            {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}

     {{ adapter.rename_relation(intermediate_relation, target_relation) }}
@@ -45,7 +45,12 @@
     -- cleanup
     -- move the existing view out of the way
     {% if existing_relation is not none %}
-        {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped
+           since the variable was first set. */
+        {% set existing_relation = load_cached_relation(existing_relation) %}
+        {% if existing_relation is not none %}
+            {{ adapter.rename_relation(existing_relation, backup_relation) }}
+        {% endif %}
     {% endif %}
     {{ adapter.rename_relation(intermediate_relation, target_relation) }}
@@ -102,8 +102,7 @@ class RelationUpdate:
         self.package_updaters = package_updaters
         self.component = component

-    def __call__(self, parsed_node: Any, config_dict: Dict[str, Any]) -> None:
-        override = config_dict.get(self.component)
+    def __call__(self, parsed_node: Any, override: Optional[str]) -> None:
         if parsed_node.package_name in self.package_updaters:
             new_value = self.package_updaters[parsed_node.package_name](override, parsed_node)
         else:

@@ -280,9 +279,19 @@ class ConfiguredParser(
     def update_parsed_node_relation_names(
         self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
     ) -> None:
-        self._update_node_database(parsed_node, config_dict)
-        self._update_node_schema(parsed_node, config_dict)
-        self._update_node_alias(parsed_node, config_dict)
+        # These call the RelationUpdate callable to go through generate_name macros
+        self._update_node_database(parsed_node, config_dict.get("database"))
+        self._update_node_schema(parsed_node, config_dict.get("schema"))
+        self._update_node_alias(parsed_node, config_dict.get("alias"))
+
+        # Snapshot nodes use special "target_database" and "target_schema" fields for some reason
+        if parsed_node.resource_type == NodeType.Snapshot:
+            if "target_database" in config_dict and config_dict["target_database"]:
+                parsed_node.database = config_dict["target_database"]
+            if "target_schema" in config_dict and config_dict["target_schema"]:
+                parsed_node.schema = config_dict["target_schema"]
+
         self._update_node_relation_name(parsed_node)

     def update_parsed_node_config(

@@ -349,7 +358,7 @@ class ConfiguredParser(
         # do this once before we parse the node database/schema/alias, so
         # parsed_node.config is what it would be if they did nothing
         self.update_parsed_node_config_dict(parsed_node, config_dict)
-        # This updates the node database/schema/alias
+        # This updates the node database/schema/alias/relation_name
         self.update_parsed_node_relation_names(parsed_node, config_dict)

         # tests don't have hooks
@@ -81,7 +81,7 @@ class MacroParser(BaseParser[Macro]):
         name: str = macro.name.replace(MACRO_PREFIX, "")
         node = self.parse_macro(block, base_node, name)
         # get supported_languages for materialization macro
-        if "materialization" in name:
+        if block.block_type_name == "materialization":
             node.supported_languages = jinja.get_supported_languages(macro)
         yield node
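The macro-parser fix matters because the old substring test fired for any macro that merely had "materialization" in its name, while only true `{% materialization ... %}` blocks carry supported languages. Illustratively:

```python
# Old heuristic: substring match on the macro name -> false positives.
def is_materialization_by_name(name: str) -> bool:
    return "materialization" in name

# New check: look at the Jinja block type instead of the name.
def is_materialization_by_block(block_type_name: str) -> bool:
    return block_type_name == "materialization"

print(is_materialization_by_name("get_materialization_name"))  # True (false positive)
print(is_materialization_by_block("macro"))                    # False (correct)
```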
@@ -497,12 +497,10 @@ class ModelParser(SimpleSQLParser[ModelNode]):
         # set refs and sources on the node object
         refs: List[RefArgs] = []
         for ref in statically_parsed["refs"]:
-            if len(ref) == 1:
-                package, name = None, ref[0]
-            else:
-                package, name = ref
-
-            refs.append(RefArgs(package=package, name=name))
+            name = ref.get("name")
+            package = ref.get("package")
+            version = ref.get("version")
+            refs.append(RefArgs(name, package, version))
         node.refs += refs
         node.sources += statically_parsed["sources"]
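This tracks the dbt-extractor upgrade noted in the Under the Hood-20230725-102609 entry: refs now arrive as mappings with an optional version instead of one- or two-element lists. A sketch of both shapes (sample data is illustrative):

```python
# Old extractor output: [name] or [package, name]
old_style = [["orders"], ["jaffle_shop", "orders"]]

# New extractor output: a mapping that can also carry a version
new_style = [
    {"name": "orders"},
    {"name": "orders", "package": "jaffle_shop", "version": "2"},
]

for ref in new_style:
    name = ref.get("name")        # always present
    package = ref.get("package")  # None when the ref is local
    version = ref.get("version")  # None when the ref is unversioned
    print(name, package, version) # the positional args to RefArgs(name, package, version)
```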
@@ -532,6 +532,7 @@ class SemanticModelParser(YamlReader):
             measures=self._get_measures(unparsed.measures),
             dimensions=self._get_dimensions(unparsed.dimensions),
             defaults=unparsed.defaults,
+            primary_entity=unparsed.primary_entity,
         )

         ctx = generate_parse_semantic_models(
@@ -29,8 +29,11 @@ class dbtPlugin:
         self.project_name = project_name
         try:
             self.initialize()
+        except DbtRuntimeError as e:
+            # Remove the first line of DbtRuntimeError to avoid redundant "Runtime Error" line
+            raise DbtRuntimeError("\n".join(str(e).split("\n")[1:]))
         except Exception as e:
-            raise DbtRuntimeError(f"initialize: {e}")
+            raise DbtRuntimeError(str(e))

     @property
     def name(self) -> str:
@@ -139,6 +139,7 @@ class CompileTask(GraphRunnableTask):
                 "node_path": "sql/inline_query",
                 "node_name": "inline_query",
                 "unique_id": "sqloperation.test.inline_query",
+                "node_status": "failed",
             },
         )
     )
@@ -14,8 +14,6 @@ from dbt.events.types import (
     RunResultErrorNoMessage,
     SQLCompiledPath,
     CheckNodeTestFailure,
-    FirstRunResultError,
-    AfterFirstRunResultError,
     EndOfRunSummary,
 )

@@ -118,15 +116,7 @@ def print_run_result_error(result, newline: bool = True, is_warning: bool = Fals
         fire_event(CheckNodeTestFailure(relation_name=result.node.relation_name))

     elif result.message is not None:
-        first = True
-        for line in result.message.split("\n"):
-            # TODO: why do we format like this? Is there a reason this needs to
-            # be split instead of sending it as a single log line?
-            if first:
-                fire_event(FirstRunResultError(msg=line))
-                first = False
-            else:
-                fire_event(AfterFirstRunResultError(msg=line))
+        fire_event(RunResultError(msg=result.message))


 def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None:
@@ -375,15 +375,17 @@ class GraphRunnableTask(ConfiguredTask):
             )

             print_run_result_error(failure.result)
-            raise
+            # ensure information about all nodes is propagated to run results when failing fast
+            return self.node_results
         except KeyboardInterrupt:
             self._cancel_connections(pool)
             print_run_end_messages(self.node_results, keyboard_interrupt=True)
             raise
-        finally:
-            pool.close()
-            pool.join()
-            return self.node_results
+
+        pool.close()
+        pool.join()
+
+        return self.node_results

     def _mark_dependent_errors(self, node_id, result, cause):
         if self.graph is None:
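The runnable-task fix removes a classic Python trap: a `return` inside a `finally` block silently discards whatever exception was in flight, which is why fail-fast and runtime errors never reached the caller. A minimal demonstration:

```python
def swallowed():
    try:
        raise RuntimeError("boom")
    finally:
        return "results"  # the RuntimeError is discarded here

def raised():
    try:
        raise RuntimeError("boom")
    finally:
        pass  # cleanup only -- the exception propagates
    return "results"

print(swallowed())  # "results"; the caller never sees the exception
try:
    raised()
except RuntimeError as e:
    print(f"caller sees: {e}")  # caller sees: boom
```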
1 core/dbt/tests/fixtures/project.py vendored
@@ -502,6 +502,7 @@ def project(
         DEBUG=False,
         LOG_CACHE_EVENTS=False,
         QUIET=False,
+        LOG_FILE_MAX_BYTES=1000000,
     )
     setup_event_logger(log_flags)
     orig_cwd = os.getcwd()

@@ -16,9 +16,8 @@ import time
 from pathlib import PosixPath, WindowsPath
 
 from contextlib import contextmanager
-from dbt.exceptions import ConnectionError, DuplicateAliasError
-from dbt.events.functions import fire_event
 from dbt.events.types import RetryExternalCall, RecordRetryException
+from dbt.helper_types import WarnErrorOptions
 from dbt import flags
 from enum import Enum
 from typing_extensions import Protocol

@@ -40,6 +39,7 @@ from typing import (
     Sequence,
 )
 
+import dbt.events.functions
 import dbt.exceptions
 
 DECIMALS: Tuple[Type[Any], ...]
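
The import reshuffle replaces from-imports with module imports. One practical difference, and a common reason for this style: a from-import binds the name once at import time, while a module-qualified call is looked up on every invocation, so tests that monkeypatch the module attribute actually take effect. A minimal illustration, reusing names from the hunk:

    import dbt.events.functions

    def notify(event):
        # resolved at call time; patching dbt.events.functions.fire_event works
        dbt.events.functions.fire_event(event)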

@@ -337,15 +337,18 @@ class JSONEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, DECIMALS):
             return float(obj)
-        if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
+        elif isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
             return obj.isoformat()
-        if isinstance(obj, jinja2.Undefined):
+        elif isinstance(obj, jinja2.Undefined):
             return ""
-        if hasattr(obj, "to_dict"):
+        elif isinstance(obj, Exception):
+            return repr(obj)
+        elif hasattr(obj, "to_dict"):
             # if we have a to_dict we should try to serialize the result of
             # that!
             return obj.to_dict(omit_none=True)
-        return super().default(obj)
+        else:
+            return super().default(obj)
 
 
 class ForgivingJSONEncoder(JSONEncoder):
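
With the new Exception branch, encoding a payload that contains an error value no longer raises; the exception serializes as its repr. A small illustration, assuming the class above is importable as dbt.utils.JSONEncoder (the sample payload is made up):

    import json

    from dbt.utils import JSONEncoder

    payload = {"status": "error", "exc": ValueError("bad input")}
    print(json.dumps(payload, cls=JSONEncoder))
    # => {"status": "error", "exc": "ValueError('bad input')"}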

@@ -369,7 +372,7 @@ class Translator:
         for key, value in kwargs.items():
             canonical_key = self.aliases.get(key, key)
             if canonical_key in result:
-                raise DuplicateAliasError(kwargs, self.aliases, canonical_key)
+                raise dbt.exceptions.DuplicateAliasError(kwargs, self.aliases, canonical_key)
             result[canonical_key] = self.translate_value(value)
         return result
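
The only change above is fully qualifying the exception, matching the import cleanup earlier in this file. For reference, the collision it guards against looks like this — alias table and kwargs invented for illustration:

    aliases = {"pass": "password"}
    kwargs = {"pass": "secret", "password": "other"}
    # both keys canonicalize to "password", so translation raises
    # dbt.exceptions.DuplicateAliasError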

@@ -389,9 +392,7 @@ class Translator:
             return self.translate_mapping(value)
         except RuntimeError as exc:
             if "maximum recursion depth exceeded" in str(exc):
-                raise dbt.exceptions.RecursionError(
-                    "Cycle detected in a value passed to translate!"
-                )
+                raise RecursionError("Cycle detected in a value passed to translate!")
             raise

@@ -601,14 +602,17 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0):
     except (
         requests.exceptions.RequestException,
         ReadError,
+        EOFError,
     ) as exc:
         if attempt <= max_attempts - 1:
-            fire_event(RecordRetryException(exc=str(exc)))
-            fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
+            dbt.events.functions.fire_event(RecordRetryException(exc=str(exc)))
+            dbt.events.functions.fire_event(RetryExternalCall(attempt=attempt, max=max_attempts))
             time.sleep(1)
             return _connection_exception_retry(fn, max_attempts, attempt + 1)
         else:
-            raise ConnectionError("External connection exception occurred: " + str(exc))
+            raise dbt.exceptions.ConnectionError(
+                "External connection exception occurred: " + str(exc)
+            )
 
 
 # This is used to serialize the args in the run_results and in the logs.
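
Two things change here: EOFError joins the set of retryable exceptions, and fire_event is called through the module so test-time patching takes effect. A hedged usage sketch — the signature comes from the hunk header, the download function is invented:

    from dbt.utils import _connection_exception_retry

    def download():
        ...  # any callable that may raise RequestException/ReadError/EOFError

    _connection_exception_retry(download, max_attempts=5)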

@@ -652,6 +656,9 @@ def args_to_dict(args):
         # this was required for a test case
         if isinstance(var_args[key], PosixPath) or isinstance(var_args[key], WindowsPath):
             var_args[key] = str(var_args[key])
+        if isinstance(var_args[key], WarnErrorOptions):
+            var_args[key] = var_args[key].to_dict()
+
         dict_args[key] = var_args[key]
     return dict_args
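
The new branch makes warn/error option values JSON-safe before they land in run_results and logs. A sketch of the conversion — the to_dict call is shown in the diff, but the constructor arguments here are assumptions, not taken from it:

    from dbt.helper_types import WarnErrorOptions

    opts = WarnErrorOptions(include="*", exclude=["NoNodesForSelectionCriteria"])
    print(opts.to_dict())  # plain dict, safe for json.dumps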

@@ -59,8 +59,7 @@ setup(
     # ----
     # dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility
     # with major versions in each new minor version of dbt-core.
-    # temporarily pinning click for mypy failures: https://github.com/pallets/click/issues/2558
-    "click>=8.1.1,<8.1.4",
+    "click<9",
     "networkx>=2.3,<4",
     # ----
     # These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes)

@@ -69,16 +68,14 @@ setup(
     "pathspec>=0.9,<0.12",
     "isodate>=0.6,<0.7",
     # ----
-    # There was a pin to below 0.4.4 for a while due to a bug in Ubuntu/sqlparse 0.4.4
-    "sqlparse>=0.2.3",
+    "sqlparse>=0.2.3,<0.5",
     # ----
     # These are major-version-0 packages also maintained by dbt-labs. Accept patches.
-    "dbt-extractor~=0.4.1",
+    "dbt-extractor~=0.5.0",
     "hologram~=0.0.16", # includes transitive dependencies on python-dateutil and jsonschema
     "minimal-snowplow-tracker~=0.0.2",
     # DSI is under active development, so we're pinning to specific dev versions for now.
-    # TODO: Before RC/final release, update to use ~= pinning.
-    "dbt-semantic-interfaces~=0.1.0rc1",
+    "dbt-semantic-interfaces~=0.2.0",
     # ----
     # Expect compatibility with all new versions of these packages, so lower bounds only.
     "packaging>20.9",
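
A note on the pin styles above: "~=" is PEP 440's compatible-release operator, so "dbt-semantic-interfaces~=0.2.0" means ">=0.2.0,<0.3". You can check a pin with the packaging library (already a dbt-core dependency):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("~=0.2.0")
    print("0.2.5" in spec)  # True
    print("0.3.0" in spec)  # False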

@@ -6,7 +6,7 @@ flake8
 flaky
 freezegun==0.3.12
 ipdb
-mypy==1.3.0
+mypy==1.4.1
 pip-tools
 pre-commit
 protobuf>=4.0.0

@@ -16,6 +16,7 @@ pytest-csv
 pytest-dotenv
 pytest-logbook
 pytest-mock
+pytest-split
 pytest-xdist
 sphinx
 tox>=3.13

@@ -20,8 +20,7 @@ from dbt.exceptions import (
 import dbt.utils
 
 
-# note that this isn't an adapter macro, so just a single underscore
-GET_RELATIONS_MACRO_NAME = "postgres_get_relations"
+GET_RELATIONS_MACRO_NAME = "postgres__get_relations"
 
 
 @dataclass

@@ -1,4 +1,4 @@
-{% macro postgres_get_relations () -%}
+{% macro postgres__get_relations() -%}
 
 {#
  -- in pg_depend, objid is the dependent, refobjid is the referenced object

@@ -74,3 +74,7 @@
 
 {{ return(load_result('relations').table) }}
 {% endmacro %}
+
+{% macro postgres_get_relations() %}
+{{ return(postgres__get_relations()) }}
+{% endmacro %}
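
The rename moves the macro onto the adapter double-underscore convention, while the new wrapper keeps the old single-underscore name working for any project that calls it directly. On the Python side it is resolved through the constant changed in the previous hunk; roughly, as a one-line sketch using the standard adapter helper:

    relations = adapter.execute_macro(GET_RELATIONS_MACRO_NAME)  # "postgres__get_relations"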

@@ -34,6 +34,10 @@ macros__schema_test = """
 models__downstream_from_seed_actual = """
 select * from {{ ref('seed_actual') }}
 
+"""
+
+models__downstream_from_seed_pipe_separated = """
+select * from {{ ref('seed_pipe_separated') }}
+
 """
 models__from_basic_seed = """
 select * from {{ this.schema }}.seed_expected

@@ -509,6 +509,511 @@ seed_id,first_name,email,ip_address,birthday
 500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20
 """.lstrip()
 
+seeds__pipe_separated_csv = """
+seed_id|first_name|email|ip_address|birthday
+1|Larry|lking0@miitbeian.gov.cn|69.135.206.194|2008-09-12 19:08:31
+2|Larry|lperkins1@toplist.cz|64.210.133.162|1978-05-09 04:15:14
+3|Anna|amontgomery2@miitbeian.gov.cn|168.104.64.114|2011-10-16 04:07:57
+4|Sandra|sgeorge3@livejournal.com|229.235.252.98|1973-07-19 10:52:43
+5|Fred|fwoods4@google.cn|78.229.170.124|2012-09-30 16:38:29
+6|Stephen|shanson5@livejournal.com|182.227.157.105|1995-11-07 21:40:50
+7|William|wmartinez6@upenn.edu|135.139.249.50|1982-09-05 03:11:59
+8|Jessica|jlong7@hao123.com|203.62.178.210|1991-10-16 11:03:15
+9|Douglas|dwhite8@tamu.edu|178.187.247.1|1979-10-01 09:49:48
+10|Lisa|lcoleman9@nydailynews.com|168.234.128.249|2011-05-26 07:45:49
+11|Ralph|rfieldsa@home.pl|55.152.163.149|1972-11-18 19:06:11
+12|Louise|lnicholsb@samsung.com|141.116.153.154|2014-11-25 20:56:14
+13|Clarence|cduncanc@sfgate.com|81.171.31.133|2011-11-17 07:02:36
+14|Daniel|dfranklind@omniture.com|8.204.211.37|1980-09-13 00:09:04
+15|Katherine|klanee@auda.org.au|176.96.134.59|1997-08-22 19:36:56
+16|Billy|bwardf@wikia.com|214.108.78.85|2003-10-19 02:14:47
+17|Annie|agarzag@ocn.ne.jp|190.108.42.70|1988-10-28 15:12:35
+18|Shirley|scolemanh@fastcompany.com|109.251.164.84|1988-08-24 10:50:57
+19|Roger|rfrazieri@scribd.com|38.145.218.108|1985-12-31 15:17:15
+20|Lillian|lstanleyj@goodreads.com|47.57.236.17|1970-06-08 02:09:05
+21|Aaron|arodriguezk@nps.gov|205.245.118.221|1985-10-11 23:07:49
+22|Patrick|pparkerl@techcrunch.com|19.8.100.182|2006-03-29 12:53:56
+23|Phillip|pmorenom@intel.com|41.38.254.103|2011-11-07 15:35:43
+24|Henry|hgarcian@newsvine.com|1.191.216.252|2008-08-28 08:30:44
+25|Irene|iturnero@opera.com|50.17.60.190|1994-04-01 07:15:02
+26|Andrew|adunnp@pen.io|123.52.253.176|2000-11-01 06:03:25
+27|David|dgutierrezq@wp.com|238.23.203.42|1988-01-25 07:29:18
+28|Henry|hsanchezr@cyberchimps.com|248.102.2.185|1983-01-01 13:36:37
+29|Evelyn|epetersons@gizmodo.com|32.80.46.119|1979-07-16 17:24:12
+30|Tammy|tmitchellt@purevolume.com|249.246.167.88|2001-04-03 10:00:23
+31|Jacqueline|jlittleu@domainmarket.com|127.181.97.47|1986-02-11 21:35:50
+32|Earl|eortizv@opera.com|166.47.248.240|1996-07-06 08:16:27
+33|Juan|jgordonw@sciencedirect.com|71.77.2.200|1987-01-31 03:46:44
+34|Diane|dhowellx@nyu.edu|140.94.133.12|1994-06-11 02:30:05
+35|Randy|rkennedyy@microsoft.com|73.255.34.196|2005-05-26 20:28:39
+36|Janice|jriveraz@time.com|22.214.227.32|1990-02-09 04:16:52
+37|Laura|lperry10@diigo.com|159.148.145.73|2015-03-17 05:59:25
+38|Gary|gray11@statcounter.com|40.193.124.56|1970-01-27 10:04:51
+39|Jesse|jmcdonald12@typepad.com|31.7.86.103|2009-03-14 08:14:29
+40|Sandra|sgonzalez13@goodreads.com|223.80.168.239|1993-05-21 14:08:54
+41|Scott|smoore14@archive.org|38.238.46.83|1980-08-30 11:16:56
+42|Phillip|pevans15@cisco.com|158.234.59.34|2011-12-15 23:26:31
+43|Steven|sriley16@google.ca|90.247.57.68|2011-10-29 19:03:28
+44|Deborah|dbrown17@hexun.com|179.125.143.240|1995-04-10 14:36:07
+45|Lori|lross18@ow.ly|64.80.162.180|1980-12-27 16:49:15
+46|Sean|sjackson19@tumblr.com|240.116.183.69|1988-06-12 21:24:45
+47|Terry|tbarnes1a@163.com|118.38.213.137|1997-09-22 16:43:19
+48|Dorothy|dross1b@ebay.com|116.81.76.49|2005-02-28 13:33:24
+49|Samuel|swashington1c@house.gov|38.191.253.40|1989-01-19 21:15:48
+50|Ralph|rcarter1d@tinyurl.com|104.84.60.174|2007-08-11 10:21:49
+51|Wayne|whudson1e@princeton.edu|90.61.24.102|1983-07-03 16:58:12
+52|Rose|rjames1f@plala.or.jp|240.83.81.10|1995-06-08 11:46:23
+53|Louise|lcox1g@theglobeandmail.com|105.11.82.145|2016-09-19 14:45:51
+54|Kenneth|kjohnson1h@independent.co.uk|139.5.45.94|1976-08-17 11:26:19
+55|Donna|dbrown1i@amazon.co.uk|19.45.169.45|2006-05-27 16:51:40
+56|Johnny|jvasquez1j@trellian.com|118.202.238.23|1975-11-17 08:42:32
+57|Patrick|pramirez1k@tamu.edu|231.25.153.198|1997-08-06 11:51:09
+58|Helen|hlarson1l@prweb.com|8.40.21.39|1993-08-04 19:53:40
+59|Patricia|pspencer1m@gmpg.org|212.198.40.15|1977-08-03 16:37:27
+60|Joseph|jspencer1n@marriott.com|13.15.63.238|2005-07-23 20:22:06
+61|Phillip|pschmidt1o@blogtalkradio.com|177.98.201.190|1976-05-19 21:47:44
+62|Joan|jwebb1p@google.ru|105.229.170.71|1972-09-07 17:53:47
+63|Phyllis|pkennedy1q@imgur.com|35.145.8.244|2000-01-01 22:33:37
+64|Katherine|khunter1r@smh.com.au|248.168.205.32|1991-01-09 06:40:24
+65|Laura|lvasquez1s@wiley.com|128.129.115.152|1997-10-23 12:04:56
+66|Juan|jdunn1t@state.gov|44.228.124.51|2004-11-10 05:07:35
+67|Judith|jholmes1u@wiley.com|40.227.179.115|1977-08-02 17:01:45
+68|Beverly|bbaker1v@wufoo.com|208.34.84.59|2016-03-06 20:07:23
+69|Lawrence|lcarr1w@flickr.com|59.158.212.223|1988-09-13 06:07:21
+70|Gloria|gwilliams1x@mtv.com|245.231.88.33|1995-03-18 22:32:46
+71|Steven|ssims1y@cbslocal.com|104.50.58.255|2001-08-05 21:26:20
+72|Betty|bmills1z@arstechnica.com|103.177.214.220|1981-12-14 21:26:54
+73|Mildred|mfuller20@prnewswire.com|151.158.8.130|2000-04-19 10:13:55
+74|Donald|dday21@icq.com|9.178.102.255|1972-12-03 00:58:24
+75|Eric|ethomas22@addtoany.com|85.2.241.227|1992-11-01 05:59:30
+76|Joyce|jarmstrong23@sitemeter.com|169.224.20.36|1985-10-24 06:50:01
+77|Maria|mmartinez24@amazonaws.com|143.189.167.135|2005-10-05 05:17:42
+78|Harry|hburton25@youtube.com|156.47.176.237|1978-03-26 05:53:33
+79|Kevin|klawrence26@hao123.com|79.136.183.83|1994-10-12 04:38:52
+80|David|dhall27@prweb.com|133.149.172.153|1976-12-15 16:24:24
+81|Kathy|kperry28@twitter.com|229.242.72.228|1979-03-04 02:58:56
+82|Adam|aprice29@elegantthemes.com|13.145.21.10|1982-11-07 11:46:59
+83|Brandon|bgriffin2a@va.gov|73.249.128.212|2013-10-30 05:30:36
+84|Henry|hnguyen2b@discovery.com|211.36.214.242|1985-01-09 06:37:27
+85|Eric|esanchez2c@edublogs.org|191.166.188.251|2004-05-01 23:21:42
+86|Jason|jlee2d@jimdo.com|193.92.16.182|1973-01-08 09:05:39
+87|Diana|drichards2e@istockphoto.com|19.130.175.245|1994-10-05 22:50:49
+88|Andrea|awelch2f@abc.net.au|94.155.233.96|2002-04-26 08:41:44
+89|Louis|lwagner2g@miitbeian.gov.cn|26.217.34.111|2003-08-25 07:56:39
+90|Jane|jsims2h@seesaa.net|43.4.220.135|1987-03-20 20:39:04
+91|Larry|lgrant2i@si.edu|97.126.79.34|2000-09-07 20:26:19
+92|Louis|ldean2j@prnewswire.com|37.148.40.127|2011-09-16 20:12:14
+93|Jennifer|jcampbell2k@xing.com|38.106.254.142|1988-07-15 05:06:49
+94|Wayne|wcunningham2l@google.com.hk|223.28.26.187|2009-12-15 06:16:54
+95|Lori|lstevens2m@icq.com|181.250.181.58|1984-10-28 03:29:19
+96|Judy|jsimpson2n@marriott.com|180.121.239.219|1986-02-07 15:18:10
+97|Phillip|phoward2o@usa.gov|255.247.0.175|2002-12-26 08:44:45
+98|Gloria|gwalker2p@usa.gov|156.140.7.128|1997-10-04 07:58:58
+99|Paul|pjohnson2q@umn.edu|183.59.198.197|1991-11-14 12:33:55
+100|Frank|fgreene2r@blogspot.com|150.143.68.121|2010-06-12 23:55:39
+101|Deborah|dknight2s@reverbnation.com|222.131.211.191|1970-07-08 08:54:23
+102|Sandra|sblack2t@tripadvisor.com|254.183.128.254|2000-04-12 02:39:36
+103|Edward|eburns2u@dailymotion.com|253.89.118.18|1993-10-10 10:54:01
+104|Anthony|ayoung2v@ustream.tv|118.4.193.176|1978-08-26 17:07:29
+105|Donald|dlawrence2w@wp.com|139.200.159.227|2007-07-21 20:56:20
+106|Matthew|mfreeman2x@google.fr|205.26.239.92|2014-12-05 17:05:39
+107|Sean|ssanders2y@trellian.com|143.89.82.108|1993-07-14 21:45:02
+108|Sharon|srobinson2z@soundcloud.com|66.234.247.54|1977-04-06 19:07:03
+109|Jennifer|jwatson30@t-online.de|196.102.127.7|1998-03-07 05:12:23
+110|Clarence|cbrooks31@si.edu|218.93.234.73|2002-11-06 17:22:25
+111|Jose|jflores32@goo.gl|185.105.244.231|1995-01-05 06:32:21
+112|George|glee33@adobe.com|173.82.249.196|2015-01-04 02:47:46
+113|Larry|lhill34@linkedin.com|66.5.206.195|2010-11-02 10:21:17
+114|Marie|mmeyer35@mysql.com|151.152.88.107|1990-05-22 20:52:51
+115|Clarence|cwebb36@skype.com|130.198.55.217|1972-10-27 07:38:54
+116|Sarah|scarter37@answers.com|80.89.18.153|1971-08-24 19:29:30
+117|Henry|hhughes38@webeden.co.uk|152.60.114.174|1973-01-27 09:00:42
+118|Teresa|thenry39@hao123.com|32.187.239.106|2015-11-06 01:48:44
+119|Billy|bgutierrez3a@sun.com|52.37.70.134|2002-03-19 03:20:19
+120|Anthony|agibson3b@github.io|154.251.232.213|1991-04-19 01:08:15
+121|Sandra|sromero3c@wikia.com|44.124.171.2|1998-09-06 20:30:34
+122|Paula|pandrews3d@blogs.com|153.142.118.226|2003-06-24 16:31:24
+123|Terry|tbaker3e@csmonitor.com|99.120.45.219|1970-12-09 23:57:21
+124|Lois|lwilson3f@reuters.com|147.44.171.83|1971-01-09 22:28:51
+125|Sara|smorgan3g@nature.com|197.67.192.230|1992-01-28 20:33:24
+126|Charles|ctorres3h@china.com.cn|156.115.216.2|1993-10-02 19:36:34
+127|Richard|ralexander3i@marriott.com|248.235.180.59|1999-02-03 18:40:55
+128|Christina|charper3j@cocolog-nifty.com|152.114.116.129|1978-09-13 00:37:32
+129|Steve|sadams3k@economist.com|112.248.91.98|2004-03-21 09:07:43
+130|Katherine|krobertson3l@ow.ly|37.220.107.28|1977-03-18 19:28:50
+131|Donna|dgibson3m@state.gov|222.218.76.221|1999-02-01 06:46:16
+132|Christina|cwest3n@mlb.com|152.114.6.160|1979-12-24 15:30:35
+133|Sandra|swillis3o@meetup.com|180.71.49.34|1984-09-27 08:05:54
+134|Clarence|cedwards3p@smugmug.com|10.64.180.186|1979-04-16 16:52:10
+135|Ruby|rjames3q@wp.com|98.61.54.20|2007-01-13 14:25:52
+136|Sarah|smontgomery3r@tripod.com|91.45.164.172|2009-07-25 04:34:30
+137|Sarah|soliver3s@eventbrite.com|30.106.39.146|2012-05-09 22:12:33
+138|Deborah|dwheeler3t@biblegateway.com|59.105.213.173|1999-11-09 08:08:44
+139|Deborah|dray3u@i2i.jp|11.108.186.217|2014-02-04 03:15:19
+140|Paul|parmstrong3v@alexa.com|6.250.59.43|2009-12-21 10:08:53
+141|Aaron|abishop3w@opera.com|207.145.249.62|1996-04-25 23:20:23
+142|Henry|hsanders3x@google.ru|140.215.203.171|2012-01-29 11:52:32
+143|Anne|aanderson3y@1688.com|74.150.102.118|1982-04-03 13:46:17
+144|Victor|vmurphy3z@hugedomains.com|222.155.99.152|1987-11-03 19:58:41
+145|Evelyn|ereid40@pbs.org|249.122.33.117|1977-12-14 17:09:57
+146|Brian|bgonzalez41@wikia.com|246.254.235.141|1991-02-24 00:45:58
+147|Sandra|sgray42@squarespace.com|150.73.28.159|1972-07-28 17:26:32
+148|Alice|ajones43@a8.net|78.253.12.177|2002-12-05 16:57:46
+149|Jessica|jhanson44@mapquest.com|87.229.30.160|1994-01-30 11:40:04
+150|Louise|lbailey45@reuters.com|191.219.31.101|2011-09-07 21:11:45
+151|Christopher|cgonzalez46@printfriendly.com|83.137.213.239|1984-10-24 14:58:04
+152|Gregory|gcollins47@yandex.ru|28.176.10.115|1998-07-25 17:17:10
+153|Jane|jperkins48@usnews.com|46.53.164.159|1979-08-19 15:25:00
+154|Phyllis|plong49@yahoo.co.jp|208.140.88.2|1985-07-06 02:16:36
+155|Adam|acarter4a@scribd.com|78.48.148.204|2005-07-20 03:31:09
+156|Frank|fweaver4b@angelfire.com|199.180.255.224|2011-03-04 23:07:54
+157|Ronald|rmurphy4c@cloudflare.com|73.42.97.231|1991-01-11 10:39:41
+158|Richard|rmorris4d@e-recht24.de|91.9.97.223|2009-01-17 21:05:15
+159|Rose|rfoster4e@woothemes.com|203.169.53.16|1991-04-21 02:09:38
+160|George|ggarrett4f@uiuc.edu|186.61.5.167|1989-11-11 11:29:42
+161|Victor|vhamilton4g@biblegateway.com|121.229.138.38|2012-06-22 18:01:23
+162|Mark|mbennett4h@businessinsider.com|209.184.29.203|1980-04-16 15:26:34
+163|Martin|mwells4i@ifeng.com|97.223.55.105|2010-05-26 14:08:18
+164|Diana|dstone4j@google.ru|90.155.52.47|2013-02-11 00:14:54
+165|Walter|wferguson4k@blogger.com|30.63.212.44|1986-02-20 17:46:46
+166|Denise|dcoleman4l@vistaprint.com|10.209.153.77|1992-05-13 20:14:14
+167|Philip|pknight4m@xing.com|15.28.135.167|2000-09-11 18:41:13
+168|Russell|rcarr4n@youtube.com|113.55.165.50|2008-07-10 17:49:27
+169|Donna|dburke4o@dion.ne.jp|70.0.105.111|1992-02-10 17:24:58
+170|Anne|along4p@squidoo.com|36.154.58.107|2012-08-19 23:35:31
+171|Clarence|cbanks4q@webeden.co.uk|94.57.53.114|1972-03-11 21:46:44
+172|Betty|bbowman4r@cyberchimps.com|178.115.209.69|2013-01-13 21:34:51
+173|Andrew|ahudson4s@nytimes.com|84.32.252.144|1998-09-15 14:20:04
+174|Keith|kgordon4t@cam.ac.uk|189.237.211.102|2009-01-22 05:34:38
+175|Patrick|pwheeler4u@mysql.com|47.22.117.226|1984-09-05 22:33:15
+176|Jesse|jfoster4v@mapquest.com|229.95.131.46|1990-01-20 12:19:15
+177|Arthur|afisher4w@jugem.jp|107.255.244.98|1983-10-13 11:08:46
+178|Nicole|nryan4x@wsj.com|243.211.33.221|1974-05-30 23:19:14
+179|Bruce|bjohnson4y@sfgate.com|17.41.200.101|1992-09-23 02:02:19
+180|Terry|tcox4z@reference.com|20.189.120.106|1982-02-13 12:43:14
+181|Ashley|astanley50@kickstarter.com|86.3.56.98|1976-05-09 01:27:16
+182|Michael|mrivera51@about.me|72.118.249.0|1971-11-11 17:28:37
+183|Steven|sgonzalez52@mozilla.org|169.112.247.47|2002-08-24 14:59:25
+184|Kathleen|kfuller53@bloglovin.com|80.93.59.30|2002-03-11 13:41:29
+185|Nicole|nhenderson54@usda.gov|39.253.60.30|1995-04-24 05:55:07
+186|Ralph|rharper55@purevolume.com|167.147.142.189|1980-02-10 18:35:45
+187|Heather|hcunningham56@photobucket.com|96.222.196.229|2007-06-15 05:37:50
+188|Nancy|nlittle57@cbc.ca|241.53.255.175|2007-07-12 23:42:48
+189|Juan|jramirez58@pinterest.com|190.128.84.27|1978-11-07 23:37:37
+190|Beverly|bfowler59@chronoengine.com|54.144.230.49|1979-03-31 23:27:28
+191|Shirley|sstevens5a@prlog.org|200.97.231.248|2011-12-06 07:08:50
+192|Annie|areyes5b@squidoo.com|223.32.182.101|2011-05-28 02:42:09
+193|Jack|jkelley5c@tiny.cc|47.34.118.150|1981-12-05 17:31:40
+194|Keith|krobinson5d@1und1.de|170.210.209.31|1999-03-09 11:05:43
+195|Joseph|jmiller5e@google.com.au|136.74.212.139|1984-10-08 13:18:20
+196|Annie|aday5f@blogspot.com|71.99.186.69|1986-02-18 12:27:34
+197|Nancy|nperez5g@liveinternet.ru|28.160.6.107|1983-10-20 17:51:20
+198|Tammy|tward5h@ucoz.ru|141.43.164.70|1980-03-31 04:45:29
+199|Doris|dryan5i@ted.com|239.117.202.188|1985-07-03 03:17:53
+200|Rose|rmendoza5j@photobucket.com|150.200.206.79|1973-04-21 21:36:40
+201|Cynthia|cbutler5k@hubpages.com|80.153.174.161|2001-01-20 01:42:26
+202|Samuel|soliver5l@people.com.cn|86.127.246.140|1970-09-02 02:19:00
+203|Carl|csanchez5m@mysql.com|50.149.237.107|1993-12-01 07:02:09
+204|Kathryn|kowens5n@geocities.jp|145.166.205.201|2004-07-06 18:39:33
+205|Nicholas|nnichols5o@parallels.com|190.240.66.170|2014-11-11 18:52:19
+206|Keith|kwillis5p@youtube.com|181.43.206.100|1998-06-13 06:30:51
+207|Justin|jwebb5q@intel.com|211.54.245.74|2000-11-04 16:58:26
+208|Gary|ghicks5r@wikipedia.org|196.154.213.104|1992-12-01 19:48:28
+209|Martin|mpowell5s@flickr.com|153.67.12.241|1983-06-30 06:24:32
+210|Brenda|bkelley5t@xinhuanet.com|113.100.5.172|2005-01-08 20:50:22
+211|Edward|eray5u@a8.net|205.187.246.65|2011-09-26 08:04:44
+212|Steven|slawson5v@senate.gov|238.150.250.36|1978-11-22 02:48:09
+213|Robert|rthompson5w@furl.net|70.7.89.236|2001-09-12 08:52:07
+214|Jack|jporter5x@diigo.com|220.172.29.99|1976-07-26 14:29:21
+215|Lisa|ljenkins5y@oakley.com|150.151.170.180|2010-03-20 19:21:16
+216|Theresa|tbell5z@mayoclinic.com|247.25.53.173|2001-03-11 05:36:40
+217|Jimmy|jstephens60@weather.com|145.101.93.235|1983-04-12 09:35:30
+218|Louis|lhunt61@amazon.co.jp|78.137.6.253|1997-08-29 19:34:34
+219|Lawrence|lgilbert62@ted.com|243.132.8.78|2015-04-08 22:06:56
+220|David|dgardner63@4shared.com|204.40.46.136|1971-07-09 03:29:11
+221|Charles|ckennedy64@gmpg.org|211.83.233.2|2011-02-26 11:55:04
+222|Lillian|lbanks65@msu.edu|124.233.12.80|2010-05-16 20:29:02
+223|Ernest|enguyen66@baidu.com|82.45.128.148|1996-07-04 10:07:04
+224|Ryan|rrussell67@cloudflare.com|202.53.240.223|1983-08-05 12:36:29
+225|Donald|ddavis68@ustream.tv|47.39.218.137|1989-05-27 02:30:56
+226|Joe|jscott69@blogspot.com|140.23.131.75|1973-03-16 12:21:31
+227|Anne|amarshall6a@google.ca|113.162.200.197|1988-12-09 03:38:29
+228|Willie|wturner6b@constantcontact.com|85.83.182.249|1991-10-06 01:51:10
+229|Nicole|nwilson6c@sogou.com|30.223.51.135|1977-05-29 19:54:56
+230|Janet|jwheeler6d@stumbleupon.com|153.194.27.144|2011-03-13 12:48:47
+231|Lois|lcarr6e@statcounter.com|0.41.36.53|1993-02-06 04:52:01
+232|Shirley|scruz6f@tmall.com|37.156.39.223|2007-02-18 17:47:01
+233|Patrick|pford6g@reverbnation.com|36.198.200.89|1977-03-06 15:47:24
+234|Lisa|lhudson6h@usatoday.com|134.213.58.137|2014-10-28 01:56:56
+235|Pamela|pmartinez6i@opensource.org|5.151.127.202|1987-11-30 16:44:47
+236|Larry|lperez6j@infoseek.co.jp|235.122.96.148|1979-01-18 06:33:45
+237|Pamela|pramirez6k@census.gov|138.233.34.163|2012-01-29 10:35:20
+238|Daniel|dcarr6l@php.net|146.21.152.242|1984-11-17 08:22:59
+239|Patrick|psmith6m@indiegogo.com|136.222.199.36|2001-05-30 22:16:44
+240|Raymond|rhenderson6n@hc360.com|116.31.112.38|2000-01-05 20:35:41
+241|Teresa|treynolds6o@miitbeian.gov.cn|198.126.205.220|1996-11-08 01:27:31
+242|Johnny|jmason6p@flickr.com|192.8.232.114|2013-05-14 05:35:50
+243|Angela|akelly6q@guardian.co.uk|234.116.60.197|1977-08-20 02:05:17
+244|Douglas|dcole6r@cmu.edu|128.135.212.69|2016-10-26 17:40:36
+245|Frances|fcampbell6s@twitpic.com|94.22.243.235|1987-04-26 07:07:13
+246|Donna|dgreen6t@chron.com|227.116.46.107|2011-07-25 12:59:54
+247|Benjamin|bfranklin6u@redcross.org|89.141.142.89|1974-05-03 20:28:18
+248|Randy|rpalmer6v@rambler.ru|70.173.63.178|2011-12-20 17:40:18
+249|Melissa|mmurray6w@bbb.org|114.234.118.137|1991-02-26 12:45:44
+250|Jean|jlittle6x@epa.gov|141.21.163.254|1991-08-16 04:57:09
+251|Daniel|dolson6y@nature.com|125.75.104.97|2010-04-23 06:25:54
+252|Kathryn|kwells6z@eventbrite.com|225.104.28.249|2015-01-31 02:21:50
+253|Theresa|tgonzalez70@ox.ac.uk|91.93.156.26|1971-12-11 10:31:31
+254|Beverly|broberts71@bluehost.com|244.40.158.89|2013-09-21 13:02:31
+255|Pamela|pmurray72@netscape.com|218.54.95.216|1985-04-16 00:34:00
+256|Timothy|trichardson73@amazonaws.com|235.49.24.229|2000-11-11 09:48:28
+257|Mildred|mpalmer74@is.gd|234.125.95.132|1992-05-25 02:25:02
+258|Jessica|jcampbell75@google.it|55.98.30.140|2014-08-26 00:26:34
+259|Beverly|bthomas76@cpanel.net|48.78.228.176|1970-08-18 10:40:05
+260|Eugene|eward77@cargocollective.com|139.226.204.2|1996-12-04 23:17:00
+261|Andrea|aallen78@webnode.com|160.31.214.38|2009-07-06 07:22:37
+262|Justin|jruiz79@merriam-webster.com|150.149.246.122|2005-06-06 11:44:19
+263|Kenneth|kedwards7a@networksolutions.com|98.82.193.128|2001-07-03 02:00:10
+264|Rachel|rday7b@miibeian.gov.cn|114.15.247.221|1994-08-18 19:45:40
+265|Russell|rmiller7c@instagram.com|184.130.152.253|1977-11-06 01:58:12
+266|Bonnie|bhudson7d@cornell.edu|235.180.186.206|1990-12-03 22:45:24
+267|Raymond|rknight7e@yandex.ru|161.2.44.252|1995-08-25 04:31:19
+268|Bonnie|brussell7f@elpais.com|199.237.57.207|1991-03-29 08:32:06
+269|Marie|mhenderson7g@elpais.com|52.203.131.144|2004-06-04 21:50:28
+270|Alan|acarr7h@trellian.com|147.51.205.72|2005-03-03 10:51:31
+271|Barbara|bturner7i@hugedomains.com|103.160.110.226|2004-08-04 13:42:40
+272|Christina|cdaniels7j@census.gov|0.238.61.251|1972-10-18 12:47:33
+273|Jeremy|jgomez7k@reuters.com|111.26.65.56|2013-01-13 10:41:35
+274|Laura|lwood7l@icio.us|149.153.38.205|2011-06-25 09:33:59
+275|Matthew|mbowman7m@auda.org.au|182.138.206.172|1999-03-05 03:25:36
+276|Denise|dparker7n@icq.com|0.213.88.138|2011-11-04 09:43:06
+277|Phillip|pparker7o@discuz.net|219.242.165.240|1973-10-19 04:22:29
+278|Joan|jpierce7p@salon.com|63.31.213.202|1989-04-09 22:06:24
+279|Irene|ibaker7q@cbc.ca|102.33.235.114|1992-09-04 13:00:57
+280|Betty|bbowman7r@ted.com|170.91.249.242|2015-09-28 08:14:22
+281|Teresa|truiz7s@boston.com|82.108.158.207|1999-07-18 05:17:09
+282|Helen|hbrooks7t@slideshare.net|102.87.162.187|2003-01-06 15:45:29
+283|Karen|kgriffin7u@wunderground.com|43.82.44.184|2010-05-28 01:56:37
+284|Lisa|lfernandez7v@mtv.com|200.238.218.220|1993-04-03 20:33:51
+285|Jesse|jlawrence7w@timesonline.co.uk|95.122.105.78|1990-01-05 17:28:43
+286|Terry|tross7x@macromedia.com|29.112.114.133|2009-08-29 21:32:17
+287|Angela|abradley7y@icq.com|177.44.27.72|1989-10-04 21:46:06
+288|Maria|mhart7z@dailymotion.com|55.27.55.202|1975-01-21 01:22:57
+289|Raymond|randrews80@pinterest.com|88.90.78.67|1992-03-16 21:37:40
+290|Kathy|krice81@bluehost.com|212.63.196.102|2000-12-14 03:06:44
+291|Cynthia|cramos82@nymag.com|107.89.190.6|2005-06-28 02:02:33
+292|Kimberly|kjones83@mysql.com|86.169.101.101|2007-06-13 22:56:49
+293|Timothy|thansen84@microsoft.com|108.100.254.90|2003-04-04 10:31:57
+294|Carol|cspencer85@berkeley.edu|75.118.144.187|1999-03-30 14:53:21
+295|Louis|lmedina86@latimes.com|141.147.163.24|1991-04-11 17:53:13
+296|Margaret|mcole87@google.fr|53.184.26.83|1991-12-19 01:54:10
+297|Mary|mgomez88@yellowpages.com|208.56.57.99|1976-05-21 18:05:08
+298|Amanda|aanderson89@geocities.com|147.73.15.252|1987-08-22 15:05:28
+299|Kathryn|kgarrett8a@nature.com|27.29.177.220|1976-07-15 04:25:04
+300|Dorothy|dmason8b@shareasale.com|106.210.99.193|1990-09-03 21:39:31
+301|Lois|lkennedy8c@amazon.de|194.169.29.187|2007-07-29 14:09:31
+302|Irene|iburton8d@washingtonpost.com|196.143.110.249|2013-09-05 11:32:46
+303|Betty|belliott8e@wired.com|183.105.222.199|1979-09-19 19:29:13
+304|Bobby|bmeyer8f@census.gov|36.13.161.145|2014-05-24 14:34:39
+305|Ann|amorrison8g@sfgate.com|72.154.54.137|1978-10-05 14:22:34
+306|Daniel|djackson8h@wunderground.com|144.95.32.34|1990-07-27 13:23:05
+307|Joe|jboyd8i@alibaba.com|187.105.86.178|2011-09-28 16:46:32
+308|Ralph|rdunn8j@fc2.com|3.19.87.255|1984-10-18 08:00:40
+309|Craig|ccarter8k@gizmodo.com|235.152.76.215|1998-07-04 12:15:21
+310|Paula|pdean8l@hhs.gov|161.100.173.197|1973-02-13 09:38:55
+311|Andrew|agarrett8m@behance.net|199.253.123.218|1991-02-14 13:36:32
+312|Janet|jhowell8n@alexa.com|39.189.139.79|2012-11-24 20:17:33
+313|Keith|khansen8o@godaddy.com|116.186.223.196|1987-08-23 21:22:05
+314|Nicholas|nedwards8p@state.gov|142.175.142.11|1977-03-28 18:27:27
+315|Jacqueline|jallen8q@oaic.gov.au|189.66.135.192|1994-10-26 11:44:26
+316|Frank|fgardner8r@mapy.cz|154.77.119.169|1983-01-29 19:19:51
+317|Eric|eharrison8s@google.cn|245.139.65.123|1984-02-04 09:54:36
+318|Gregory|gcooper8t@go.com|171.147.0.221|2004-06-14 05:22:08
+319|Jean|jfreeman8u@rakuten.co.jp|67.243.121.5|1977-01-07 18:23:43
+320|Juan|jlewis8v@shinystat.com|216.181.171.189|2001-08-23 17:32:43
+321|Randy|rwilliams8w@shinystat.com|105.152.146.28|1983-02-17 00:05:50
+322|Stephen|shart8x@sciencedirect.com|196.131.205.148|2004-02-15 10:12:03
+323|Annie|ahunter8y@example.com|63.36.34.103|2003-07-23 21:15:25
+324|Melissa|mflores8z@cbc.ca|151.230.217.90|1983-11-02 14:53:56
+325|Jane|jweaver90@about.me|0.167.235.217|1987-07-29 00:13:44
+326|Anthony|asmith91@oracle.com|97.87.48.41|2001-05-31 18:44:11
+327|Terry|tdavis92@buzzfeed.com|46.20.12.51|2015-09-12 23:13:55
+328|Brandon|bmontgomery93@gravatar.com|252.101.48.186|2010-10-28 08:26:27
+329|Chris|cmurray94@bluehost.com|25.158.167.97|2004-05-05 16:10:31
+330|Denise|dfuller95@hugedomains.com|216.210.149.28|1979-04-20 08:57:24
+331|Arthur|amcdonald96@sakura.ne.jp|206.42.36.213|2009-08-15 03:26:16
+332|Jesse|jhoward97@google.cn|46.181.118.30|1974-04-18 14:08:41
+333|Frank|fsimpson98@domainmarket.com|163.220.211.87|2006-06-30 14:46:52
+334|Janice|jwoods99@pen.io|229.245.237.182|1988-04-06 11:52:58
+335|Rebecca|rroberts9a@huffingtonpost.com|148.96.15.80|1976-10-05 08:44:16
+336|Joshua|jray9b@opensource.org|192.253.12.198|1971-12-25 22:27:07
+337|Joyce|jcarpenter9c@statcounter.com|125.171.46.215|2001-12-31 22:08:13
+338|Andrea|awest9d@privacy.gov.au|79.101.180.201|1983-02-18 20:07:47
+339|Christine|chudson9e@yelp.com|64.198.43.56|1997-09-08 08:03:43
+340|Joe|jparker9f@earthlink.net|251.215.148.153|1973-11-04 05:08:18
+341|Thomas|tkim9g@answers.com|49.187.34.47|1991-08-07 21:13:48
+342|Janice|jdean9h@scientificamerican.com|4.197.117.16|2009-12-08 02:35:49
+343|James|jmitchell9i@umich.edu|43.121.18.147|2011-04-28 17:04:09
+344|Charles|cgardner9j@purevolume.com|197.78.240.240|1998-02-11 06:47:07
+345|Robert|rhenderson9k@friendfeed.com|215.84.180.88|2002-05-10 15:33:14
+346|Chris|cgray9l@4shared.com|249.70.192.240|1998-10-03 16:43:42
+347|Gloria|ghayes9m@hibu.com|81.103.138.26|1999-12-26 11:23:13
+348|Edward|eramirez9n@shareasale.com|38.136.90.136|2010-08-19 08:01:06
+349|Cheryl|cbutler9o@google.ca|172.180.78.172|1995-05-27 20:03:52
+350|Margaret|mwatkins9p@sfgate.com|3.20.198.6|2014-10-21 01:42:58
+351|Rebecca|rwelch9q@examiner.com|45.81.42.208|2001-02-08 12:19:06
+352|Joe|jpalmer9r@phpbb.com|163.202.92.190|1970-01-05 11:29:12
+353|Sandra|slewis9s@dyndns.org|77.215.201.236|1974-01-05 07:04:04
+354|Todd|tfranklin9t@g.co|167.125.181.82|2009-09-28 10:13:58
+355|Joseph|jlewis9u@webmd.com|244.204.6.11|1990-10-21 15:49:57
+356|Alan|aknight9v@nydailynews.com|152.197.95.83|1996-03-08 08:43:17
+357|Sharon|sdean9w@123-reg.co.uk|237.46.40.26|1985-11-30 12:09:24
+358|Annie|awright9x@cafepress.com|190.45.231.111|2000-08-24 11:56:06
+359|Diane|dhamilton9y@youtube.com|85.146.171.196|2015-02-24 02:03:57
+360|Antonio|alane9z@auda.org.au|61.63.146.203|2001-05-13 03:43:34
+361|Matthew|mallena0@hhs.gov|29.97.32.19|1973-02-19 23:43:32
+362|Bonnie|bfowlera1@soup.io|251.216.99.53|2013-08-01 15:35:41
+363|Margaret|mgraya2@examiner.com|69.255.151.79|1998-01-23 22:24:59
+364|Joan|jwagnera3@printfriendly.com|192.166.120.61|1973-07-13 00:30:22
+365|Catherine|cperkinsa4@nytimes.com|58.21.24.214|2006-11-19 11:52:26
+366|Mark|mcartera5@cpanel.net|220.33.102.142|2007-09-09 09:43:27
+367|Paula|ppricea6@msn.com|36.182.238.124|2009-11-11 09:13:05
+368|Catherine|cgreena7@army.mil|228.203.58.19|2005-08-09 16:52:15
+369|Helen|hhamiltona8@symantec.com|155.56.194.99|2005-02-01 05:40:36
+370|Jane|jmeyera9@ezinearticles.com|133.244.113.213|2013-11-06 22:10:23
+371|Wanda|wevansaa@bloglovin.com|233.125.192.48|1994-12-26 23:43:42
+372|Mark|mmarshallab@tumblr.com|114.74.60.47|2016-09-29 18:03:01
+373|Andrew|amartinezac@google.cn|182.54.37.130|1976-06-06 17:04:17
+374|Helen|hmoralesad@e-recht24.de|42.45.4.123|1977-03-28 19:06:59
+375|Bonnie|bstoneae@php.net|196.149.79.137|1970-02-05 17:05:58
+376|Douglas|dfreemanaf@nasa.gov|215.65.124.218|2008-11-20 21:51:55
+377|Willie|wwestag@army.mil|35.189.92.118|1992-07-24 05:08:08
+378|Cheryl|cwagnerah@upenn.edu|228.239.222.141|2010-01-25 06:29:01
+379|Sandra|swardai@baidu.com|63.11.113.240|1985-05-23 08:07:37
+380|Julie|jrobinsonaj@jugem.jp|110.58.202.50|2015-03-05 09:42:07
+381|Larry|lwagnerak@shop-pro.jp|98.234.25.24|1975-07-22 22:22:02
+382|Juan|jcastilloal@yelp.com|24.174.74.202|2007-01-17 09:32:43
+383|Donna|dfrazieram@artisteer.com|205.26.147.45|1990-02-11 20:55:46
+384|Rachel|rfloresan@w3.org|109.60.216.162|1983-05-22 22:42:18
+385|Robert|rreynoldsao@theguardian.com|122.65.209.130|2009-05-01 18:02:51
+386|Donald|dbradleyap@etsy.com|42.54.35.126|1997-01-16 16:31:52
+387|Rachel|rfisheraq@nih.gov|160.243.250.45|2006-02-17 22:05:49
+388|Nicholas|nhamiltonar@princeton.edu|156.211.37.111|1976-06-21 03:36:29
+389|Timothy|twhiteas@ca.gov|36.128.23.70|1975-09-24 03:51:18
+390|Diana|dbradleyat@odnoklassniki.ru|44.102.120.184|1983-04-27 09:02:50
+391|Billy|bfowlerau@jimdo.com|91.200.68.196|1995-01-29 06:57:35
+392|Bruce|bandrewsav@ucoz.com|48.12.101.125|1992-10-27 04:31:39
+393|Linda|lromeroaw@usa.gov|100.71.233.19|1992-06-08 15:13:18
+394|Debra|dwatkinsax@ucoz.ru|52.160.233.193|2001-11-11 06:51:01
+395|Katherine|kburkeay@wix.com|151.156.242.141|2010-06-14 19:54:28
+396|Martha|mharrisonaz@youku.com|21.222.10.199|1989-10-16 14:17:55
+397|Dennis|dwellsb0@youtu.be|103.16.29.3|1985-12-21 06:05:51
+398|Gloria|grichardsb1@bloglines.com|90.147.120.234|1982-08-27 01:04:43
+399|Brenda|bfullerb2@t.co|33.253.63.90|2011-04-20 05:00:35
+400|Larry|lhendersonb3@disqus.com|88.95.132.128|1982-08-31 02:15:12
+401|Richard|rlarsonb4@wisc.edu|13.48.231.150|1979-04-15 14:08:09
+402|Terry|thuntb5@usa.gov|65.91.103.240|1998-05-15 11:50:49
+403|Harry|hburnsb6@nasa.gov|33.38.21.244|1981-04-12 14:02:20
+404|Diana|dellisb7@mlb.com|218.229.81.135|1997-01-29 00:17:25
+405|Jack|jburkeb8@tripadvisor.com|210.227.182.216|1984-03-09 17:24:03
+406|Julia|jlongb9@fotki.com|10.210.12.104|2005-10-26 03:54:13
+407|Lois|lscottba@msu.edu|188.79.136.138|1973-02-02 18:40:39
+408|Sandra|shendersonbb@shareasale.com|114.171.220.108|2012-06-09 18:22:26
+409|Irene|isanchezbc@cdbaby.com|109.255.50.119|1983-09-28 21:11:27
+410|Emily|ebrooksbd@bandcamp.com|227.81.93.79|1970-08-31 21:08:01
+411|Michelle|mdiazbe@businessweek.com|236.249.6.226|1993-05-22 08:07:07
+412|Tammy|tbennettbf@wisc.edu|145.253.239.152|1978-12-31 20:24:51
+413|Christine|cgreenebg@flickr.com|97.25.140.118|1978-07-17 12:55:30
+414|Patricia|pgarzabh@tuttocitta.it|139.246.192.211|1984-02-27 13:40:08
+415|Kimberly|kromerobi@aol.com|73.56.88.247|1976-09-16 14:22:04
+416|George|gjohnstonbj@fda.gov|240.36.245.185|1979-07-24 14:36:02
+417|Eugene|efullerbk@sciencedaily.com|42.38.105.140|2012-09-12 01:56:41
+418|Andrea|astevensbl@goo.gl|31.152.207.204|1979-05-24 11:06:21
+419|Shirley|sreidbm@scientificamerican.com|103.60.31.241|1984-02-23 04:07:41
+420|Terry|tmorenobn@blinklist.com|92.161.34.42|1994-06-25 14:01:35
+421|Christopher|cmorenobo@go.com|158.86.176.82|1973-09-05 09:18:47
+422|Dennis|dhansonbp@ning.com|40.160.81.75|1982-01-20 10:19:41
+423|Beverly|brussellbq@de.vu|138.32.56.204|1997-11-06 07:20:19
+424|Howard|hparkerbr@163.com|103.171.134.171|2015-06-24 15:37:10
+425|Helen|hmccoybs@fema.gov|61.200.4.71|1995-06-20 08:59:10
+426|Ann|ahudsonbt@cafepress.com|239.187.71.125|1977-04-11 07:59:28
+427|Tina|twestbu@nhs.uk|80.213.117.74|1992-08-19 05:54:44
+428|Terry|tnguyenbv@noaa.gov|21.93.118.95|1991-09-19 23:22:55
+429|Ashley|aburtonbw@wix.com|233.176.205.109|2009-11-10 05:01:20
+430|Eric|emyersbx@1und1.de|168.91.212.67|1987-08-10 07:16:20
+431|Barbara|blittleby@lycos.com|242.14.189.239|2008-08-02 12:13:04
+432|Sean|sevansbz@instagram.com|14.39.177.13|2007-04-16 17:28:49
+433|Shirley|sburtonc0@newsvine.com|34.107.138.76|1980-12-10 02:19:29
+434|Patricia|pfreemanc1@so-net.ne.jp|219.213.142.117|1987-03-01 02:25:45
+435|Paula|pfosterc2@vkontakte.ru|227.14.138.141|1972-09-22 12:59:34
+436|Nicole|nstewartc3@1688.com|8.164.23.115|1998-10-27 00:10:17
+437|Earl|ekimc4@ovh.net|100.26.244.177|2013-01-22 10:05:46
+438|Beverly|breedc5@reuters.com|174.12.226.27|1974-09-22 07:29:36
+439|Lawrence|lbutlerc6@a8.net|105.164.42.164|1992-06-05 00:43:40
+440|Charles|cmoorec7@ucoz.com|252.197.131.69|1990-04-09 02:34:05
+441|Alice|alawsonc8@live.com|183.73.220.232|1989-02-28 09:11:04
+442|Dorothy|dcarpenterc9@arstechnica.com|241.47.200.14|2005-05-02 19:57:21
+443|Carolyn|cfowlerca@go.com|213.109.55.202|1978-09-10 20:18:20
+444|Anthony|alongcb@free.fr|169.221.158.204|1984-09-13 01:59:23
+445|Annie|amoorecc@e-recht24.de|50.34.148.61|2009-03-26 03:41:07
+446|Carlos|candrewscd@ihg.com|236.69.59.212|1972-03-29 22:42:48
+447|Beverly|bramosce@google.ca|164.250.184.49|1982-11-10 04:34:01
+448|Teresa|tlongcf@umich.edu|174.88.53.223|1987-05-17 12:48:00
+449|Roy|rboydcg@uol.com.br|91.58.243.215|1974-06-16 17:59:54
+450|Ashley|afieldsch@tamu.edu|130.138.11.126|1983-09-15 05:52:36
+451|Judith|jhawkinsci@cmu.edu|200.187.103.245|2003-10-22 12:24:03
+452|Rebecca|rwestcj@ocn.ne.jp|72.85.3.103|1980-11-13 11:01:26
+453|Raymond|rporterck@infoseek.co.jp|146.33.216.151|1982-05-17 23:58:03
+454|Janet|jmarshallcl@odnoklassniki.ru|52.46.193.166|1998-10-04 00:02:21
+455|Shirley|speterscm@salon.com|248.126.31.15|1987-01-30 06:04:59
+456|Annie|abowmancn@economist.com|222.213.248.59|2006-03-14 23:52:59
+457|Jean|jlarsonco@blogspot.com|71.41.25.195|2007-09-08 23:49:45
+458|Phillip|pmoralescp@stanford.edu|74.119.87.28|2011-03-14 20:25:40
+459|Norma|nrobinsoncq@economist.com|28.225.21.54|1989-10-21 01:22:43
+460|Kimberly|kclarkcr@dion.ne.jp|149.171.132.153|2008-06-27 02:27:30
+461|Ruby|rmorriscs@ucla.edu|177.85.163.249|2016-01-28 16:43:44
+462|Jonathan|jcastilloct@tripod.com|78.4.28.77|2000-05-24 17:33:06
+463|Edward|ebryantcu@jigsy.com|140.31.98.193|1992-12-17 08:32:47
+464|Chris|chamiltoncv@eepurl.com|195.171.234.206|1970-12-05 03:42:19
+465|Michael|mweavercw@reference.com|7.233.133.213|1987-03-29 02:30:54
+466|Howard|hlawrencecx@businessweek.com|113.225.124.224|1990-07-30 07:20:57
+467|Philip|phowardcy@comsenz.com|159.170.247.249|2010-10-15 10:18:37
+468|Mary|mmarshallcz@xing.com|125.132.189.70|2007-07-19 13:48:47
+469|Scott|salvarezd0@theguardian.com|78.49.103.230|1987-10-31 06:10:44
+470|Wayne|wcarrolld1@blog.com|238.1.120.204|1980-11-19 03:26:10
+471|Jennifer|jwoodsd2@multiply.com|92.20.224.49|2010-05-06 22:17:04
+472|Raymond|rwelchd3@toplist.cz|176.158.35.240|2007-12-12 19:02:51
+473|Steven|sdixond4@wisc.edu|167.55.237.52|1984-05-05 11:44:37
+474|Ralph|rjamesd5@ameblo.jp|241.190.50.133|2000-07-06 08:44:37
+475|Jason|jrobinsond6@hexun.com|138.119.139.56|2006-02-03 05:27:45
+476|Doris|dwoodd7@fema.gov|180.220.156.190|1978-05-11 20:14:20
+477|Elizabeth|eberryd8@youtu.be|74.188.53.229|2006-11-18 08:29:06
+478|Irene|igilbertd9@privacy.gov.au|194.152.218.1|1985-09-17 02:46:52
+479|Jessica|jdeanda@ameblo.jp|178.103.93.118|1974-06-07 19:04:05
+480|Rachel|ralvarezdb@phoca.cz|17.22.223.174|1999-03-08 02:43:25
+481|Kenneth|kthompsondc@shinystat.com|229.119.91.234|2007-05-15 13:17:32
+482|Harold|hmurraydd@parallels.com|133.26.188.80|1993-11-15 03:42:07
+483|Paula|phowellde@samsung.com|34.215.28.216|1993-11-29 15:55:00
+484|Ruth|rpiercedf@tripadvisor.com|111.30.130.123|1986-08-17 10:19:38
+485|Phyllis|paustindg@vk.com|50.84.34.178|1994-04-13 03:05:24
+486|Laura|lfosterdh@usnews.com|37.8.101.33|2001-06-30 08:58:59
+487|Eric|etaylordi@com.com|103.183.253.45|2006-09-15 20:18:46
+488|Doris|driveradj@prweb.com|247.16.2.199|1989-05-08 09:27:09
+489|Ryan|rhughesdk@elegantthemes.com|103.234.153.232|1989-08-01 18:36:06
+490|Steve|smoralesdl@jigsy.com|3.76.84.207|2011-03-13 17:01:05
+491|Louis|lsullivandm@who.int|78.135.44.208|1975-11-26 16:01:23
+492|Catherine|ctuckerdn@seattletimes.com|93.137.106.21|1990-03-13 16:14:56
+493|Ann|adixondo@gmpg.org|191.136.222.111|2002-06-05 14:22:18
+494|Johnny|jhartdp@amazon.com|103.252.198.39|1988-07-30 23:54:49
+495|Susan|srichardsdq@skype.com|126.247.192.11|2005-01-09 12:08:14
+496|Brenda|bparkerdr@skype.com|63.232.216.86|1974-05-18 05:58:29
+497|Tammy|tmurphyds@constantcontact.com|56.56.37.112|2014-08-05 18:22:25
+498|Larry|lhayesdt@wordpress.com|162.146.13.46|1997-02-26 14:01:53
+499||ethomasdu@hhs.gov|6.241.88.250|2007-09-14 13:03:34
+500|Paula|pshawdv@networksolutions.com|123.27.47.249|2003-10-30 21:19:20
+""".lstrip()
 
 
 seeds__expected_sql = """
 create table {schema}.seed_expected (
     seed_id INTEGER,
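
One detail worth noticing in the fixture above: row 499 has an empty first_name field ("499||ethomasdu@hhs.gov|..."), so the seed also exercises empty values under the pipe delimiter. Parsed with the stdlib csv module for illustration:

    import csv
    import io

    row = "499||ethomasdu@hhs.gov|6.241.88.250|2007-09-14 13:03:34\n"
    fields = next(csv.reader(io.StringIO(row), delimiter="|"))
    print(fields[1] == "")  # True — the empty first_name survives parsing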

@@ -18,6 +18,7 @@ from dbt.tests.util import (
 from dbt.tests.adapter.simple_seed.fixtures import (
     models__downstream_from_seed_actual,
     models__from_basic_seed,
+    models__downstream_from_seed_pipe_separated,
 )
 
 from dbt.tests.adapter.simple_seed.seeds import (

@@ -29,6 +30,7 @@ from dbt.tests.adapter.simple_seed.seeds import (
     seeds__wont_parse_csv,
     seed__unicode_csv,
     seed__with_dots_csv,
+    seeds__pipe_separated_csv,
 )
||||||
@@ -163,6 +165,84 @@ class TestSeedCustomSchema(SeedTestBase):
|
|||||||
check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])
|
check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"])
|
||||||
|
|
||||||
|
|
||||||
|
class SeedUniqueDelimiterTestBase(SeedConfigBase):
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def project_config_update(self):
|
||||||
|
return {
|
||||||
|
"seeds": {"quote_columns": False, "delimiter": "|"},
|
||||||
|
}
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class", autouse=True)
|
||||||
|
def setUp(self, project):
|
||||||
|
"""Create table for ensuring seeds and models used in tests build correctly"""
|
||||||
|
project.run_sql(seeds__expected_sql)
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def seeds(self, test_data_dir):
|
||||||
|
return {"seed_pipe_separated.csv": seeds__pipe_separated_csv}
|
||||||
|
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def models(self):
|
||||||
|
return {
|
||||||
|
"models__downstream_from_seed_pipe_separated.sql": models__downstream_from_seed_pipe_separated,
|
||||||
|
}
|
||||||
|
|
||||||
|
def _build_relations_for_test(self, project):
|
||||||
|
"""The testing environment needs seeds and models to interact with"""
|
||||||
|
seed_result = run_dbt(["seed"])
|
||||||
|
assert len(seed_result) == 1
|
||||||
|
check_relations_equal(project.adapter, ["seed_expected", "seed_pipe_separated"])
|
||||||
|
|
||||||
|
run_result = run_dbt()
|
||||||
|
assert len(run_result) == 1
|
||||||
|
check_relations_equal(
|
||||||
|
project.adapter, ["models__downstream_from_seed_pipe_separated", "seed_expected"]
|
||||||
|
)
|
||||||
|
|
||||||
|
def _check_relation_end_state(self, run_result, project, exists: bool):
|
||||||
|
assert len(run_result) == 1
|
||||||
|
check_relations_equal(project.adapter, ["seed_pipe_separated", "seed_expected"])
|
||||||
|
if exists:
|
||||||
|
check_table_does_exist(project.adapter, "models__downstream_from_seed_pipe_separated")
|
||||||
|
else:
|
||||||
|
check_table_does_not_exist(
|
||||||
|
project.adapter, "models__downstream_from_seed_pipe_separated"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSeedWithUniqueDelimiter(SeedUniqueDelimiterTestBase):
|
||||||
|
def test_seed_with_unique_delimiter(self, project):
|
||||||
|
"""Testing correct run of seeds with a unique delimiter (pipe in this case)"""
|
||||||
|
self._build_relations_for_test(project)
|
||||||
|
self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSeedWithWrongDelimiter(SeedUniqueDelimiterTestBase):
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def project_config_update(self):
|
||||||
|
return {
|
||||||
|
"seeds": {"quote_columns": False, "delimiter": ";"},
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_seed_with_wrong_delimiter(self, project):
|
||||||
|
"""Testing failure of running dbt seed with a wrongly configured delimiter"""
|
||||||
|
seed_result = run_dbt(["seed"], expect_pass=False)
|
||||||
|
assert "syntax error" in seed_result.results[0].message.lower()
|
||||||
|
|
||||||
|
|
||||||
|
class TestSeedWithEmptyDelimiter(SeedUniqueDelimiterTestBase):
|
||||||
|
@pytest.fixture(scope="class")
|
||||||
|
def project_config_update(self):
|
||||||
|
return {
|
||||||
|
"seeds": {"quote_columns": False, "delimiter": ""},
|
||||||
|
}
|
||||||
|
|
||||||
|
def test_seed_with_empty_delimiter(self, project):
|
||||||
|
"""Testing failure of running dbt seed with an empty configured delimiter value"""
|
||||||
|
seed_result = run_dbt(["seed"], expect_pass=False)
|
||||||
|
assert "compilation error" in seed_result.results[0].message.lower()
|
||||||
|
|
||||||
|
|
||||||
class TestSimpleSeedEnabledViaConfig(object):
|
class TestSimpleSeedEnabledViaConfig(object):
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
def seeds(self):
|
def seeds(self):
|
||||||
|
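
Why the "wrong delimiter" case above likely surfaces as a database syntax error rather than a CSV parse failure: with delimiter=";" the pipe-separated file parses as a single field per line, so the generated table ends up with one column whose name is the whole header. A quick way to see the mis-parse (stdlib csv, illustrative only — dbt's own loader is agate-based):

    import csv
    import io

    sample = "seed_id|first_name\n1|Larry\n"
    rows = list(csv.reader(io.StringIO(sample), delimiter=";"))
    print(rows[0])  # ['seed_id|first_name'] — one column, not two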

File diff suppressed because one or more lines are too long (10 files)

@@ -55,6 +55,7 @@ def get_rendered_seed_config(**updates):
     "pre-hook": [],
     "post-hook": [],
     "column_types": {},
+    "delimiter": ",",
     "quoting": {},
     "tags": [],
     "quote_columns": True,

@@ -49,9 +49,8 @@ class TestIntrospectFlag:
         assert get_lines("first_model") == ["select 1 as fun"]
         assert any("_test_compile as schema" in line for line in get_lines("second_model"))
 
-    @pytest.mark.skip("Investigate flaky test #7179")
     def test_no_introspect(self, project):
-        with pytest.raises(DbtRuntimeError):
+        with pytest.raises(DbtRuntimeError, match="connection never acquired for thread"):
             run_dbt(["compile", "--no-introspect"])
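
Un-skipping the test and adding match= both tighten it: the assertion now passes only if the DbtRuntimeError message matches the given regex (pytest uses re.search), so an unrelated runtime error can no longer satisfy it. A generic illustration:

    import pytest

    def boom():
        raise RuntimeError("connection never acquired for thread")

    with pytest.raises(RuntimeError, match="never acquired"):
        boom()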

@@ -164,6 +163,10 @@ class TestCompile:
         with pytest.raises(DbtException, match="Error parsing inline query"):
             run_dbt(["compile", "--inline", "select * from {{ ref('third_model') }}"])
 
+    def test_inline_fail_database_error(self, project):
+        with pytest.raises(DbtRuntimeError, match="Database Error"):
+            run_dbt(["show", "--inline", "slect asdlkjfsld;j"])
+
     def test_multiline_jinja(self, project):
         (results, log_output) = run_dbt_and_capture(["compile", "--inline", model_multiline_jinja])
         assert len(results) == 1
@@ -13,7 +13,6 @@ from contextlib import contextmanager
 import dbt.semver
 import dbt.config
 import dbt.exceptions
-from dbt.contracts.results import RunStatus

 from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture

@@ -208,9 +207,8 @@ class TestMissingDependency(object):

     def test_missing_dependency(self, project):
         # dbt should raise a runtime exception
-        res = run_dbt(["compile"], expect_pass=False)
-        assert len(res) == 1
-        assert res[0].status == RunStatus.Error
+        with pytest.raises(dbt.exceptions.DbtRuntimeError):
+            run_dbt(["compile"])


 class TestSimpleDependencyWithSchema(BaseDependencyTest):
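The rewrite above is a pattern change repeated in this compare: instead of running with `expect_pass=False` and inspecting result statuses, the test now lets the failure propagate and asserts on the exception type. Both styles in one sketch (hypothetical test bodies, assuming a project fixture whose compile step fails):

import pytest
import dbt.exceptions
from dbt.tests.util import run_dbt


def test_failure_via_results(project):
    # Old style: swallow the failure, then inspect the RunResult statuses.
    res = run_dbt(["compile"], expect_pass=False)
    assert res[0].status == "error"


def test_failure_via_exception(project):
    # New style: expect_pass defaults to True, so the error is raised.
    with pytest.raises(dbt.exceptions.DbtRuntimeError):
        run_dbt(["compile"])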
@@ -25,7 +25,7 @@ metrics:
     type_params:
       measure:
         name: "years_tenure"
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('people_entity__loves_dbt') }} is true"
 """

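This filter rewrite recurs throughout the compare: metric filters move from the lower-case `dimension('name')` call to the class-style `Dimension('entity__name')` form, where the dimension name is prefixed with an entity of the semantic model it belongs to (here `people_entity`; elsewhere in this diff, `id`). A minimal before/after, kept as Python string fixtures in the style of these tests:

# Before: bare dimension name, lower-case call.
filter_old = "{{ dimension('loves_dbt') }} is true"

# After: entity-prefixed reference, class-style call.
filter_new = "{{ Dimension('people_entity__loves_dbt') }} is true"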
@@ -392,6 +392,7 @@ class TestList:
     "pre-hook": [],
     "quoting": {},
     "column_types": {},
+    "delimiter": ",",
     "persist_docs": {},
     "quote_columns": False,
     "full_refresh": None,
@@ -4,6 +4,12 @@ models__dep_macro = """
 }}
 """

+models__materialization_macro = """
+{{
+  materialization_macro()
+}}
+"""
+
 models__with_undefined_macro = """
 {{ dispatch_to_nowhere() }}
 select 1 as id
@@ -75,6 +81,12 @@ macros__my_macros = """
 {% endmacro %}
 """

+macros__named_materialization = """
+{% macro materialization_macro() %}
+select 1 as foo
+{% endmacro %}
+"""
+
 macros__no_default_macros = """
 {% macro do_something2(foo2, bar2) %}
@@ -20,12 +20,14 @@ from tests.functional.macros.fixtures import (
     models__override_get_columns_macros,
     models__deprecated_adapter_macro_model,
     models__incorrect_dispatch,
+    models__materialization_macro,
     macros__my_macros,
     macros__no_default_macros,
     macros__override_get_columns_macros,
     macros__package_override_get_columns_macros,
     macros__deprecated_adapter_macro,
     macros__incorrect_dispatch,
+    macros__named_materialization,
 )


@@ -78,6 +80,21 @@ class TestMacros:
         check_relations_equal(project.adapter, ["expected_local_macro", "local_macro"])


+class TestMacrosNamedMaterialization:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "models_materialization_macro.sql": models__materialization_macro,
+        }
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        return {"macros_named_materialization.sql": macros__named_materialization}
+
+    def test_macro_with_materialization_in_name_works(self, project):
+        run_dbt(expect_pass=True)
+
+
 class TestInvalidMacros:
     @pytest.fixture(scope="class")
     def models(self):
@@ -70,7 +70,7 @@ metrics:
     type_params:
       measure:
         name: "years_tenure"
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

   - name: average_tenure
     label: "Average tenure"
@@ -115,7 +115,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

   - name: collective_window
     label: "Collective window"
@@ -124,7 +124,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"
     window: 14 days

   - name: average_tenure
@@ -452,7 +452,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -479,7 +479,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -353,7 +353,7 @@ metrics:
     type_params:
       measure:
         name: customers
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"
     +meta:
       is_okr: True
     tags:
@@ -472,7 +472,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -619,7 +619,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -1008,7 +1008,7 @@ metrics:
     type_params:
       measure:
         name: years_tenure
-    filter: "{{dimension('loves_dbt')}} is true"
+    filter: "{{ Dimension('id__loves_dbt') }} is true"

 """
@@ -8,9 +8,6 @@ from tests.functional.partial_parsing.fixtures import (
     models_schema1_yml,
     models_schema2_yml,
     models_schema2b_yml,
-    models_versions_schema_yml,
-    models_versions_defined_in_schema_yml,
-    models_versions_updated_schema_yml,
     model_three_sql,
     model_three_modified_sql,
     model_four1_sql,
@@ -71,7 +68,7 @@ from tests.functional.partial_parsing.fixtures import (
     groups_schema_yml_two_groups_private_orders_invalid_access,
 )

-from dbt.exceptions import CompilationError, ParsingError, DuplicateVersionedUnversionedError
+from dbt.exceptions import CompilationError, ParsingError
 from dbt.contracts.files import ParseFileType
 from dbt.contracts.results import TestStatus

@@ -303,72 +300,6 @@ class TestModels:
         assert model_id not in manifest.disabled


-class TestVersionedModels:
-    @pytest.fixture(scope="class")
-    def models(self):
-        return {
-            "model_one_v1.sql": model_one_sql,
-            "model_one.sql": model_one_sql,
-            "model_one_downstream.sql": model_four2_sql,
-            "schema.yml": models_versions_schema_yml,
-        }
-
-    def test_pp_versioned_models(self, project):
-        results = run_dbt(["run"])
-        assert len(results) == 3
-
-        manifest = get_manifest(project.project_root)
-        model_one_node = manifest.nodes["model.test.model_one.v1"]
-        assert not model_one_node.is_latest_version
-        model_two_node = manifest.nodes["model.test.model_one.v2"]
-        assert model_two_node.is_latest_version
-        # assert unpinned ref points to latest version
-        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
-        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]
-
-        # update schema.yml block - model_one is now 'defined_in: model_one_different'
-        rm_file(project.project_root, "models", "model_one.sql")
-        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
-        write_file(
-            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
-        )
-        results = run_dbt(["--partial-parse", "run"])
-        assert len(results) == 3
-
-        # update versions schema.yml block - latest_version from 2 to 1
-        write_file(
-            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
-        )
-        results, log_output = run_dbt_and_capture(
-            ["--partial-parse", "--log-format", "json", "run"]
-        )
-        assert len(results) == 3
-
-        manifest = get_manifest(project.project_root)
-        model_one_node = manifest.nodes["model.test.model_one.v1"]
-        assert model_one_node.is_latest_version
-        model_two_node = manifest.nodes["model.test.model_one.v2"]
-        assert not model_two_node.is_latest_version
-        # assert unpinned ref points to latest version
-        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
-        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]
-        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
-        assert "UnpinnedRefNewVersionAvailable" in log_output
-
-        # update versioned model
-        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
-        results = run_dbt(["--partial-parse", "run"])
-        assert len(results) == 3
-        manifest = get_manifest(project.project_root)
-        assert len(manifest.nodes) == 3
-        print(f"--- nodes: {manifest.nodes.keys()}")
-
-        # create a new model_one in model_one.sql and re-parse
-        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
-        with pytest.raises(DuplicateVersionedUnversionedError):
-            run_dbt(["parse"])
-
-
 class TestSources:
     @pytest.fixture(scope="class")
     def models(self):
tests/functional/partial_parsing/test_versioned_models.py (new file, 126 lines)
@@ -0,0 +1,126 @@
+import pytest
+import pathlib
+from dbt.tests.util import (
+    run_dbt,
+    get_manifest,
+    write_file,
+    rm_file,
+    read_file,
+)
+from dbt.exceptions import DuplicateVersionedUnversionedError
+
+model_one_sql = """
+select 1 as fun
+"""
+
+model_one_downstream_sql = """
+select fun from {{ ref('model_one') }}
+"""
+
+models_versions_schema_yml = """
+
+models:
+    - name: model_one
+      description: "The first model"
+      versions:
+          - v: 1
+          - v: 2
+"""
+
+models_versions_defined_in_schema_yml = """
+models:
+    - name: model_one
+      description: "The first model"
+      versions:
+          - v: 1
+          - v: 2
+            defined_in: model_one_different
+"""
+
+models_versions_updated_schema_yml = """
+models:
+    - name: model_one
+      latest_version: 1
+      description: "The first model"
+      versions:
+          - v: 1
+          - v: 2
+            defined_in: model_one_different
+"""
+
+model_two_sql = """
+select 1 as notfun
+"""
+
+
+class TestVersionedModels:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model_one_v1.sql": model_one_sql,
+            "model_one.sql": model_one_sql,
+            "model_one_downstream.sql": model_one_downstream_sql,
+            "schema.yml": models_versions_schema_yml,
+        }
+
+    def test_pp_versioned_models(self, project):
+        results = run_dbt(["run"])
+        assert len(results) == 3
+
+        manifest = get_manifest(project.project_root)
+        model_one_node = manifest.nodes["model.test.model_one.v1"]
+        assert not model_one_node.is_latest_version
+        model_two_node = manifest.nodes["model.test.model_one.v2"]
+        assert model_two_node.is_latest_version
+        # assert unpinned ref points to latest version
+        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
+        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"]
+
+        # update schema.yml block - model_one is now 'defined_in: model_one_different'
+        rm_file(project.project_root, "models", "model_one.sql")
+        write_file(model_one_sql, project.project_root, "models", "model_one_different.sql")
+        write_file(
+            models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml"
+        )
+        results = run_dbt(["--partial-parse", "run"])
+        assert len(results) == 3
+
+        # update versions schema.yml block - latest_version from 2 to 1
+        write_file(
+            models_versions_updated_schema_yml, project.project_root, "models", "schema.yml"
+        )
+        # This is where the test was failing in a CI run with:
+        # relation \"test..._test_partial_parsing.model_one_downstream\" does not exist
+        # because in core/dbt/include/global_project/macros/materializations/models/view/view.sql
+        # "existing_relation" didn't actually exist by the time it gets to the rename of the
+        # existing relation.
+        (pathlib.Path(project.project_root) / "log_output").mkdir(parents=True, exist_ok=True)
+        results = run_dbt(
+            ["--partial-parse", "--log-format-file", "json", "--log-path", "log_output", "run"]
+        )
+        assert len(results) == 3
+
+        manifest = get_manifest(project.project_root)
+        model_one_node = manifest.nodes["model.test.model_one.v1"]
+        assert model_one_node.is_latest_version
+        model_two_node = manifest.nodes["model.test.model_one.v2"]
+        assert not model_two_node.is_latest_version
+        # assert unpinned ref points to latest version
+        model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"]
+        assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"]

+        # assert unpinned ref to latest-not-max version yields an "FYI" info-level log
+        log_output = read_file("log_output", "dbt.log").replace("\n", " ").replace("\\n", " ")
+        assert "UnpinnedRefNewVersionAvailable" in log_output
+
+        # update versioned model
+        write_file(model_two_sql, project.project_root, "models", "model_one_different.sql")
+        results = run_dbt(["--partial-parse", "run"])
+        assert len(results) == 3
+        manifest = get_manifest(project.project_root)
+        assert len(manifest.nodes) == 3
+
+        # create a new model_one in model_one.sql and re-parse
+        write_file(model_one_sql, project.project_root, "models", "model_one.sql")
+        with pytest.raises(DuplicateVersionedUnversionedError):
+            run_dbt(["parse"])
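For context on what the fixtures above exercise: `model_one_downstream` uses an unpinned `ref('model_one')`, which resolves to whatever `latest_version` is at parse time, hence the flip from `model.test.model_one.v2` to `...v1` after the schema.yml update. A pinned reference (illustrative sketch; the `v` argument is the version-pinning form of `ref` for versioned models) would not move:

# Unpinned: follows latest_version (v2 at first, then v1 in the test above).
model_unpinned_sql = """
select fun from {{ ref('model_one') }}
"""

# Pinned: stays on v1 regardless of latest_version (hypothetical fixture).
model_pinned_sql = """
select fun from {{ ref('model_one', v=1) }}
"""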
tests/functional/semantic_models/fixtures.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+metricflow_time_spine_sql = """
+SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day
+"""
+
+models_people_sql = """
+select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at
+union all
+select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at
+union all
+select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at
+"""
+
+models_people_metrics_yml = """
+version: 2
+
+metrics:
+  - name: number_of_people
+    label: "Number of people"
+    description: Total count of people
+    type: simple
+    type_params:
+      measure: people
+    meta:
+        my_meta: 'testing'
+"""
+
+semantic_model_people_yml = """
+version: 2
+
+semantic_models:
+  - name: semantic_people
+    model: ref('people')
+    dimensions:
+      - name: favorite_color
+        type: categorical
+      - name: created_at
+        type: TIME
+        type_params:
+          time_granularity: day
+    measures:
+      - name: years_tenure
+        agg: SUM
+        expr: tenure
+      - name: people
+        agg: count
+        expr: id
+    entities:
+      - name: id
+        type: primary
+    defaults:
+      agg_time_dimension: created_at
+"""
@@ -61,6 +61,8 @@ semantic_models:
       - name: user
         type: foreign
         expr: user_id
+      - name: id
+        type: primary

 metrics:
   - name: records_with_revenue
tests/functional/semantic_models/test_semantic_models.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+import pytest
+
+from dbt.contracts.graph.manifest import Manifest
+from dbt.exceptions import CompilationError
+from dbt.tests.util import run_dbt
+
+
+from tests.functional.semantic_models.fixtures import (
+    models_people_sql,
+    metricflow_time_spine_sql,
+    semantic_model_people_yml,
+    models_people_metrics_yml,
+)
+
+
+class TestSemanticModelDependsOn:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "people.sql": models_people_sql,
+            "metricflow_time_spine.sql": metricflow_time_spine_sql,
+            "semantic_models.yml": semantic_model_people_yml,
+            "people_metrics.yml": models_people_metrics_yml,
+        }
+
+    def test_depends_on(self, project):
+        manifest = run_dbt(["parse"])
+        assert isinstance(manifest, Manifest)
+
+        expected_depends_on_for_people_semantic_model = ["model.test.people"]
+
+        number_of_people_metric = manifest.semantic_models["semantic_model.test.semantic_people"]
+        assert (
+            number_of_people_metric.depends_on.nodes
+            == expected_depends_on_for_people_semantic_model
+        )
+
+
+class TestSemanticModelUnknownModel:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "not_people.sql": models_people_sql,
+            "metricflow_time_spine.sql": metricflow_time_spine_sql,
+            "semantic_models.yml": semantic_model_people_yml,
+            "people_metrics.yml": models_people_metrics_yml,
+        }
+
+    def test_unknown_model_raises_issue(self, project):
+        with pytest.raises(CompilationError) as excinfo:
+            run_dbt(["parse"])
+        assert "depends on a node named 'people' which was not found" in str(excinfo.value)
@@ -72,10 +72,13 @@ class TestShow:
         assert "sample_bool" in log_output

     def test_inline_fail(self, project):
-        run_dbt(["build"])
         with pytest.raises(DbtException, match="Error parsing inline query"):
             run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"])

+    def test_inline_fail_database_error(self, project):
+        with pytest.raises(DbtRuntimeError, match="Database Error"):
+            run_dbt(["show", "--inline", "slect asdlkjfsld;j"])
+
     def test_ephemeral_model(self, project):
         run_dbt(["build"])
         (results, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"])
@@ -96,6 +96,18 @@ snapshots:
       owner: 'a_owner'
 """

+models__schema_with_target_schema_yml = """
+version: 2
+snapshots:
+  - name: snapshot_actual
+    tests:
+      - mutually_exclusive_ranges
+    config:
+      meta:
+        owner: 'a_owner'
+      target_schema: schema_from_schema_yml
+"""
+
 models__ref_snapshot_sql = """
 select * from {{ ref('snapshot_actual') }}
 """
@@ -281,6 +293,26 @@ snapshots_pg__snapshot_sql = """
 {% endsnapshot %}
 """

+snapshots_pg__snapshot_no_target_schema_sql = """
+{% snapshot snapshot_actual %}
+
+    {{
+        config(
+            target_database=var('target_database', database),
+            unique_key='id || ' ~ "'-'" ~ ' || first_name',
+            strategy='timestamp',
+            updated_at='updated_at',
+        )
+    }}
+
+    {% if var('invalidate_hard_deletes', 'false') | as_bool %}
+        {{ config(invalidate_hard_deletes=True) }}
+    {% endif %}
+
+    select * from {{target.database}}.{{target.schema}}.seed
+
+{% endsnapshot %}
+"""
+
 models_slow__gen_sql = """

@@ -2,13 +2,15 @@ import os
 from datetime import datetime
 import pytz
 import pytest
-from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name
+from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name, write_file
 from tests.functional.simple_snapshot.fixtures import (
     models__schema_yml,
+    models__schema_with_target_schema_yml,
     models__ref_snapshot_sql,
     seeds__seed_newcol_csv,
     seeds__seed_csv,
     snapshots_pg__snapshot_sql,
+    snapshots_pg__snapshot_no_target_schema_sql,
     macros__test_no_overlaps_sql,
     macros_custom_snapshot__custom_sql,
     snapshots_pg_custom_namespaced__snapshot_sql,
@@ -123,6 +125,41 @@ class TestBasicRef(Basic):
         ref_setup(project, num_snapshot_models=1)


+class TestBasicTargetSchemaConfig(Basic):
+    @pytest.fixture(scope="class")
+    def snapshots(self):
+        return {"snapshot.sql": snapshots_pg__snapshot_no_target_schema_sql}
+
+    @pytest.fixture(scope="class")
+    def project_config_update(self, unique_schema):
+        return {
+            "snapshots": {
+                "test": {
+                    "target_schema": unique_schema + "_alt",
+                }
+            }
+        }
+
+    def test_target_schema(self, project):
+        manifest = run_dbt(["parse"])
+        assert len(manifest.nodes) == 5
+        # ensure that the schema in the snapshot node is the same as target_schema
+        snapshot_id = "snapshot.test.snapshot_actual"
+        snapshot_node = manifest.nodes[snapshot_id]
+        assert snapshot_node.schema == f"{project.test_schema}_alt"
+        assert (
+            snapshot_node.relation_name
+            == f'"{project.database}"."{project.test_schema}_alt"."snapshot_actual"'
+        )
+        assert snapshot_node.meta == {"owner": "a_owner"}
+
+        # write out schema.yml file and check again
+        write_file(models__schema_with_target_schema_yml, "models", "schema.yml")
+        manifest = run_dbt(["parse"])
+        snapshot_node = manifest.nodes[snapshot_id]
+        assert snapshot_node.schema == "schema_from_schema_yml"
+
+
 class CustomNamespace:
     @pytest.fixture(scope="class")
     def snapshots(self):
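The new `TestBasicTargetSchemaConfig` pins down precedence for the snapshot `target_schema` config: the project-level value (`<unique_schema>_alt` from `project_config_update`) applies first, and a `target_schema` set under `config:` in schema.yml then overrides it after re-parsing. A condensed sketch of the two layers (values are illustrative):

# Layer 1: project-level config (the dbt_project.yml equivalent).
project_config = {"snapshots": {"test": {"target_schema": "snapshots_alt"}}}

# Layer 2: schema.yml patch; per the test above, this wins on the next parse,
# so the snapshot node ends up with schema == "schema_from_schema_yml".
schema_yml_config = """
snapshots:
  - name: snapshot_actual
    config:
      target_schema: schema_from_schema_yml
"""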
@@ -57,6 +57,11 @@ class TestFlags:
         assert hasattr(flags, "LOG_PATH")
         assert getattr(flags, "LOG_PATH") == Path("logs")

+    def test_log_file_max_size_default(self, run_context):
+        flags = Flags(run_context)
+        assert hasattr(flags, "LOG_FILE_MAX_BYTES")
+        assert getattr(flags, "LOG_FILE_MAX_BYTES") == 10 * 1024 * 1024
+
     @pytest.mark.parametrize(
         "set_stats_param,do_not_track,expected_anonymous_usage_stats",
         [
@@ -386,3 +391,8 @@ class TestFlags:
         args_dict = {"which": "some bad command"}
         with pytest.raises(DbtInternalError, match=r"does not match value of which"):
             self._create_flags_from_dict(Command.RUN, args_dict)
+
+    def test_from_dict_0_value(self):
+        args_dict = {"log_file_max_bytes": 0}
+        flags = Flags.from_dict(Command.RUN, args_dict)
+        assert flags.LOG_FILE_MAX_BYTES == 0
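The two new flag tests bracket the `LOG_FILE_MAX_BYTES` behavior: the default is 10 MiB, and an explicit 0 must survive `from_dict` (a falsy value that a naive `or`-style default would clobber). A standalone sketch, assuming the `Flags`/`Command` import paths used by dbt's CLI module:

from dbt.cli.flags import Flags
from dbt.cli.types import Command

# Explicit zero must round-trip; with RotatingFileHandler semantics,
# maxBytes=0 means the log file never rolls over.
flags = Flags.from_dict(Command.RUN, {"log_file_max_bytes": 0})
assert flags.LOG_FILE_MAX_BYTES == 0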
@@ -424,6 +424,9 @@ def test_invocation_args_to_dict_in_macro_runtime_context(
     # Comes from unit/utils.py config_from_parts_or_dicts method
     assert ctx["invocation_args_dict"]["profile_dir"] == "/dev/null"

+    assert isinstance(ctx["invocation_args_dict"]["warn_error_options"], Dict)
+    assert ctx["invocation_args_dict"]["warn_error_options"] == {"include": [], "exclude": []}
+

 def test_model_parse_context(config_postgres, manifest_fx, get_adapter, get_include_paths):
     ctx = providers.generate_parser_model_context(
@@ -520,6 +520,7 @@ def basic_parsed_seed_dict():
     "alias": "foo",
     "config": {
         "column_types": {},
+        "delimiter": ",",
         "enabled": True,
         "materialized": "seed",
         "persist_docs": {},
@@ -611,6 +612,7 @@ def complex_parsed_seed_dict():
     "alias": "foo",
     "config": {
         "column_types": {},
+        "delimiter": ",",
         "enabled": True,
         "materialized": "seed",
         "persist_docs": {"relation": True, "columns": True},
@@ -669,6 +671,7 @@ def complex_parsed_seed_object():
     alias="foo",
     config=SeedConfig(
         quote_columns=True,
+        delimiter=",",
         persist_docs={"relation": True, "columns": True},
     ),
     deferred=False,
@@ -28,6 +28,11 @@ class TestCoreDbtUtils(unittest.TestCase):
         connection_exception_retry(lambda: Counter._add_with_untar_exception(), 5)
         self.assertEqual(2, counter)  # 2 = original attempt returned ReadError, plus 1 retry

+    def test_connection_exception_retry_success_failed_eofexception(self):
+        Counter._reset()
+        connection_exception_retry(lambda: Counter._add_with_eof_exception(), 5)
+        self.assertEqual(2, counter)  # 2 = original attempt returned EOFError, plus 1 retry
+

 counter: int = 0

@@ -57,6 +62,12 @@ class Counter:
         if counter < 2:
             raise tarfile.ReadError

+    def _add_with_eof_exception():
+        global counter
+        counter += 1
+        if counter < 2:
+            raise EOFError
+
     def _reset():
         global counter
         counter = 0
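The added EOFError case mirrors the existing ReadError one: `connection_exception_retry` re-invokes the callable when it raises one of the transient exception types it handles, and the module-level `counter` records the attempts (1 failure + 1 retry = 2). A self-contained sketch of the same behavior, assuming `connection_exception_retry` is importable from `dbt.utils`:

from dbt.utils import connection_exception_retry

attempts = {"count": 0}


def flaky_download():
    attempts["count"] += 1
    if attempts["count"] < 2:
        raise EOFError  # transient failure on the first call only


connection_exception_retry(flaky_download, 5)
assert attempts["count"] == 2  # one failure plus one successful retry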
@@ -2,7 +2,7 @@ import pytest
 import re
 from typing import TypeVar

-from dbt.contracts.results import TimingInfo
+from dbt.contracts.results import TimingInfo, RunResult, RunStatus
 from dbt.events import AdapterLogger, types
 from dbt.events.base_types import (
     BaseEvent,
@@ -14,11 +14,15 @@ from dbt.events.base_types import (
     WarnLevel,
     msg_from_base_event,
 )
-from dbt.events.functions import msg_to_dict, msg_to_json
+from dbt.events.eventmgr import TestEventManager, EventManager
+from dbt.events.functions import msg_to_dict, msg_to_json, ctx_set_event_manager
 from dbt.events.helpers import get_json_string_utcnow
+from dbt.events.types import RunResultError
 from dbt.flags import set_from_args
 from argparse import Namespace

+from dbt.task.printer import print_run_result_error
+
 set_from_args(Namespace(WARN_ERROR=False), None)

@@ -388,8 +392,6 @@ sample_values = [
     types.RunResultErrorNoMessage(status=""),
     types.SQLCompiledPath(path=""),
     types.CheckNodeTestFailure(relation_name=""),
-    types.FirstRunResultError(msg=""),
-    types.AfterFirstRunResultError(msg=""),
     types.EndOfRunSummary(num_errors=0, num_warnings=0, keyboard_interrupt=False),
     types.LogSkipBecauseError(schema="", relation="", index=0, total=0),
     types.EnsureGitInstalled(),
@@ -485,3 +487,34 @@ def test_bad_serialization():
         str(excinfo.value)
         == "[Note]: Unable to parse dict {'param_event_doesnt_have': 'This should break'}"
     )
+
+
+def test_single_run_error():
+    try:
+        # Add a recording event manager to the context, so we can test events.
+        event_mgr = TestEventManager()
+        ctx_set_event_manager(event_mgr)
+
+        error_result = RunResult(
+            status=RunStatus.Error,
+            timing=[],
+            thread_id="",
+            execution_time=0.0,
+            node=None,
+            adapter_response=dict(),
+            message="oh no!",
+            failures=[],
+        )
+
+        print_run_result_error(error_result)
+        events = [e for e in event_mgr.event_history if isinstance(e[0], RunResultError)]
+
+        assert len(events) == 1
+        assert events[0][0].msg == "oh no!"
+
+    finally:
+        # Set an empty event manager unconditionally on exit. This is an early
+        # attempt at unit testing events, and we need to think about how it
+        # could be done in a thread safe way in the long run.
+        ctx_set_event_manager(EventManager())
@@ -2,7 +2,7 @@ from argparse import Namespace
 import pytest

 import dbt.flags as flags
-from dbt.events.functions import msg_to_dict, warn_or_error
+from dbt.events.functions import msg_to_dict, warn_or_error, setup_event_logger
 from dbt.events.types import InfoLevel, NoNodesForSelectionCriteria
 from dbt.exceptions import EventCompilationError

@@ -59,3 +59,13 @@ def test_msg_to_dict_handles_exceptions_gracefully():
     assert (
         False
     ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}"
+
+
+def test_setup_event_logger_specify_max_bytes(mocker):
+    patched_file_handler = mocker.patch("dbt.events.eventmgr.RotatingFileHandler")
+    args = Namespace(log_file_max_bytes=1234567)
+    flags.set_from_args(args, {})
+    setup_event_logger(flags.get_flags())
+    patched_file_handler.assert_called_once_with(
+        filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5
+    )
@@ -18,6 +18,7 @@ from dbt import tracking
 from dbt.contracts.files import SourceFile, FileHash, FilePath
 from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck
 from dbt.graph import NodeSelector, parse_difference
+from dbt.events.functions import setup_event_logger

 try:
     from queue import Empty
@@ -140,6 +141,7 @@ class GraphTest(unittest.TestCase):

         config = config_from_parts_or_dicts(project=cfg, profile=self.profile)
         dbt.flags.set_from_args(Namespace(), config)
+        setup_event_logger(dbt.flags.get_flags())
         object.__setattr__(dbt.flags.get_flags(), "PARTIAL_PARSE", False)
         return config

@@ -1,7 +1,20 @@
 import pytest

+from dbt.exceptions import DbtRuntimeError
 from dbt.plugins import PluginManager, dbtPlugin, dbt_hook
 from dbt.plugins.manifest import PluginNodes, ModelNodeArgs
 from dbt.plugins.contracts import PluginArtifacts, PluginArtifact
+from dbt.plugins.exceptions import dbtPluginError
+
+
+class ExceptionInitializePlugin(dbtPlugin):
+    def initialize(self) -> None:
+        raise Exception("plugin error message")
+
+
+class dbtRuntimeErrorInitializePlugin(dbtPlugin):
+    def initialize(self) -> None:
+        raise dbtPluginError("plugin error message")
+
+
 class GetNodesPlugin(dbtPlugin):
@@ -42,6 +55,14 @@ class TestPluginManager:
     def get_artifacts_plugins(self, get_artifacts_plugin):
         return [get_artifacts_plugin, GetArtifactsPlugin(project_name="test2")]

+    def test_plugin_manager_init_exception(self):
+        with pytest.raises(DbtRuntimeError, match="plugin error message"):
+            PluginManager(plugins=[ExceptionInitializePlugin(project_name="test")])
+
+    def test_plugin_manager_init_plugin_exception(self):
+        with pytest.raises(DbtRuntimeError, match="^Runtime Error\n plugin error message"):
+            PluginManager(plugins=[dbtRuntimeErrorInitializePlugin(project_name="test")])
+
     def test_plugin_manager_init_single_hook(self, get_nodes_plugin):
         pm = PluginManager(plugins=[get_nodes_plugin])
         assert len(pm.hooks) == 1
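Both new plugin tests assert the same contract from different directions: whether `initialize()` raises a plain `Exception` or the dedicated `dbtPluginError`, `PluginManager` surfaces it as a `DbtRuntimeError` (the latter case with a `Runtime Error` prefix on the message). A minimal sketch using a hypothetical plugin class:

import pytest

from dbt.exceptions import DbtRuntimeError
from dbt.plugins import PluginManager, dbtPlugin


class BrokenPlugin(dbtPlugin):  # hypothetical, for illustration only
    def initialize(self) -> None:
        raise Exception("boom")


def test_broken_plugin_surfaces_runtime_error():
    with pytest.raises(DbtRuntimeError, match="boom"):
        PluginManager(plugins=[BrokenPlugin(project_name="demo")])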
@@ -169,7 +169,7 @@ def test_metric_node_satisfies_protocol():

 def test_where_filter_satisfies_protocol():
     where_filter = WhereFilter(
-        where_sql_template="{{ dimension('dimension_name') }} AND {{ time_dimension('time_dimension_name', 'month') }} AND {{ entity('entity_name') }}"
+        where_sql_template="{{ Dimension('enity_name__dimension_name') }} AND {{ TimeDimension('entity_name__time_dimension_name', 'month') }} AND {{ Entity('entity_name') }}"
     )
     assert isinstance(where_filter, RuntimeCheckableWhereFilter)

tests/unit/test_semantic_models.py (new file, 81 lines)
@@ -0,0 +1,81 @@
+import pytest
+
+from typing import List
+
+from dbt.contracts.graph.nodes import SemanticModel
+from dbt.contracts.graph.semantic_models import Dimension, Entity, Measure, Defaults
+from dbt.node_types import NodeType
+from dbt_semantic_interfaces.references import MeasureReference
+from dbt_semantic_interfaces.type_enums import AggregationType, DimensionType, EntityType
+
+
+@pytest.fixture(scope="function")
+def dimensions() -> List[Dimension]:
+    return [Dimension(name="ds", type=DimensionType)]
+
+
+@pytest.fixture(scope="function")
+def entities() -> List[Entity]:
+    return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")]
+
+
+@pytest.fixture(scope="function")
+def measures() -> List[Measure]:
+    return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")]
+
+
+@pytest.fixture(scope="function")
+def default_semantic_model(
+    dimensions: List[Dimension], entities: List[Entity], measures: List[Measure]
+) -> SemanticModel:
+    return SemanticModel(
+        name="test_semantic_model",
+        resource_type=NodeType.SemanticModel,
+        model="ref('test_model')",
+        package_name="test",
+        path="test_path",
+        original_file_path="test_fixture",
+        unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model",
+        fqn=[],
+        defaults=Defaults(agg_time_dimension="ds"),
+        dimensions=dimensions,
+        entities=entities,
+        measures=measures,
+        node_relation=None,
+    )
+
+
+def test_checked_agg_time_dimension_for_measure_via_defaults(
+    default_semantic_model: SemanticModel,
+):
+    assert default_semantic_model.defaults.agg_time_dimension is not None
+    measure = default_semantic_model.measures[0]
+    measure.agg_time_dimension = None
+    default_semantic_model.checked_agg_time_dimension_for_measure(
+        MeasureReference(element_name=measure.name)
+    )
+
+
+def test_checked_agg_time_dimension_for_measure_via_measure(default_semantic_model: SemanticModel):
+    default_semantic_model.defaults = None
+    measure = default_semantic_model.measures[0]
+    measure.agg_time_dimension = default_semantic_model.dimensions[0].name
+    default_semantic_model.checked_agg_time_dimension_for_measure(
+        MeasureReference(element_name=measure.name)
+    )
+
+
+def test_checked_agg_time_dimension_for_measure_exception(default_semantic_model: SemanticModel):
+    default_semantic_model.defaults = None
+    measure = default_semantic_model.measures[0]
+    measure.agg_time_dimension = None
+
+    with pytest.raises(AssertionError) as execinfo:
+        default_semantic_model.checked_agg_time_dimension_for_measure(
+            MeasureReference(measure.name)
+        )
+
+    assert (
+        f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}"
+        in str(execinfo.value)
+    )
@@ -105,14 +105,14 @@ class TestYamlRendering(unittest.TestCase):
         dct = {
             "name": "test{{ metric_name_end }}",
             "description": "{{ docs('my_doc') }}",
-            "filter": "{{ dimension('my_dim') }} = false",
+            "filter": "{{ Dimension('my_entity__my_dim') }} = false",
         }
         # We expect the expression and description will not be rendered, but
         # other fields will be
        expected = {
             "name": "test_metric",
             "description": "{{ docs('my_doc') }}",
-            "filter": "{{ dimension('my_dim') }} = false",
+            "filter": "{{ Dimension('my_entity__my_dim') }} = false",
         }
         dct = renderer.render_data(dct)
         self.assertEqual(dct, expected)
Some files were not shown because too many files have changed in this diff