Mirror of https://github.com/dbt-labs/dbt-core (synced 2025-12-21 07:51:27 +00:00)

Compare commits: `jerco/test` ... `test-docs-` (45 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 734b6429c7 |  |
|  | a75b2c0a90 |  |
|  | 6b6ae22434 |  |
|  | 287f443ec9 |  |
|  | aea2c4a29b |  |
|  | 21ffe31270 |  |
|  | 70c9074625 |  |
|  | 2048a1af6f |  |
|  | 71223dc253 |  |
|  | e03d35a9fc |  |
|  | f988f76fcc |  |
|  | 0cacfd0f88 |  |
|  | c25260e5dd |  |
|  | c521fa6b74 |  |
|  | f304b4b2da |  |
|  | 064d890172 |  |
|  | febbd978b5 |  |
|  | 0d7e87fac6 |  |
|  | 3500528506 |  |
|  | c42221fcf3 |  |
|  | f49f28c331 |  |
|  | dc964c43d9 |  |
|  | 60e491b3c1 |  |
|  | 3bfce2bac9 |  |
|  | d63ad4cd82 |  |
|  | d5608dca32 |  |
|  | e7031f2d74 |  |
|  | 68a2996788 |  |
|  | f5f0a7f908 |  |
|  | 1cfc0851ca |  |
|  | 9fca33cb29 |  |
|  | 6360247d39 |  |
|  | d257d0b44c |  |
|  | f8d347e5f8 |  |
|  | a02db03f45 |  |
|  | 6e8388c653 |  |
|  | 6572b7e0a5 |  |
|  | 26bb5c3484 |  |
|  | 83f4992073 |  |
|  | 8392023e9f |  |
|  | 309efaa141 |  |
|  | a5993fc866 |  |
|  | 5b1bc72ae1 |  |
|  | 72b6a80b07 |  |
|  | f0fbb0e551 |  |
```diff
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.2.0a1
+current_version = 1.3.0a1
 parse = (?P<major>\d+)
 	\.(?P<minor>\d+)
 	\.(?P<patch>\d+)
@@ -28,8 +28,6 @@ first_value = 1

 [bumpversion:file:core/dbt/version.py]

 [bumpversion:file:core/scripts/create_adapter_plugins.py]

 [bumpversion:file:plugins/postgres/setup.py]

 [bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py]
```
```diff
@@ -2,6 +2,8 @@

 For information on prior major and minor releases, see their changelogs:

+* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
 * [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
 * [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
```
Each hunk below deletes an entry from the unreleased changelog:

```diff
@@ -1,7 +0,0 @@
-kind: Dependencies
-body: "Bump ubuntu from 20.04 to 22.04"
-time: 2022-04-27T19:51:28.000000-05:00
-custom:
-  Author: dependabot[bot]
-  Issue: "4904"
-  PR: "5141"
@@ -1,7 +0,0 @@
-kind: Dependencies
-body: "Bumping hologram version"
-time: 2022-05-06T16:09:07.000000-05:00
-custom:
-  Author: leahwicz
-  Issue: "5219"
-  PR: "5218"
@@ -1,7 +0,0 @@
-kind: Features
-body: Add selector method when reading selector definitions
-time: 2022-04-08T11:26:10.713088+10:00
-custom:
-  Author: danieldiamond
-  Issue: "4821"
-  PR: "4827"
@@ -1,7 +0,0 @@
-kind: Features
-body: Add set and zip function to contexts
-time: 2022-04-23T23:17:56.851793+12:00
-custom:
-  Author: jeremyyeo
-  Issue: "2345"
-  PR: "5107"
@@ -1,7 +0,0 @@
-kind: Features
-body: Adds itertools to modules Jinja namespace
-time: 2022-04-24T13:26:55.008246+01:00
-custom:
-  Author: bd3dowling
-  Issue: "5130"
-  PR: "5140"
@@ -1,7 +0,0 @@
-kind: Features
-body: allow target as an option in profile_template.yml
-time: 2022-04-28T06:56:44.511519-04:00
-custom:
-  Author: alexrosenfeld10
-  Issue: "5179"
-  PR: "5184"
@@ -1,7 +0,0 @@
-kind: Features
-body: 'seed: Add new macro get_csv_sql'
-time: 2022-05-03T14:29:34.847959075Z
-custom:
-  Author: adamantike
-  Issue: "5206"
-  PR: "5207"
@@ -1,7 +0,0 @@
-kind: Features
-body: Grants as Node Configs
-time: 2022-05-10T20:49:49.197999-04:00
-custom:
-  Author: gshank
-  Issue: "5189"
-  PR: "5230"
@@ -1,7 +0,0 @@
-kind: Features
-body: Adds file selectors and support for file selectors in the default method selector
-time: 2022-05-12T21:57:48.289674-07:00
-custom:
-  Author: jwills
-  Issue: "5240"
-  PR: "5241"
```
.changes/unreleased/Features-20220715-035555.yaml (new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+kind: Features
+body: Add reusable function for retrying adapter connections. Utilize said function
+  to add retries for Postgres (and Redshift).
+time: 2022-07-15T03:55:55.270637265+02:00
+custom:
+  Author: tomasfarias
+  Issue: "5022"
+  PR: "5432"
```
```diff
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Adding new cols to check_cols in snapshots
-time: 2022-03-17T21:09:16.977086+01:00
-custom:
-  Author: GtheSheep
-  Issue: "3146"
-  PR: "4893"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Truncate relation names when appending a suffix that will result in len > 63
-  characters using make_temp_relation and make_backup_relation macros
-time: 2022-03-22T17:37:53.320082-07:00
-custom:
-  Author: epapineau
-  Issue: "2869"
-  PR: "4921"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Restore ability to utilize `updated_at` for check_cols snapshots
-time: 2022-04-15T11:29:27.063462-06:00
-custom:
-  Author: dbeatty10
-  Issue: "5076"
-  PR: "5077"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Fix retry logic to return values after initial try
-time: 2022-04-22T13:12:27.239055-05:00
-custom:
-  Author: emmyoop
-  Issue: "5023"
-  PR: "5137"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Use yaml renderer (with target context) for rendering selectors
-time: 2022-04-22T13:56:45.147893-04:00
-custom:
-  Author: gshank
-  Issue: "5131"
-  PR: "5136"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Scrub secret env vars from CommandError in exception stacktrace
-time: 2022-04-25T20:39:24.365495+02:00
-custom:
-  Author: jtcohen6
-  Issue: "5151"
-  PR: "5152"
```
```diff
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Ensure the metric name does not contain spaces
-time: 2022-04-26T20:21:04.360693-04:00
-custom:
-  Author: gshank
-  Issue: "4572"
-  PR: "5173"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: When parsing 'all_sources' should be a list of unique dirs
-time: 2022-04-27T10:26:48.648388-04:00
-custom:
-  Author: gshank
-  Issue: "5120"
-  PR: "5176"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Add warning if yaml contains duplicate keys
-time: 2022-04-28T10:01:57.893956+12:00
-custom:
-  Author: jeremyyeo
-  Issue: "5114"
-  PR: "5146"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Modifying the drop_test_schema to work better with Redshift issues around locked
-  tables and current transactions
-time: 2022-04-29T16:07:42.750046-05:00
-custom:
-  Author: Mcknight-42
-  Issue: "5200"
-  PR: "5198"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Fix column comparison in snapshot_check_all_get_existing_columns for check-strategy
-  snapshots with explicit check_cols defined
-time: 2022-05-09T13:00:21.649028+02:00
-custom:
-  Author: jtcohen6
-  Issue: "5222"
-  PR: "5223"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Changed how `--select state:modified` detects changes for macros nodes depend
-  on
-time: 2022-05-09T13:13:12.889074-05:00
-custom:
-  Author: stu-k
-  Issue: "5202"
-  PR: "5224"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Fix column comparison in snapshot_check_all_get_existing_columns to use adapter.get_columns_in_relation
-time: 2022-05-11T12:32:38.313321+02:00
-custom:
-  Author: jtcohen6
-  Issue: "5222"
-  PR: "5232"
```
```diff
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Remove docs file from manifest when removing doc node
-time: 2022-05-18T13:46:10.167143-04:00
-custom:
-  Author: gshank
-  Issue: "4146"
-  PR: "5270"
@@ -1,8 +0,0 @@
-kind: Fixes
-body: Change node ancestor/descendant algo, fixes issue where downstream models aren't
-  run when using networkx >= 2.8.1
-time: 2022-06-01T13:59:08.886215-05:00
-custom:
-  Author: iknox-fa
-  Issue: "5286"
-  PR: "5326"
@@ -1,7 +0,0 @@
-kind: Fixes
-body: Fixing Windows color regression
-time: 2022-06-01T19:42:34.263009-04:00
-custom:
-  Author: leahwicz
-  Issue: "5191"
-  PR: "5327"
```
.changes/unreleased/Fixes-20220715-231148.yaml (new file, 7 lines)

```diff
@@ -0,0 +1,7 @@
+kind: Fixes
+body: Rename try to strict for more intuitiveness
+time: 2022-07-15T23:11:48.327928+12:00
+custom:
+  Author: jeremyyeo
+  Issue: "5475"
+  PR: "5477"
```
```diff
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Migrating 005_simple_seed to the new test framework.
-time: 2022-04-09T04:05:39.20045-07:00
-custom:
-  Author: versusfacit
-  Issue: "200"
-  PR: "5013"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Convert 029_docs_generate tests to new framework
-time: 2022-04-13T18:30:14.706391-04:00
-custom:
-  Author: gshank
-  Issue: "5035"
-  PR: "5058"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Move package deprecation check outside of package cache
-time: 2022-04-14T13:22:06.157579-05:00
-custom:
-  Author: emmyoop
-  Issue: "5068"
-  PR: "5069"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Mypy -> 0.942 + fixed import logic to allow for full mypy coverage
-time: 2022-04-27T11:21:27.499359-05:00
-custom:
-  Author: iknox-fa
-  Issue: "4805"
-  PR: "5171"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Converted dbt list tests to pytest
-time: 2022-04-27T14:06:28.882908-05:00
-custom:
-  Author: stu-k
-  Issue: "5049"
-  PR: "5178"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: 'Fix: Call str and repr for UnsetProfileConfig without a RuntimeException'
-time: 2022-05-03T19:52:12.793729384+02:00
-custom:
-  Author: tomasfarias
-  Issue: "5081"
-  PR: "5209"
```
```diff
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Improve tracking error logging message
-time: 2022-05-04T01:00:31.60387036+02:00
-custom:
-  Author: NicolasPA
-  Issue: "5197"
-  PR: "5211"
@@ -1,8 +0,0 @@
-kind: Under the Hood
-body: 'Clean up materialization logic: more consistent relation names, loading from
-  cache'
-time: 2022-05-09T09:26:28.551068+02:00
-custom:
-  Author: jtcohen6
-  Issue: "2869"
-  PR: "4921"
@@ -1,8 +0,0 @@
-kind: Under the Hood
-body: Use the default Python version for local dev and test instead of requiring Python
-  3.8
-time: 2022-05-18T09:51:44.603193-07:00
-custom:
-  Author: jwills
-  Issue: "5257"
-  PR: "5269"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Fix test for context set function
-time: 2022-05-18T14:55:22.554316-04:00
-custom:
-  Author: gshank
-  Issue: "5266"
-  PR: "5272"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Fix pip upgrade step in CI for Windows
-time: 2022-06-01T10:52:45.872931-04:00
-custom:
-  Author: gshank
-  Issue: "5321"
-  PR: "5320"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Fix unit test test_graph_selection
-time: 2022-06-01T11:26:48.725831-04:00
-custom:
-  Author: gshank
-  Issue: "5323"
-  PR: "5324"
@@ -1,7 +0,0 @@
-kind: Under the Hood
-body: Update context readme + clean up context code
-time: 2022-06-06T23:03:53.022568+02:00
-custom:
-  Author: jtcohen6
-  Issue: "4796"
-  PR: "5334"
```
```diff
@@ -14,6 +14,7 @@ kinds:
 - label: Docs
 - label: Under the Hood
 - label: Dependencies
+- label: Security
 custom:
 - key: Author
   label: GitHub Username(s) (separated by a single space if multiple)
```
.github/workflows/main.yml (vendored, 6 lines changed)

```diff
@@ -38,6 +38,7 @@ jobs:
     name: code-quality

     runs-on: ubuntu-latest
+    timeout-minutes: 10

     steps:
       - name: Check out the repository
@@ -65,6 +66,7 @@ jobs:
     name: unit test / python ${{ matrix.python-version }}

     runs-on: ubuntu-latest
+    timeout-minutes: 10

     strategy:
       fail-fast: false
@@ -109,6 +111,7 @@ jobs:
     name: integration test / python ${{ matrix.python-version }} / ${{ matrix.os }}

     runs-on: ${{ matrix.os }}
+    timeout-minutes: 45

     strategy:
       fail-fast: false
@@ -125,6 +128,9 @@ jobs:
       TOXENV: integration
       PYTEST_ADDOPTS: "-v --color=yes -n4 --csv integration_results.csv"
       DBT_INVOCATION_ENV: github-actions
+      DBT_TEST_USER_1: dbt_test_user_1
+      DBT_TEST_USER_2: dbt_test_user_2
+      DBT_TEST_USER_3: dbt_test_user_3

     steps:
       - name: Check out the repository
```
.github/workflows/release-branch-tests.yml (vendored, new file, 62 lines)

```diff
@@ -0,0 +1,62 @@
+# **what?**
+# The purpose of this workflow is to trigger CI to run for each
+# release branch and main branch on a regular cadence. If the CI workflow
+# fails for a branch, it will post to dev-core-alerts to raise awareness.
+# The 'aurelien-baudet/workflow-dispatch' Action triggers the existing
+# CI workflow file on the given branch to run so that even if we change the
+# CI workflow file in the future, the one that is tailored for the given
+# release branch will be used.
+
+# **why?**
+# Ensures release branches and main are always shippable and not broken.
+# Also, can catch any dependencies shifting beneath us that might
+# introduce breaking changes (could also impact Cloud).
+
+# **when?**
+# Mainly on a schedule of 9:00, 13:00, 18:00 UTC every day.
+# Manual trigger can also test on demand
+
+name: Release branch scheduled testing
+
+on:
+  schedule:
+    - cron: '0 9,13,18 * * *' # 9:00, 13:00, 18:00 UTC
+
+  workflow_dispatch: # for manual triggering
+
+# no special access is needed
+permissions: read-all
+
+jobs:
+  kick-off-ci:
+    name: Kick-off CI
+    runs-on: ubuntu-latest
+
+    strategy:
+      # must run CI 1 branch at a time b/c the workflow-dispatch Action polls for
+      # latest run for results and it gets confused when we kick off multiple runs
+      # at once. There is a race condition so we will just run in sequential order.
+      max-parallel: 1
+      fail-fast: false
+      matrix:
+        branch: [1.0.latest, 1.1.latest, main]
+
+    steps:
+      - name: Call CI workflow for ${{ matrix.branch }} branch
+        id: trigger-step
+        uses: aurelien-baudet/workflow-dispatch@v2.1.1
+        with:
+          workflow: main.yml
+          ref: ${{ matrix.branch }}
+          token: ${{ secrets.FISHTOWN_BOT_PAT }}
+
+      - name: Post failure to Slack
+        uses: ravsamhq/notify-slack-action@v1
+        if: ${{ always() && !contains(steps.trigger-step.outputs.workflow-conclusion,'success') }}
+        with:
+          status: ${{ job.status }}
+          notification_title: 'dbt-core scheduled run of "${{ matrix.branch }}" branch not successful'
+          message_format: ':x: CI on branch "${{ matrix.branch }}" ${{ steps.trigger-step.outputs.workflow-conclusion }}'
+          footer: 'Linked failed CI run ${{ steps.trigger-step.outputs.workflow-url }}'
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}
```
```diff
@@ -30,6 +30,11 @@ jobs:
       LOG_DIR: "/home/runner/work/dbt-core/dbt-core/logs"
       # tells integration tests to output into json format
       DBT_LOG_FORMAT: "json"
+      # Additional test users
+      DBT_TEST_USER_1: dbt_test_user_1
+      DBT_TEST_USER_2: dbt_test_user_2
+      DBT_TEST_USER_3: dbt_test_user_3

     steps:
       - name: checkout dev
         uses: actions/checkout@v2
```
```diff
@@ -10,6 +10,8 @@

 For information on prior major and minor releases, see their changelogs:

+* [1.2](https://github.com/dbt-labs/dbt-core/blob/1.2.latest/CHANGELOG.md)
 * [1.1](https://github.com/dbt-labs/dbt-core/blob/1.1.latest/CHANGELOG.md)
 * [1.0](https://github.com/dbt-labs/dbt-core/blob/1.0.latest/CHANGELOG.md)
 * [0.21](https://github.com/dbt-labs/dbt-core/blob/0.21.latest/CHANGELOG.md)
```
```diff
@@ -1 +1,30 @@
# Adapters README

The Adapters module is responsible for defining database connection methods, caching information from databases, how relations are defined, and the two major connection types we have: base and sql.

# Directories

## `base`

Defines the base implementation Adapters can use to build out full functionality.

## `sql`

Defines a SQL implementation for adapters that inherits the base implementation above, and comes with premade methods and macros that can be overwritten as needed per adapter (the most common type of adapter).

# Files

## `cache.py`

Caches information from the database.

## `factory.py`

Defines how we generate adapter objects.

## `protocol.py`

Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods.

## `reference_keys.py`

Configures the naming scheme for cache elements to be universal.
```
```diff
@@ -1,10 +1,24 @@
 import abc
 import os
+from time import sleep
+import sys

 # multiprocessing.RLock is a function returning this type
 from multiprocessing.synchronize import RLock
 from threading import get_ident
-from typing import Dict, Tuple, Hashable, Optional, ContextManager, List
+from typing import (
+    Any,
+    Dict,
+    Tuple,
+    Hashable,
+    Optional,
+    ContextManager,
+    List,
+    Type,
+    Union,
+    Iterable,
+    Callable,
+)

 import agate

@@ -21,6 +35,7 @@ from dbt.contracts.graph.manifest import Manifest
 from dbt.adapters.base.query_headers import (
     MacroQueryStringSetter,
 )
+from dbt.events import AdapterLogger
 from dbt.events.functions import fire_event
 from dbt.events.types import (
     NewConnection,
@@ -34,6 +49,9 @@ from dbt.events.types import (
 )
 from dbt import flags

+SleepTime = Union[int, float]  # As taken by time.sleep.
+AdapterHandle = Any  # Adapter connection handle objects can be any class.
+
```
```diff
 class BaseConnectionManager(metaclass=abc.ABCMeta):
     """Methods to implement:
@@ -159,6 +177,94 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
         conn.name = conn_name
         return conn

+    @classmethod
+    def retry_connection(
+        cls,
+        connection: Connection,
+        connect: Callable[[], AdapterHandle],
+        logger: AdapterLogger,
+        retryable_exceptions: Iterable[Type[Exception]],
+        retry_limit: int = 1,
+        retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1,
+        _attempts: int = 0,
+    ) -> Connection:
+        """Given a Connection, set its handle by calling connect.
+
+        The calls to connect will be retried up to retry_limit times to deal with transient
+        connection errors. By default, one retry will be attempted if retryable_exceptions is set.
+
+        :param Connection connection: An instance of a Connection that needs a handle to be set,
+            usually when attempting to open it.
+        :param connect: A callable that returns the appropriate connection handle for a
+            given adapter. This callable will be retried retry_limit times if a subclass of any
+            Exception in retryable_exceptions is raised by connect.
+        :type connect: Callable[[], AdapterHandle]
+        :param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When
+            handling expected errors, we call debug, and call warning on unexpected errors or when
+            all retry attempts have been exhausted.
+        :param retryable_exceptions: An iterable of exception classes that if raised by
+            connect should trigger a retry.
+        :type retryable_exceptions: Iterable[Type[Exception]]
+        :param int retry_limit: How many times to retry the call to connect. If this limit
+            is exceeded before a successful call, a FailedToConnectException will be raised.
+            Must be non-negative.
+        :param retry_timeout: Time to wait between attempts to connect. Can also take a
+            Callable that takes the number of attempts so far, beginning at 0, and returns an int
+            or float to be passed to time.sleep.
+        :type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1
+        :param int _attempts: Parameter used to keep track of the number of attempts in calling the
+            connect function across recursive calls. Passed as an argument to retry_timeout if it
+            is a Callable. This parameter should not be set by the initial caller.
+        :raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without
+            successfully acquiring a handle.
+        :return: The given connection with its appropriate state and handle attributes set
+            depending on whether we successfully acquired a handle or not.
+        """
+        timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout
+        if timeout < 0:
+            raise dbt.exceptions.FailedToConnectException(
+                "retry_timeout cannot be negative or return a negative time."
+            )
+
+        if retry_limit < 0 or retry_limit > sys.getrecursionlimit():
+            # This guard is not perfect; others may add to the recursion limit (e.g. built-ins).
+            connection.handle = None
+            connection.state = ConnectionState.FAIL
+            raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative")
+
+        try:
+            connection.handle = connect()
+            connection.state = ConnectionState.OPEN
+            return connection
+
+        except tuple(retryable_exceptions) as e:
+            if retry_limit <= 0:
+                connection.handle = None
+                connection.state = ConnectionState.FAIL
+                raise dbt.exceptions.FailedToConnectException(str(e))
+
+            logger.debug(
+                f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n"
+                f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n"
+                f"Error:\n{e}"
+            )
+
+            sleep(timeout)
+            return cls.retry_connection(
+                connection=connection,
+                connect=connect,
+                logger=logger,
+                retry_limit=retry_limit - 1,
+                retry_timeout=retry_timeout,
+                retryable_exceptions=retryable_exceptions,
+                _attempts=_attempts + 1,
+            )
+
+        except Exception as e:
+            connection.handle = None
+            connection.state = ConnectionState.FAIL
+            raise dbt.exceptions.FailedToConnectException(str(e))
```
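The changelog entry above ("Add reusable function for retrying adapter connections") lands here. A minimal sketch of how an adapter subclass might wire `retry_connection` into its `open()`, assuming a psycopg2-backed adapter; the class body and credential fields are illustrative, not part of this diff:

```python
import psycopg2  # assumed driver, per the Postgres/Redshift retry changelog entry

from dbt.adapters.base import BaseConnectionManager  # assumed import path
from dbt.events import AdapterLogger

logger = AdapterLogger("Postgres")


class PostgresConnectionManager(BaseConnectionManager):
    TYPE = "postgres"

    @classmethod
    def open(cls, connection):
        credentials = connection.credentials

        def connect():
            # Returns the raw handle; retried below on transient OperationalErrors.
            return psycopg2.connect(
                dbname=credentials.database,
                user=credentials.user,
                host=credentials.host,
            )

        return cls.retry_connection(
            connection,
            connect=connect,
            logger=logger,
            retryable_exceptions=[psycopg2.OperationalError],
            retry_limit=3,
            retry_timeout=lambda attempt: 2**attempt,  # 1s, 2s, 4s between attempts
        )
```

Passing a callable for `retry_timeout` gives a backoff schedule; a plain number gives a fixed wait.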
```diff
     @abc.abstractmethod
     def cancel_open(self) -> Optional[List[str]]:
         """Cancel all open connections on the adapter. (passable)"""
@@ -166,7 +272,8 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
             "`cancel_open` is not implemented for this adapter!"
         )

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def open(cls, connection: Connection) -> Connection:
         """Open the given connection on the adapter and return it.
```
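This hunk is the first of many below that swap `@abc.abstractclassmethod`, deprecated since Python 3.3, for stacked `@classmethod` and `@abc.abstractmethod`. A self-contained illustration of the replacement idiom:

```python
import abc


class Adapter(abc.ABC):
    @classmethod
    @abc.abstractmethod
    def date_function(cls) -> str:
        """Concrete adapters must report their database's date function."""
        raise NotImplementedError


class PostgresLike(Adapter):
    @classmethod
    def date_function(cls) -> str:
        return "now()"


print(PostgresLike.date_function())  # now()
```

Note the order: `@classmethod` goes on the outside, exactly as in the hunks that follow.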
```diff
@@ -159,6 +159,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         - convert_datetime_type
         - convert_date_type
         - convert_time_type
+        - standardize_grants_dict

     Macros:
         - get_catalog
@@ -434,12 +435,14 @@ class BaseAdapter(metaclass=AdapterMeta):
     ###
     # Abstract methods for database-specific values, attributes, and types
     ###
-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def date_function(cls) -> str:
         """Get the date function used by this adapter's database."""
         raise NotImplementedException("`date_function` is not implemented for this adapter!")

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def is_cancelable(cls) -> bool:
         raise NotImplementedException("`is_cancelable` is not implemented for this adapter!")
```
```diff
@@ -536,6 +539,33 @@ class BaseAdapter(metaclass=AdapterMeta):
             "`list_relations_without_caching` is not implemented for this " "adapter!"
         )

+    ###
+    # Methods about grants
+    ###
+    @available
+    def standardize_grants_dict(self, grants_table: agate.Table) -> dict:
+        """Translate the result of `show grants` (or equivalent) to match the
+        grants which a user would configure in their project.
+
+        Ideally, the SQL to show grants should also be filtering:
+        filter OUT any grants TO the current user/role (e.g. OWNERSHIP).
+        If that's not possible in SQL, it can be done in this method instead.
+
+        :param grants_table: An agate table containing the query result of
+            the SQL returned by get_show_grant_sql
+        :return: A standardized dictionary matching the `grants` config
+        :rtype: dict
+        """
+        grants_dict: Dict[str, List[str]] = {}
+        for row in grants_table:
+            grantee = row["grantee"]
+            privilege = row["privilege_type"]
+            if privilege in grants_dict.keys():
+                grants_dict[privilege].append(grantee)
+            else:
+                grants_dict.update({privilege: [grantee]})
+        return grants_dict
+
     ###
     # Provided methods about relations
     ###
```
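A sketch of what `standardize_grants_dict` produces for a typical `show grants` result, built here from an in-memory agate table with invented grantees:

```python
import agate

rows = [
    ("reporter", "select"),
    ("analyst", "select"),
    ("analyst", "insert"),
]
grants_table = agate.Table(rows, column_names=["grantee", "privilege_type"])

# Same pivot as the method above: {privilege: [grantees, ...]}
grants_dict = {}
for row in grants_table:
    grants_dict.setdefault(row["privilege_type"], []).append(row["grantee"])

print(grants_dict)  # {'select': ['reporter', 'analyst'], 'insert': ['analyst']}
```

This standardized shape matches the `grants` config that nodes declare.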
```diff
@@ -734,7 +764,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         raise NotImplementedException("`drop_schema` is not implemented for this adapter!")

     @available
-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def quote(cls, identifier: str) -> str:
         """Quote the given identifier, as appropriate for the database."""
         raise NotImplementedException("`quote` is not implemented for this adapter!")
@@ -780,7 +811,8 @@ class BaseAdapter(metaclass=AdapterMeta):
     # Conversions: These must be implemented by concrete implementations, for
     # converting agate types into their sql equivalents.
     ###
-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Text
         type for the given agate table and column index.
@@ -791,7 +823,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedException("`convert_text_type` is not implemented for this adapter!")

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Number
         type for the given agate table and column index.
@@ -802,7 +835,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedException("`convert_number_type` is not implemented for this adapter!")

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Boolean
         type for the given agate table and column index.
@@ -815,7 +849,8 @@ class BaseAdapter(metaclass=AdapterMeta):
             "`convert_boolean_type` is not implemented for this adapter!"
         )

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.DateTime
         type for the given agate table and column index.
@@ -828,7 +863,8 @@ class BaseAdapter(metaclass=AdapterMeta):
             "`convert_datetime_type` is not implemented for this adapter!"
         )

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the agate.Date
         type for the given agate table and column index.
@@ -839,7 +875,8 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedException("`convert_date_type` is not implemented for this adapter!")

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
         """Return the type in the database that best maps to the
         agate.TimeDelta type for the given agate table and column index.
```
```diff
@@ -77,7 +77,8 @@ class SQLConnectionManager(BaseConnectionManager):

         return connection, cursor

-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def get_response(cls, cursor: Any) -> AdapterResponse:
         """Get the status of the cursor."""
         raise dbt.exceptions.NotImplementedException(
```
```diff
@@ -8,7 +8,6 @@ try:
 except ImportError:
     from yaml import Loader, SafeLoader, Dumper  # type: ignore # noqa: F401

-from dbt.ui import warning_tag

 YAML_ERROR_MESSAGE = """
 Syntax error near line {line_number}
@@ -21,26 +20,6 @@ Raw Error:
 """.strip()


-class UniqueKeyLoader(SafeLoader):
-    """A subclass that checks for unique yaml mapping nodes.
-
-    This class extends `SafeLoader` from the `yaml` library to check for
-    unique top level keys (mapping nodes). See issue (https://github.com/yaml/pyyaml/issues/165)
-    and solution (https://gist.github.com/pypt/94d747fe5180851196eb?permalink_comment_id=4015118).
-    """
-
-    def construct_mapping(self, node, deep=False):
-        mapping = set()
-        for key_node, value_node in node.value:
-            key = self.construct_object(key_node, deep=deep)
-            if key in mapping:
-                raise dbt.exceptions.DuplicateYamlKeyException(
-                    f"Duplicate {key!r} key found in yaml file"
-                )
-            mapping.add(key)
-        return super().construct_mapping(node, deep)
-
-
 def line_no(i, line, width=3):
     line_number = str(i).ljust(width)
     return "{}| {}".format(line_number, line)
@@ -69,7 +48,7 @@ def contextualized_yaml_error(raw_contents, error):


 def safe_load(contents) -> Optional[Dict[str, Any]]:
-    return yaml.load(contents, Loader=UniqueKeyLoader)
+    return yaml.load(contents, Loader=SafeLoader)


 def load_yaml_text(contents, path=None):
@@ -82,7 +61,3 @@ def load_yaml_text(contents, path=None):
         error = str(e)

         raise dbt.exceptions.ValidationException(error)
-    except dbt.exceptions.DuplicateYamlKeyException as e:
-        # TODO: We may want to raise an exception instead of a warning in the future.
-        e.msg = f"{e} {path.searched_path}/{path.relative_path}."
-        dbt.exceptions.warn_or_raise(e, log_fmt=warning_tag("{}"))
```
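This side of the diff drops the duplicate-key warning (its changelog entry, PR 5146, is also deleted above). The removed `UniqueKeyLoader` is a reusable PyYAML pattern on its own; a standalone sketch, using a plain `ValueError` in place of dbt's exception:

```python
import yaml


class UniqueKeyLoader(yaml.SafeLoader):
    """Reject mappings that repeat a key; plain SafeLoader silently keeps the last one."""

    def construct_mapping(self, node, deep=False):
        seen = set()
        for key_node, _value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key in seen:
                raise ValueError(f"Duplicate {key!r} key found in yaml file")
            seen.add(key)
        return super().construct_mapping(node, deep)


print(yaml.load("a: 1\nb: 2", Loader=UniqueKeyLoader))  # {'a': 1, 'b': 2}
yaml.load("a: 1\na: 2", Loader=UniqueKeyLoader)  # raises ValueError
```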
```diff
@@ -397,6 +397,8 @@ class Compiler:
             linker.dependency(node.unique_id, (manifest.nodes[dependency].unique_id))
         elif dependency in manifest.sources:
             linker.dependency(node.unique_id, (manifest.sources[dependency].unique_id))
+        elif dependency in manifest.metrics:
+            linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id))
         else:
             dependency_not_found(node, dependency)
```
```diff
@@ -15,7 +15,7 @@ from typing_extensions import Protocol, runtime_checkable
 import hashlib
 import os

-from dbt import deprecations
+from dbt import flags, deprecations
 from dbt.clients.system import resolve_path_from_base
 from dbt.clients.system import path_exists
 from dbt.clients.system import load_file_contents
@@ -142,6 +142,13 @@ def _all_source_paths(
 T = TypeVar("T")


+def flag_or(flag: Optional[T], value: Optional[T], default: T) -> T:
+    if flag is None:
+        return value_or(value, default)
+    else:
+        return flag
+
+
 def value_or(value: Optional[T], default: T) -> T:
     if value is None:
         return default
```
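`flag_or` layers a flag on top of the existing `value_or` fallback, so precedence reads: flag, then project config, then hard-coded default. For example:

```python
print(flag_or(None, None, "target"))        # 'target'  (nothing set, default wins)
print(flag_or(None, "out", "target"))       # 'out'     (dbt_project.yml value)
print(flag_or("cli_dir", "out", "target"))  # 'cli_dir' (flag overrides everything)
```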
```diff
@@ -356,9 +363,9 @@ class PartialProject(RenderComponents):

         docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths)
         asset_paths: List[str] = value_or(cfg.asset_paths, [])
-        target_path: str = value_or(cfg.target_path, "target")
+        target_path: str = flag_or(flags.TARGET_PATH, cfg.target_path, "target")
         clean_targets: List[str] = value_or(cfg.clean_targets, [target_path])
-        log_path: str = value_or(cfg.log_path, "logs")
+        log_path: str = flag_or(flags.LOG_PATH, cfg.log_path, "logs")
         packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages")
         # in the default case we'll populate this once we know the adapter type
         # It would be nice to just pass along a Quoting here, but that would
```
```diff
@@ -178,7 +178,12 @@ class SecretRenderer(BaseRenderer):
         return "Secret"

     def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
+        # First, standard Jinja rendering, with special handling for 'secret' environment variables
+        # "{{ env_var('DBT_SECRET_ENV_VAR') }}" -> "$$$DBT_SECRET_START$$$DBT_SECRET_ENV_{VARIABLE_NAME}$$$DBT_SECRET_END$$$"
+        # This prevents Jinja manipulation of secrets via macros/filters that might leak partial/modified values in logs
         rendered = super().render_value(value, keypath)
+        # Now, detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$)
+        # and replace them with the actual secret value
         if SECRET_ENV_PREFIX in str(rendered):
             search_group = f"({SECRET_ENV_PREFIX}(.*))"
             pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$")
```
```diff
@@ -1,6 +1,6 @@
 import json
 import os
-from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set
+from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List

 from dbt import flags
 from dbt import tracking
```
```diff
@@ -474,19 +474,17 @@ class BaseContext(metaclass=ContextMeta):

     @contextmember
     @staticmethod
-    def try_set(value: Iterable[Any]) -> Set[Any]:
-        """The `try_set` context method can be used to convert any iterable
+    def set_strict(value: Iterable[Any]) -> Set[Any]:
+        """The `set_strict` context method can be used to convert any iterable
         to a sequence of iterable elements that are unique (a set). The
-        difference to the `set` context method is that the `try_set` method
+        difference to the `set` context method is that the `set_strict` method
         will raise an exception on a TypeError.

         :param value: The iterable
-        :param default: A default value to return if the `value` argument
-            is not an iterable

         Usage:
             {% set my_list = [1, 2, 2, 3] %}
-            {% set my_set = try_set(my_list) %}
+            {% set my_set = set_strict(my_list) %}
             {% do log(my_set) %} {# {1, 2, 3} #}
         """
         try:
@@ -497,7 +495,7 @@ class BaseContext(metaclass=ContextMeta):
     @contextmember("zip")
     @staticmethod
     def _zip(*args: Iterable[Any], default: Any = None) -> Optional[Iterable[Any]]:
-        """The `try_zip` context method can be used to return
+        """The `zip` context method can be used to return
         an iterator of tuples, where the i-th tuple contains the i-th
         element from each of the argument iterables.

@@ -518,21 +516,19 @@ class BaseContext(metaclass=ContextMeta):

     @contextmember
     @staticmethod
-    def try_zip(*args: Iterable[Any]) -> Iterable[Any]:
-        """The `try_zip` context method can be used to return
+    def zip_strict(*args: Iterable[Any]) -> Iterable[Any]:
+        """The `zip_strict` context method can be used to return
         an iterator of tuples, where the i-th tuple contains the i-th
         element from each of the argument iterables. The difference to the
-        `zip` context method is that the `try_zip` method will raise an
+        `zip` context method is that the `zip_strict` method will raise an
         exception on a TypeError.

         :param *args: Any number of iterables
-        :param default: A default value to return if `*args` is not
-            iterable

         Usage:
             {% set my_list_a = [1, 2] %}
             {% set my_list_b = ['alice', 'bob'] %}
-            {% set my_zip = try_zip(my_list_a, my_list_b) | list %}
+            {% set my_zip = zip_strict(my_list_a, my_list_b) | list %}
             {% do log(my_set) %} {# [(1, 'alice'), (2, 'bob')] #}
         """
         try:
```
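These renames (`try_set` to `set_strict`, `try_zip` to `zip_strict`) match the "Rename try to strict" changelog entry added earlier in this diff. A sketch of the behavioral contrast, with a lenient stand-in for the `set` context method:

```python
def set_lenient(value, default=None):
    # like the `set` context method: swallow TypeError, hand back the default
    try:
        return set(value)
    except TypeError:
        return default


def set_strict(value):
    # like `set_strict`: let the TypeError escape to the caller
    return set(value)


print(set_lenient(1))         # None: 1 is not iterable, default returned
print(set_strict([1, 2, 2]))  # {1, 2}
set_strict(1)                 # raises TypeError
```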
```diff
@@ -657,6 +653,35 @@ class BaseContext(metaclass=ContextMeta):
         print(msg)
         return ""

+    @contextmember
+    @staticmethod
+    def diff_of_two_dicts(
+        dict_a: Dict[str, List[str]], dict_b: Dict[str, List[str]]
+    ) -> Dict[str, List[str]]:
+        """
+        Given two dictionaries of type Dict[str, List[str]]:
+            dict_a = {'key_x': ['value_1', 'VALUE_2'], 'KEY_Y': ['value_3']}
+            dict_b = {'key_x': ['value_1'], 'key_z': ['value_4']}
+        Return the same dictionary representation of dict_a MINUS dict_b,
+        performing a case-insensitive comparison between the strings in each.
+        All keys returned will be in the original case of dict_a.
+            returns {'key_x': ['VALUE_2'], 'KEY_Y': ['value_3']}
+        """
+
+        dict_diff = {}
+        dict_b_lowered = {k.casefold(): [x.casefold() for x in v] for k, v in dict_b.items()}
+        for k in dict_a:
+            if k.casefold() in dict_b_lowered.keys():
+                diff = []
+                for v in dict_a[k]:
+                    if v.casefold() not in dict_b_lowered[k.casefold()]:
+                        diff.append(v)
+                if diff:
+                    dict_diff.update({k: diff})
+            else:
+                dict_diff.update({k: dict_a[k]})
+        return dict_diff
+

 def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
     ctx = BaseContext(cli_vars)
```
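`diff_of_two_dicts` appears built for the grants workflow: diffing the grants found in the warehouse against those configured on the node, case-insensitively, yields what to grant or revoke. Using the shapes from the docstring:

```python
in_warehouse = {"select": ["REPORTER", "analyst"]}
configured = {"select": ["analyst"]}

# grants present in the warehouse but absent from the config
to_revoke = diff_of_two_dicts(in_warehouse, configured)
print(to_revoke)  # {'select': ['REPORTER']}
```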
```diff
@@ -40,6 +40,7 @@ from dbt.contracts.graph.parsed import (
     ParsedSeedNode,
     ParsedSourceDefinition,
 )
+from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference
 from dbt.exceptions import (
     CompilationException,
     ParsingException,
@@ -50,7 +51,9 @@ from dbt.exceptions import (
     missing_config,
     raise_compiler_error,
     ref_invalid_args,
+    metric_invalid_args,
     ref_target_not_found,
+    metric_target_not_found,
     ref_bad_context,
     source_target_not_found,
     wrapped_exports,
@@ -199,7 +202,7 @@ class BaseResolver(metaclass=abc.ABCMeta):
         return self.db_wrapper.Relation

     @abc.abstractmethod
-    def __call__(self, *args: str) -> Union[str, RelationProxy]:
+    def __call__(self, *args: str) -> Union[str, RelationProxy, MetricReference]:
         pass
```
```diff
@@ -265,6 +268,41 @@ class BaseSourceResolver(BaseResolver):
         return self.resolve(args[0], args[1])


+class BaseMetricResolver(BaseResolver):
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
+        ...
+
+    def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
+        if package is None:
+            return [name]
+        else:
+            return [package, name]
+
+    def validate_args(self, name: str, package: Optional[str]):
+        if not isinstance(name, str):
+            raise CompilationException(
+                f"The name argument to metric() must be a string, got {type(name)}"
+            )
+
+        if package is not None and not isinstance(package, str):
+            raise CompilationException(
+                f"The package argument to metric() must be a string or None, got {type(package)}"
+            )
+
+    def __call__(self, *args: str) -> MetricReference:
+        name: str
+        package: Optional[str] = None
+
+        if len(args) == 1:
+            name = args[0]
+        elif len(args) == 2:
+            package, name = args
+        else:
+            metric_invalid_args(self.model, args)
+        self.validate_args(name, package)
+        return self.resolve(name, package)
+
+
 class Config(Protocol):
     def __init__(self, model, context_config: Optional[ContextConfig]):
         ...
```
```diff
@@ -511,6 +549,34 @@ class RuntimeSourceResolver(BaseSourceResolver):
         return self.Relation.create_from_source(target_source)


+# `metric` implementations
+class ParseMetricResolver(BaseMetricResolver):
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
+        self.model.metrics.append(self._repack_args(name, package))
+
+        return MetricReference(name, package)
+
+
+class RuntimeMetricResolver(BaseMetricResolver):
+    def resolve(self, target_name: str, target_package: Optional[str] = None) -> MetricReference:
+        target_metric = self.manifest.resolve_metric(
+            target_name,
+            target_package,
+            self.current_project,
+            self.model.package_name,
+        )
+
+        if target_metric is None or isinstance(target_metric, Disabled):
+            # TODO : Use a different exception!!
+            metric_target_not_found(
+                self.model,
+                target_name,
+                target_package,
+            )
+
+        return ResolvedMetricReference(target_metric, self.manifest, self.Relation)
+
+
 # `var` implementations.
 class ModelConfiguredVar(Var):
     def __init__(
```
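Both resolvers inherit `BaseMetricResolver.__call__`, so `metric()` accepts the same one- or two-argument convention as `ref()`. A stripped-down model of that dispatch outside dbt:

```python
def metric(*args):
    # one argument: the metric name; two: package first, then name
    if len(args) == 1:
        package, name = None, args[0]
    elif len(args) == 2:
        package, name = args
    else:
        raise TypeError(f"metric() takes 1 or 2 arguments, got {len(args)}")
    return package, name


print(metric("revenue"))             # (None, 'revenue')
print(metric("finance", "revenue"))  # ('finance', 'revenue')
```

At parse time the reference is only recorded on the node; at runtime it is resolved against the manifest, as the two `resolve` implementations above show.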
```diff
@@ -568,6 +634,7 @@ class Provider(Protocol):
     Var: Type[ModelConfiguredVar]
     ref: Type[BaseRefResolver]
     source: Type[BaseSourceResolver]
+    metric: Type[BaseMetricResolver]


 class ParseProvider(Provider):
@@ -577,6 +644,7 @@ class ParseProvider(Provider):
     Var = ParseVar
     ref = ParseRefResolver
     source = ParseSourceResolver
+    metric = ParseMetricResolver


 class GenerateNameProvider(Provider):
@@ -586,6 +654,7 @@ class GenerateNameProvider(Provider):
     Var = RuntimeVar
     ref = ParseRefResolver
     source = ParseSourceResolver
+    metric = ParseMetricResolver


 class RuntimeProvider(Provider):
@@ -595,6 +664,7 @@ class RuntimeProvider(Provider):
     Var = RuntimeVar
     ref = RuntimeRefResolver
     source = RuntimeSourceResolver
+    metric = RuntimeMetricResolver


 class OperationProvider(RuntimeProvider):
```
```diff
@@ -778,6 +848,10 @@ class ProviderContext(ManifestContext):
     def source(self) -> Callable:
         return self.provider.source(self.db_wrapper, self.model, self.config, self.manifest)

+    @contextproperty
+    def metric(self) -> Callable:
+        return self.provider.metric(self.db_wrapper, self.model, self.config, self.manifest)
+
     @contextproperty("config")
     def ctx_config(self) -> Config:
         """The `config` variable exists to handle end-user configuration for
```
```diff
@@ -1355,7 +1429,7 @@ class MetricRefResolver(BaseResolver):
         if not isinstance(name, str):
             raise ParsingException(
                 f"In a metrics section in {self.model.original_file_path} "
-                f"the name argument to ref() must be a string"
+                "the name argument to ref() must be a string"
             )

@@ -1373,6 +1447,12 @@ def generate_parse_metrics(
             project,
             manifest,
         ),
+        "metric": ParseMetricResolver(
+            None,
+            metric,
+            project,
+            manifest,
+        ),
     }
```
```diff
@@ -183,6 +183,39 @@ class RefableLookup(dbtClassMixin):
         return manifest.nodes[unique_id]


+class MetricLookup(dbtClassMixin):
+    def __init__(self, manifest: "Manifest"):
+        self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
+        self.populate(manifest)
+
+    def get_unique_id(self, search_name, package: Optional[PackageName]):
+        return find_unique_id_for_package(self.storage, search_name, package)
+
+    def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"):
+        unique_id = self.get_unique_id(search_name, package)
+        if unique_id is not None:
+            return self.perform_lookup(unique_id, manifest)
+        return None
+
+    def add_metric(self, metric: ParsedMetric):
+        if metric.search_name not in self.storage:
+            self.storage[metric.search_name] = {}
+
+        self.storage[metric.search_name][metric.package_name] = metric.unique_id
+
+    def populate(self, manifest):
+        for metric in manifest.metrics.values():
+            if hasattr(metric, "name"):
+                self.add_metric(metric)
+
+    def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> ParsedMetric:
+        if unique_id not in manifest.metrics:
+            raise dbt.exceptions.InternalException(
+                f"Metric {unique_id} found in cache but not found in manifest"
+            )
+        return manifest.metrics[unique_id]
+
+
 # This handles both models/seeds/snapshots and sources
 class DisabledLookup(dbtClassMixin):
     def __init__(self, manifest: "Manifest"):
```
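`MetricLookup` mirrors the existing ref/source/doc lookups: a two-level map from search name to package to unique ID, populated once and re-validated against the manifest on each hit. A toy model of the storage shape:

```python
# storage: search_name -> {package_name: unique_id}
storage = {}


def add_metric(search_name, package_name, unique_id):
    storage.setdefault(search_name, {})[package_name] = unique_id


add_metric("revenue", "finance", "metric.finance.revenue")
add_metric("revenue", "my_project", "metric.my_project.revenue")

# a package-qualified lookup is a direct two-key read;
# package=None falls back to find_unique_id_for_package's any-package search
print(storage["revenue"]["finance"])  # metric.finance.revenue
```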
```diff
@@ -328,11 +361,6 @@ class Locality(enum.IntEnum):
     Root = 3


-class Specificity(enum.IntEnum):
-    Default = 1
-    Adapter = 2
-
-
 @dataclass
 class MacroCandidate:
     locality: Locality
@@ -355,12 +383,14 @@ class MacroCandidate:

 @dataclass
 class MaterializationCandidate(MacroCandidate):
-    specificity: Specificity
+    # specificity describes where in the inheritance chain this materialization candidate is
+    # a specificity of 0 means a materialization defined by the current adapter
+    # the highest specificity describes a default materialization. the value itself depends on
+    # how many adapters there are in the inheritance chain
+    specificity: int

     @classmethod
-    def from_macro(
-        cls, candidate: MacroCandidate, specificity: Specificity
-    ) -> "MaterializationCandidate":
+    def from_macro(cls, candidate: MacroCandidate, specificity: int) -> "MaterializationCandidate":
         return cls(
             locality=candidate.locality,
             macro=candidate.macro,
```
```diff
@@ -384,9 +414,9 @@ class MaterializationCandidate(MacroCandidate):
     def __lt__(self, other: object) -> bool:
         if not isinstance(other, MaterializationCandidate):
             return NotImplemented
-        if self.specificity < other.specificity:
-            return True
-        if self.specificity > other.specificity:
+        if self.specificity > other.specificity:
+            return True
+        if self.specificity < other.specificity:
             return False
         if self.locality < other.locality:
             return True
```
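With the `Specificity` enum gone, `__lt__` sorts on the raw integer first and `locality` second, so `candidates.last()` (visible in the manifest hunk further down) still selects the winner. A reduced model of that ordering, not dbt's `CandidateList`:

```python
from dataclasses import dataclass, field


@dataclass(order=True)
class Candidate:
    sort_key: tuple = field(init=False)
    specificity: int  # 0 = current adapter, highest = 'default'
    locality: int     # e.g. Core=1, Imported=2, Root=3

    def __post_init__(self):
        # higher specificity must sort earlier (lose), so negate it
        self.sort_key = (-self.specificity, self.locality)


candidates = [Candidate(2, 1), Candidate(0, 1), Candidate(1, 3)]
best = sorted(candidates)[-1]
print(best.specificity, best.locality)  # 0 1: the current adapter's materialization wins
```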
```diff
@@ -434,6 +464,9 @@ class Disabled(Generic[D]):
     target: D


+MaybeMetricNode = Optional[ParsedMetric]
+
+
 MaybeDocumentation = Optional[ParsedDocumentation]

@@ -595,6 +628,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
     _ref_lookup: Optional[RefableLookup] = field(
         default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
     )
+    _metric_lookup: Optional[MetricLookup] = field(
+        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
+    )
     _disabled_lookup: Optional[DisabledLookup] = field(
         default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
     )
```
```diff
@@ -671,18 +707,24 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             disabled_by_file_id[node.file_id] = node
         return disabled_by_file_id

+    def _get_parent_adapter_types(self, adapter_type: str) -> List[str]:
+        # This is duplicated logic from core/dbt/context/providers.py
+        # Ideally this would instead be incorporating actual dispatch logic
+        from dbt.adapters.factory import get_adapter_type_names
+
+        # order matters for dispatch:
+        # 1. current adapter
+        # 2. any parent adapters (dependencies)
+        # 3. 'default'
+        return get_adapter_type_names(adapter_type) + ["default"]
+
     def _materialization_candidates_for(
         self,
         project_name: str,
         materialization_name: str,
-        adapter_type: Optional[str],
+        adapter_type: str,
+        specificity: int,
     ) -> CandidateList:
-
-        if adapter_type is None:
-            specificity = Specificity.Default
-        else:
-            specificity = Specificity.Adapter
-
         full_name = dbt.utils.get_materialization_macro_name(
             materialization_name=materialization_name,
             adapter_type=adapter_type,
@@ -702,8 +744,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
                 project_name=project_name,
                 materialization_name=materialization_name,
                 adapter_type=atype,
+                specificity=specificity,  # where in the inheritance chain this candidate is
             )
-            for atype in (adapter_type, None)
+            for specificity, atype in enumerate(self._get_parent_adapter_types(adapter_type))
         )
         return candidates.last()
```
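The rewritten generator asks for one candidate per entry in the dispatch chain, with the enumeration index doubling as the specificity. Assuming `get_adapter_type_names` returns the adapter followed by its parents (the Redshift-on-Postgres case is illustrative):

```python
def get_adapter_type_names(adapter_type):
    # assumption: redshift inherits from postgres, as in the dbt adapter ecosystem
    parents = {"redshift": ["redshift", "postgres"]}
    return parents.get(adapter_type, [adapter_type])


search_order = get_adapter_type_names("redshift") + ["default"]
for specificity, atype in enumerate(search_order):
    print(specificity, atype)
# 0 redshift   (current adapter: most specific)
# 1 postgres   (parent adapter)
# 2 default    (built-in materialization: least specific)
```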
```diff
@@ -833,6 +876,12 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             self._ref_lookup = RefableLookup(self)
         return self._ref_lookup

+    @property
+    def metric_lookup(self) -> MetricLookup:
+        if self._metric_lookup is None:
+            self._metric_lookup = MetricLookup(self)
+        return self._metric_lookup
+
     def rebuild_ref_lookup(self):
         self._ref_lookup = RefableLookup(self)
```
```diff
@@ -908,6 +957,22 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             return Disabled(disabled[0])
         return None

+    def resolve_metric(
+        self,
+        target_metric_name: str,
+        target_metric_package: Optional[str],
+        current_project: str,
+        node_package: str,
+    ) -> MaybeMetricNode:
+        metric: Optional[ParsedMetric] = None
+
+        candidates = _search_packages(current_project, node_package, target_metric_package)
+        for pkg in candidates:
+            metric = self.metric_lookup.find(target_metric_name, pkg, self)
+            if metric is not None:
+                return metric
+        return None
+
     # Called by DocsRuntimeContext.doc
     def resolve_doc(
         self,
```
```diff
@@ -1072,6 +1137,7 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
             self._doc_lookup,
             self._source_lookup,
             self._ref_lookup,
+            self._metric_lookup,
             self._disabled_lookup,
             self._analysis_lookup,
         )
@@ -1091,7 +1157,7 @@ AnyManifest = Union[Manifest, MacroManifest]


 @dataclass
-@schema_version("manifest", 5)
+@schema_version("manifest", 6)
 class WritableManifest(ArtifactMixin):
     nodes: Mapping[UniqueID, ManifestNode] = field(
         metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
@@ -1135,6 +1201,10 @@ class WritableManifest(ArtifactMixin):
         )
     )

+    @classmethod
+    def compatible_previous_versions(self):
+        return [("manifest", 4), ("manifest", 5)]
+
     def __post_serialize__(self, dct):
         for unique_id, node in dct["nodes"].items():
             if "config_call_dict" in node:
```
core/dbt/contracts/graph/metrics.py (new file, 70 lines)

```diff
@@ -0,0 +1,70 @@
+from dbt.node_types import NodeType
+
+
+class MetricReference(object):
+    def __init__(self, metric_name, package_name=None):
+        self.metric_name = metric_name
+        self.package_name = package_name
+
+    def __str__(self):
+        return f"{self.metric_name}"
+
+
+class ResolvedMetricReference(MetricReference):
+    """
+    Simple proxy over a ParsedMetric which delegates property
+    lookups to the underlying node. Also adds helper functions
+    for working with metrics (ie. __str__ and templating functions)
+    """
+
+    def __init__(self, node, manifest, Relation):
+        super().__init__(node.name, node.package_name)
+        self.node = node
+        self.manifest = manifest
+        self.Relation = Relation
+
+    def __getattr__(self, key):
+        return getattr(self.node, key)
+
+    def __str__(self):
+        return f"{self.node.name}"
+
+    @classmethod
+    def parent_metrics(cls, metric_node, manifest):
+        yield metric_node
+
+        for parent_unique_id in metric_node.depends_on.nodes:
+            node = manifest.metrics.get(parent_unique_id)
+            if node and node.resource_type == NodeType.Metric:
+                yield from cls.parent_metrics(node, manifest)
+
+    def parent_models(self):
+        in_scope_metrics = list(self.parent_metrics(self.node, self.manifest))
+
+        to_return = {
+            "base": [],
+            "derived": [],
+        }
+        for metric in in_scope_metrics:
+            if metric.type == "expression":
+                to_return["derived"].append(
+                    {"metric_source": None, "metric": metric, "is_derived": True}
+                )
+            else:
+                for node_unique_id in metric.depends_on.nodes:
+                    node = self.manifest.nodes.get(node_unique_id)
+                    if node and node.resource_type in NodeType.refable():
+                        to_return["base"].append(
+                            {
+                                "metric_relation_node": node,
+                                "metric_relation": self.Relation.create(
+                                    database=node.database,
+                                    schema=node.schema,
+                                    identifier=node.alias,
+                                ),
+                                "metric": metric,
+                                "is_derived": False,
+                            }
+                        )
+
+        return to_return
```
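`parent_metrics` walks `depends_on.nodes` recursively; `parent_models` then splits the results into derived (`expression`) metrics and the refable nodes that base metrics read from. A toy walk over stub objects with the same shape:

```python
from types import SimpleNamespace as NS

metrics = {
    "metric.proj.margin": NS(
        name="margin", resource_type="metric", type="expression",
        depends_on=NS(nodes=["metric.proj.revenue"]),
    ),
    "metric.proj.revenue": NS(
        name="revenue", resource_type="metric", type="sum",
        depends_on=NS(nodes=["model.proj.orders"]),
    ),
}


def parent_metrics(node):
    # same traversal as ResolvedMetricReference.parent_metrics, minus the manifest
    yield node
    for uid in node.depends_on.nodes:
        child = metrics.get(uid)
        if child is not None:
            yield from parent_metrics(child)


for m in parent_metrics(metrics["metric.proj.margin"]):
    print(m.name, m.type)
# margin expression   (derived)
# revenue sum         (base; reads from model.proj.orders)
```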
```diff
@@ -7,7 +7,8 @@ from dbt.dataclass_schema import (
     ValidationError,
     register_pattern,
 )
-from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed
+from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed, Docs
 from dbt.contracts.graph.utils import validate_color
 from dbt.exceptions import InternalException, CompilationException
 from dbt.contracts.util import Replaceable, list_str
 from dbt import hooks
@@ -285,7 +286,7 @@ class BaseConfig(AdditionalPropertiesAllowed, Replaceable):
     # 'meta' moved here from node
     mergebehavior = {
         "append": ["pre-hook", "pre_hook", "post-hook", "post_hook", "tags"],
-        "update": ["quoting", "column_types", "meta"],
+        "update": ["quoting", "column_types", "meta", "docs"],
         "dict_key_append": ["grants"],
     }

@@ -460,6 +461,20 @@ class NodeConfig(NodeAndTestConfig):
     grants: Dict[str, Any] = field(
         default_factory=dict, metadata=MergeBehavior.DictKeyAppend.meta()
     )
+    docs: Docs = field(
+        default_factory=lambda: Docs(show=True),
+        metadata=MergeBehavior.Update.meta(),
+    )
+
+    # we validate that node_color has a suitable value to prevent dbt-docs from crashing
+    def __post_init__(self):
+        if self.docs.node_color:
+            node_color = self.docs.node_color
+            if not validate_color(node_color):
+                raise ValidationError(
+                    f"Invalid color name for docs.node_color: {node_color}. "
+                    "It is neither a valid HTML color name nor a valid HEX code."
+                )
+
     @classmethod
     def __pre_deserialize__(cls, data):
```
@@ -157,7 +157,6 @@ class ParsedNodeMixins(dbtClassMixin):
         self.created_at = time.time()
         self.description = patch.description
         self.columns = patch.columns
-        self.docs = patch.docs
 
     def get_materialization(self):
         return self.config.materialized
@@ -198,11 +197,12 @@ class ParsedNodeDefaults(NodeInfoMixin, ParsedNodeMandatory):
     tags: List[str] = field(default_factory=list)
     refs: List[List[str]] = field(default_factory=list)
     sources: List[List[str]] = field(default_factory=list)
+    metrics: List[List[str]] = field(default_factory=list)
     depends_on: DependsOn = field(default_factory=DependsOn)
     description: str = field(default="")
     columns: Dict[str, ColumnInfo] = field(default_factory=dict)
     meta: Dict[str, Any] = field(default_factory=dict)
-    docs: Docs = field(default_factory=Docs)
+    docs: Docs = field(default_factory=lambda: Docs(show=True))
     patch_path: Optional[str] = None
     compiled_path: Optional[str] = None
     build_path: Optional[str] = None
@@ -793,24 +793,32 @@ class ParsedExposure(UnparsedBaseNode, HasUniqueID, HasFqn):
     )
 
 
+@dataclass
+class MetricReference(dbtClassMixin, Replaceable):
+    sql: Optional[Union[str, int]]
+    unique_id: Optional[str]
+
+
 @dataclass
 class ParsedMetric(UnparsedBaseNode, HasUniqueID, HasFqn):
-    model: str
     name: str
     description: str
     label: str
     type: str
-    sql: Optional[str]
+    sql: str
     timestamp: Optional[str]
     filters: List[MetricFilter]
     time_grains: List[str]
     dimensions: List[str]
+    model: Optional[str] = None
+    model_unique_id: Optional[str] = None
     resource_type: NodeType = NodeType.Metric
     meta: Dict[str, Any] = field(default_factory=dict)
     tags: List[str] = field(default_factory=list)
     sources: List[List[str]] = field(default_factory=list)
     depends_on: DependsOn = field(default_factory=DependsOn)
     refs: List[List[str]] = field(default_factory=list)
+    metrics: List[List[str]] = field(default_factory=list)
     created_at: float = field(default_factory=lambda: time.time())
 
     @property
@@ -1,15 +1,11 @@
 from dbt.node_types import NodeType
-from dbt.contracts.util import (
-    AdditionalPropertiesMixin,
-    Mergeable,
-    Replaceable,
-)
+from dbt.contracts.util import AdditionalPropertiesMixin, Mergeable, Replaceable
 
 # trigger the PathEncoder
 import dbt.helper_types  # noqa:F401
 from dbt.exceptions import CompilationException, ParsingException
 
-from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin
+from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin, ValidationError
 
 from dataclasses import dataclass, field
 from datetime import timedelta
@@ -80,6 +76,7 @@ class UnparsedRunHook(UnparsedNode):
 @dataclass
 class Docs(dbtClassMixin, Replaceable):
     show: bool = True
+    node_color: Optional[str] = None
 
 
 @dataclass
@@ -448,12 +445,15 @@ class MetricFilter(dbtClassMixin, Replaceable):
 
 @dataclass
 class UnparsedMetric(dbtClassMixin, Replaceable):
-    model: str
+    # TODO : verify that this disallows metric names with spaces
+    # TODO: fix validation that you broke :p
+    # name: Identifier
     name: str
     label: str
     type: str
+    model: Optional[str] = None
     description: str = ""
-    sql: Optional[str] = None
+    sql: Union[str, int] = ""
     timestamp: Optional[str] = None
     time_grains: List[str] = field(default_factory=list)
     dimensions: List[str] = field(default_factory=list)
@@ -463,6 +463,15 @@ class UnparsedMetric(dbtClassMixin, Replaceable):
 
     @classmethod
     def validate(cls, data):
         # super().validate(data)
+        # TODO: putting this back for now to get tests passing. Do we want to implement name: Identifier?
+        super(UnparsedMetric, cls).validate(data)
         if "name" in data and " " in data["name"]:
             raise ParsingException(f"Metrics name '{data['name']}' cannot contain spaces")
+
+        # TODO: Expressions _cannot_ have `model` properties
+        if data.get("model") is None and data.get("type") != "expression":
+            raise ValidationError("Non-expression metrics require a 'model' property")
+
+        if data.get("model") is not None and data.get("type") == "expression":
+            raise ValidationError("Expression metrics cannot have a 'model' property")
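A standalone restatement of the same three checks, useful for seeing which metric definitions would pass. The function below is illustrative, not the dbt-core implementation:

# Illustrative re-statement of UnparsedMetric.validate's rules.
def check_metric(data: dict) -> None:
    if "name" in data and " " in data["name"]:
        raise ValueError(f"Metrics name '{data['name']}' cannot contain spaces")
    if data.get("model") is None and data.get("type") != "expression":
        raise ValueError("Non-expression metrics require a 'model' property")
    if data.get("model") is not None and data.get("type") == "expression":
        raise ValueError("Expression metrics cannot have a 'model' property")


check_metric({"name": "revenue", "type": "sum", "model": "ref('orders')"})  # ok
check_metric({"name": "profit", "type": "expression"})                      # ok
try:
    check_metric({"name": "bad metric", "type": "expression"})
except ValueError as e:
    print(e)  # cannot contain spaces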
core/dbt/contracts/graph/utils.py (new file, 153 lines)
@@ -0,0 +1,153 @@
import re

HTML_COLORS = [
    "aliceblue",
    "antiquewhite",
    "aqua",
    "aquamarine",
    "azure",
    "beige",
    "bisque",
    "black",
    "blanchedalmond",
    "blue",
    "blueviolet",
    "brown",
    "burlywood",
    "cadetblue",
    "chartreuse",
    "chocolate",
    "coral",
    "cornflowerblue",
    "cornsilk",
    "crimson",
    "cyan",
    "darkblue",
    "darkcyan",
    "darkgoldenrod",
    "darkgray",
    "darkgreen",
    "darkkhaki",
    "darkmagenta",
    "darkolivegreen",
    "darkorange",
    "darkorchid",
    "darkred",
    "darksalmon",
    "darkseagreen",
    "darkslateblue",
    "darkslategray",
    "darkturquoise",
    "darkviolet",
    "deeppink",
    "deepskyblue",
    "dimgray",
    "dodgerblue",
    "firebrick",
    "floralwhite",
    "forestgreen",
    "fuchsia",
    "gainsboro",
    "ghostwhite",
    "gold",
    "goldenrod",
    "gray",
    "green",
    "greenyellow",
    "honeydew",
    "hotpink",
    "indianred",
    "indigo",
    "ivory",
    "khaki",
    "lavender",
    "lavenderblush",
    "lawngreen",
    "lemonchiffon",
    "lightblue",
    "lightcoral",
    "lightcyan",
    "lightgoldenrodyellow",
    "lightgray",
    "lightgreen",
    "lightpink",
    "lightsalmon",
    "lightsalmon",
    "lightseagreen",
    "lightskyblue",
    "lightslategray",
    "lightsteelblue",
    "lightyellow",
    "lime",
    "limegreen",
    "linen",
    "magenta",
    "maroon",
    "mediumaquamarine",
    "mediumblue",
    "mediumorchid",
    "mediumpurple",
    "mediumseagreen",
    "mediumslateblue",
    "mediumslateblue",
    "mediumspringgreen",
    "mediumturquoise",
    "mediumvioletred",
    "midnightblue",
    "mintcream",
    "mistyrose",
    "moccasin",
    "navajowhite",
    "navy",
    "oldlace",
    "olive",
    "olivedrab",
    "orange",
    "orangered",
    "orchid",
    "palegoldenrod",
    "palegreen",
    "paleturquoise",
    "palevioletred",
    "papayawhip",
    "peachpuff",
    "peru",
    "pink",
    "plum",
    "powderblue",
    "purple",
    "rebeccapurple",
    "red",
    "rosybrown",
    "royalblue",
    "saddlebrown",
    "salmon",
    "sandybrown",
    "seagreen",
    "seashell",
    "sienna",
    "silver",
    "skyblue",
    "slateblue",
    "slategray",
    "snow",
    "springgreen",
    "steelblue",
    "tan",
    "teal",
    "thistle",
    "tomato",
    "turquoise",
    "violet",
    "wheat",
    "white",
    "whitesmoke",
    "yellow",
    "yellowgreen",
]


def validate_color(color: str) -> bool:
    match_hex = re.search(r"^#(?:[0-9a-f]{3}){1,2}$", color.lower())
    match_html_color_name = color.lower() in HTML_COLORS
    return bool(match_hex or match_html_color_name)
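The hex branch accepts three- or six-digit hex codes prefixed with "#" (lowercased before matching); everything else must be a known HTML color name. A quick check of that behavior, with the color list abbreviated for the example:

import re

HTML_COLORS = ["black", "rebeccapurple", "white"]  # abbreviated for the example


def validate_color(color: str) -> bool:
    match_hex = re.search(r"^#(?:[0-9a-f]{3}){1,2}$", color.lower())
    match_html_color_name = color.lower() in HTML_COLORS
    return bool(match_hex or match_html_color_name)


assert validate_color("#FFF")            # 3-digit hex; matching is case-insensitive
assert validate_color("#c0ffee")         # 6-digit hex
assert validate_color("RebeccaPurple")   # name lookup is lowercased
assert not validate_color("#ffff")       # 4 digits match neither 3 nor 6
assert not validate_color("not-a-color")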
@@ -1,4 +1,4 @@
-from dbt.contracts.util import Replaceable, Mergeable, list_str
+from dbt.contracts.util import Replaceable, Mergeable, list_str, Identifier
 from dbt.contracts.connection import QueryComment, UserConfigContract
 from dbt.helper_types import NoValue
 from dbt.dataclass_schema import (
@@ -7,7 +7,6 @@ from dbt.dataclass_schema import (
     HyphenatedDbtClassMixin,
     ExtensibleDbtClassMixin,
     register_pattern,
-    ValidatedStringMixin,
 )
 from dataclasses import dataclass, field
 from typing import Optional, List, Dict, Union, Any
@@ -19,25 +18,6 @@ PIN_PACKAGE_URL = (
 DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True
 
 
-class Name(ValidatedStringMixin):
-    ValidationRegex = r"^[^\d\W]\w*$"
-
-    @classmethod
-    def is_valid(cls, value: Any) -> bool:
-        if not isinstance(value, str):
-            return False
-
-        try:
-            cls.validate(value)
-        except ValidationError:
-            return False
-
-        return True
-
-
-register_pattern(Name, r"^[^\d\W]\w*$")
-
-
 class SemverString(str, SerializableType):
     def _serialize(self) -> str:
         return self
@@ -182,7 +162,7 @@ BANNED_PROJECT_NAMES = {
 
 @dataclass
 class Project(HyphenatedDbtClassMixin, Replaceable):
-    name: Name
+    name: Identifier
     version: Union[SemverString, float]
     config_version: int
     project_root: Optional[str] = None
@@ -9,6 +9,13 @@ from dbt.version import __version__
 from dbt.events.functions import get_invocation_id
 from dbt.dataclass_schema import dbtClassMixin
 
+from dbt.dataclass_schema import (
+    ValidatedStringMixin,
+    ValidationError,
+    register_pattern,
+)
+
+
 SourceKey = Tuple[str, str]
 
 
@@ -201,6 +208,14 @@ class VersionedSchema(dbtClassMixin):
         result["$id"] = str(cls.dbt_schema_version)
         return result
 
+    @classmethod
+    def is_compatible_version(cls, schema_version):
+        compatible_versions = [str(cls.dbt_schema_version)]
+        if hasattr(cls, "compatible_previous_versions"):
+            for name, version in cls.compatible_previous_versions():
+                compatible_versions.append(str(SchemaVersion(name, version)))
+        return str(schema_version) in compatible_versions
+
     @classmethod
     def read_and_check_versions(cls, path: str):
         try:
@@ -217,7 +232,7 @@ class VersionedSchema(dbtClassMixin):
         if "metadata" in data and "dbt_schema_version" in data["metadata"]:
             previous_schema_version = data["metadata"]["dbt_schema_version"]
             # cls.dbt_schema_version is a SchemaVersion object
-            if str(cls.dbt_schema_version) != previous_schema_version:
+            if not cls.is_compatible_version(previous_schema_version):
                 raise IncompatibleSchemaException(
                     expected=str(cls.dbt_schema_version), found=previous_schema_version
                 )
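In isolation, the compatibility check reduces to string-set membership: an artifact is readable if its recorded schema id matches the current version or any declared compatible previous version. A self-contained sketch (the SchemaVersion stub and its URL format are hypothetical, standing in for the real class defined elsewhere in this module):

# Stub sketch of is_compatible_version; SchemaVersion here is hypothetical.
class SchemaVersion:
    def __init__(self, name: str, version: int):
        self.name = name
        self.version = version

    def __str__(self) -> str:
        return f"https://schemas.getdbt.com/dbt/{self.name}/v{self.version}.json"


def is_compatible(current: SchemaVersion, previous_versions, found: str) -> bool:
    compatible = [str(current)] + [str(v) for v in previous_versions]
    return found in compatible


current = SchemaVersion("manifest", 6)
older = [SchemaVersion("manifest", 5)]
assert is_compatible(current, older, str(SchemaVersion("manifest", 5)))
assert not is_compatible(current, older, str(SchemaVersion("manifest", 4)))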
@@ -242,3 +257,22 @@ class ArtifactMixin(VersionedSchema, Writable, Readable):
         super().validate(data)
         if cls.dbt_schema_version is None:
             raise InternalException("Cannot call from_dict with no schema version!")
+
+
+class Identifier(ValidatedStringMixin):
+    ValidationRegex = r"^[^\d\W]\w*$"
+
+    @classmethod
+    def is_valid(cls, value: Any) -> bool:
+        if not isinstance(value, str):
+            return False
+
+        try:
+            cls.validate(value)
+        except ValidationError:
+            return False
+
+        return True
+
+
+register_pattern(Identifier, r"^[^\d\W]\w*$")
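The pattern `^[^\d\W]\w*$` means: the first character is a word character that is not a digit (a letter or underscore), followed by any run of word characters. A quick demonstration of what it accepts and rejects:

import re

IDENTIFIER_RE = re.compile(r"^[^\d\W]\w*$")

# First char: letter or underscore; rest: letters, digits, underscores.
assert IDENTIFIER_RE.match("my_project")
assert IDENTIFIER_RE.match("_private")
assert IDENTIFIER_RE.match("jaffle_shop_2")
assert not IDENTIFIER_RE.match("2fast")       # cannot start with a digit
assert not IDENTIFIER_RE.match("my-project")  # hyphen is not a word character
assert not IDENTIFIER_RE.match("my project")  # no spaces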
@@ -103,7 +103,8 @@ SomeUnpinned = TypeVar("SomeUnpinned", bound="UnpinnedPackage")
 
 
 class UnpinnedPackage(Generic[SomePinned], BasePackage):
-    @abc.abstractclassmethod
+    @classmethod
+    @abc.abstractmethod
     def from_contract(cls, contract):
         raise NotImplementedError
@@ -166,8 +166,12 @@ def event_to_serializable_dict(
 
 # translates an Event to a completely formatted text-based log line
 # type hinting everything as strings so we don't get any unintentional string conversions via str()
+def reset_color() -> str:
+    return "" if not this.format_color else Style.RESET_ALL
+
+
 def create_info_text_log_line(e: T_Event) -> str:
-    color_tag: str = "" if this.format_color else Style.RESET_ALL
+    color_tag: str = reset_color()
     ts: str = get_ts().strftime("%H:%M:%S")
     scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
     log_line: str = f"{color_tag}{ts}  {scrubbed_msg}"
@@ -180,7 +184,7 @@ def create_debug_text_log_line(e: T_Event) -> str:
     if type(e) == MainReportVersion:
         separator = 30 * "="
         log_line = f"\n\n{separator} {get_ts()} | {get_invocation_id()} {separator}\n"
-    color_tag: str = "" if this.format_color else Style.RESET_ALL
+    color_tag: str = reset_color()
     ts: str = get_ts().strftime("%H:%M:%S.%f")
     scrubbed_msg: str = scrub_secrets(e.message(), env_secrets())
     level: str = e.level_tag() if len(e.level_tag()) == 5 else f"{e.level_tag()} "
@@ -1501,10 +1501,11 @@ class HooksRunning(InfoLevel):
 class HookFinished(InfoLevel):
     stat_line: str
     execution: str
+    execution_time: float
     code: str = "E040"
 
     def message(self) -> str:
-        return f"Finished running {self.stat_line}{self.execution}."
+        return f"Finished running {self.stat_line}{self.execution} ({self.execution_time:0.2f}s)."
 
 
 @dataclass
@@ -2620,7 +2621,7 @@ if 1 == 0:
     DatabaseErrorRunning(hook_type="")
     EmptyLine()
    HooksRunning(num_hooks=0, hook_type="")
-    HookFinished(stat_line="", execution="")
+    HookFinished(stat_line="", execution="", execution_time=0)
     WriteCatalogFailure(num_exceptions=0)
     CatalogWritten(path="")
     CannotGenerateDocs()
@@ -520,6 +520,12 @@ def ref_invalid_args(model, args) -> NoReturn:
     raise_compiler_error("ref() takes at most two arguments ({} given)".format(len(args)), model)
 
 
+def metric_invalid_args(model, args) -> NoReturn:
+    raise_compiler_error(
+        "metric() takes at most two arguments ({} given)".format(len(args)), model
+    )
+
+
 def ref_bad_context(model, args) -> NoReturn:
     ref_args = ", ".join("'{}'".format(a) for a in args)
     ref_string = "{{{{ ref({}) }}}}".format(ref_args)
@@ -650,6 +656,23 @@ def source_target_not_found(
     raise_compiler_error(msg, model)
 
 
+def get_metric_not_found_msg(
+    model,
+    target_name: str,
+    target_package: Optional[str],
+) -> str:
+    reason = "was not found"
+    return _get_target_failure_msg(
+        model, target_name, target_package, include_path=True, reason=reason, target_kind="metric"
+    )
+
+
+def metric_target_not_found(metric, target_name: str, target_package: Optional[str]) -> NoReturn:
+    msg = get_metric_not_found_msg(metric, target_name, target_package)
+
+    raise_compiler_error(msg, metric)
+
+
 def dependency_not_found(model, target_model_name):
     raise_compiler_error(
         "'{}' depends on '{}' which is not in the graph!".format(
@@ -37,6 +37,8 @@ EVENT_BUFFER_SIZE = 100000
 QUIET = None
 NO_PRINT = None
 CACHE_SELECTED_ONLY = None
+TARGET_PATH = None
+LOG_PATH = None
 
 _NON_BOOLEAN_FLAGS = [
     "LOG_FORMAT",
@@ -44,6 +46,8 @@ _NON_BOOLEAN_FLAGS = [
     "PROFILES_DIR",
     "INDIRECT_SELECTION",
     "EVENT_BUFFER_SIZE",
+    "TARGET_PATH",
+    "LOG_PATH",
 ]
 
 _NON_DBT_ENV_FLAGS = ["DO_NOT_TRACK"]
@@ -71,6 +75,8 @@ flag_defaults = {
     "QUIET": False,
     "NO_PRINT": False,
     "CACHE_SELECTED_ONLY": False,
+    "TARGET_PATH": None,
+    "LOG_PATH": None,
 }
 
 
@@ -121,6 +127,7 @@ def set_from_args(args, user_config):
     global WRITE_JSON, PARTIAL_PARSE, USE_COLORS, STORE_FAILURES, PROFILES_DIR, DEBUG, LOG_FORMAT
     global INDIRECT_SELECTION, VERSION_CHECK, FAIL_FAST, SEND_ANONYMOUS_USAGE_STATS
     global PRINTER_WIDTH, WHICH, LOG_CACHE_EVENTS, EVENT_BUFFER_SIZE, QUIET, NO_PRINT, CACHE_SELECTED_ONLY
+    global TARGET_PATH, LOG_PATH
 
     STRICT_MODE = False  # backwards compatibility
     # cli args without user_config or env var option
@@ -148,6 +155,8 @@ def set_from_args(args, user_config):
     QUIET = get_flag_value("QUIET", args, user_config)
     NO_PRINT = get_flag_value("NO_PRINT", args, user_config)
     CACHE_SELECTED_ONLY = get_flag_value("CACHE_SELECTED_ONLY", args, user_config)
+    TARGET_PATH = get_flag_value("TARGET_PATH", args, user_config)
+    LOG_PATH = get_flag_value("LOG_PATH", args, user_config)
 
     _set_overrides_from_env()
 
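A sketch of the resolution order implied by these call sites — an assumption drawn from the visible code, not a verbatim copy of get_flag_value: each flag is taken from CLI args if present, else from the user config, else from flag_defaults, and _set_overrides_from_env() then applies environment-variable overrides on top:

# Hypothetical sketch of flag resolution; names mirror the module above, but
# the precedence logic is an assumption based on the call sites, not dbt-core code.
import os

flag_defaults = {"TARGET_PATH": None, "LOG_PATH": None}


def get_flag_value(flag: str, args, user_config):
    arg_value = getattr(args, flag.lower(), None)
    if arg_value is not None:
        return arg_value
    config_value = getattr(user_config, flag.lower(), None) if user_config else None
    if config_value is not None:
        return config_value
    return flag_defaults[flag]


def set_override_from_env(flag: str, current):
    # e.g. DBT_TARGET_PATH=/tmp/target dbt run
    return os.environ.get(f"DBT_{flag}", current)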
core/dbt/include/global_project/macros/adapters/apply_grants.sql (new file, 167 lines)
@@ -0,0 +1,167 @@
{# ------- BOOLEAN MACROS --------- #}

{#
  -- COPY GRANTS
  -- When a relational object (view or table) is replaced in this database,
  -- do previous grants carry over to the new object? This may depend on:
  --    whether we use alter-rename-swap versus CREATE OR REPLACE
  --    user-supplied configuration (e.g. copy_grants on Snowflake)
  -- By default, play it safe, assume TRUE: that grants ARE copied over.
  -- This means dbt will first "show" current grants and then calculate diffs.
  -- It may require an additional query beyond what is strictly necessary,
  -- but better safe than sorry.
#}

{% macro copy_grants() %}
    {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}
{% endmacro %}

{% macro default__copy_grants() %}
    {{ return(True) }}
{% endmacro %}


{#
  -- SUPPORT MULTIPLE GRANTEES PER DCL STATEMENT
  -- Does this database support 'grant {privilege} to {grantee_1}, {grantee_2}, ...'
  -- Or must these be separate statements:
  --    `grant {privilege} to {grantee_1}`;
  --    `grant {privilege} to {grantee_2}`;
  -- By default, pick the former, because it's what we prefer when available.
#}

{% macro support_multiple_grantees_per_dcl_statement() %}
    {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}
{% endmacro %}

{%- macro default__support_multiple_grantees_per_dcl_statement() -%}
    {{ return(True) }}
{%- endmacro -%}


{% macro should_revoke(existing_relation, full_refresh_mode=True) %}

    {% if not existing_relation %}
        {#-- The table doesn't already exist, so no grants to copy over --#}
        {{ return(False) }}
    {% elif full_refresh_mode %}
        {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}
        {{ return(copy_grants()) }}
    {% else %}
        {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}
        {{ return(True) }}
    {% endif %}

{% endmacro %}

{# ------- DCL STATEMENT TEMPLATES --------- #}

{% macro get_show_grant_sql(relation) %}
    {{ return(adapter.dispatch("get_show_grant_sql", "dbt")(relation)) }}
{% endmacro %}

{% macro default__get_show_grant_sql(relation) %}
    show grants on {{ relation }}
{% endmacro %}


{% macro get_grant_sql(relation, privilege, grantees) %}
    {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}
{% endmacro %}

{%- macro default__get_grant_sql(relation, privilege, grantees) -%}
    grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}
{%- endmacro -%}


{% macro get_revoke_sql(relation, privilege, grantees) %}
    {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}
{% endmacro %}

{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}
    revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}
{%- endmacro -%}


{# ------- RUNTIME APPLICATION --------- #}

{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}
    {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}
{% endmacro %}

{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}
    {#
      -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.
      -- Depending on whether this database supports multiple grantees per statement, pass in the list of
      -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.
      -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`
    #}
    {%- set dcl_statements = [] -%}
    {%- for privilege, grantees in grant_config.items() %}
        {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}
            {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}
            {%- do dcl_statements.append(dcl) -%}
        {%- else -%}
            {%- for grantee in grantees -%}
                {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}
                {%- do dcl_statements.append(dcl) -%}
            {% endfor -%}
        {%- endif -%}
    {%- endfor -%}
    {{ return(dcl_statements) }}
{%- endmacro %}


{% macro call_dcl_statements(dcl_statement_list) %}
    {{ return(adapter.dispatch("call_dcl_statements", "dbt")(dcl_statement_list)) }}
{% endmacro %}

{% macro default__call_dcl_statements(dcl_statement_list) %}
    {#
      -- By default, supply all grant + revoke statements in a single semicolon-separated block,
      -- so that they're all processed together.
      -- Some databases do not support this. Those adapters will need to override this macro
      -- to run each statement individually.
    #}
    {% call statement('grants') %}
        {% for dcl_statement in dcl_statement_list %}
            {{ dcl_statement }};
        {% endfor %}
    {% endcall %}
{% endmacro %}


{% macro apply_grants(relation, grant_config, should_revoke) %}
    {{ return(adapter.dispatch("apply_grants", "dbt")(relation, grant_config, should_revoke)) }}
{% endmacro %}

{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}
    {#-- If grant_config is {} or None, this is a no-op --#}
    {% if grant_config %}
        {% if should_revoke %}
            {#-- We think previous grants may have carried over --#}
            {#-- Show current grants and calculate diffs --#}
            {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}
            {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}
            {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}
            {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}
            {% if not (needs_granting or needs_revoking) %}
                {{ log('On ' ~ relation ~ ': All grants are in place, no revocation or granting needed.') }}
            {% endif %}
        {% else %}
            {#-- We don't think there's any chance of previous grants having carried over. --#}
            {#-- Jump straight to granting what the user has configured. --#}
            {% set needs_revoking = {} %}
            {% set needs_granting = grant_config %}
        {% endif %}
        {% if needs_granting or needs_revoking %}
            {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}
            {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}
            {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}
            {% if dcl_statement_list %}
                {{ call_dcl_statements(dcl_statement_list) }}
            {% endif %}
        {% endif %}
    {% endif %}
{% endmacro %}
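The heart of default__apply_grants is a two-way dict diff: grants in the config but not in the database need granting, and grants in the database but not in the config need revoking. A minimal Python sketch of that set arithmetic (illustrative only; the real diff_of_two_dicts is a dbt context method with its own implementation):

# Illustrative sketch of the grant diffing performed by default__apply_grants.
def diff_of_two_dicts(left: dict, right: dict) -> dict:
    # per privilege, keep only grantees present in `left` but not in `right`
    out = {}
    for privilege, grantees in left.items():
        missing = [g for g in grantees if g not in right.get(privilege, [])]
        if missing:
            out[privilege] = missing
    return out


grant_config = {"select": ["reporter", "bi_user"]}
current_grants = {"select": ["bi_user"], "insert": ["etl_user"]}

needs_granting = diff_of_two_dicts(grant_config, current_grants)
needs_revoking = diff_of_two_dicts(current_grants, grant_config)
print(needs_granting)  # {'select': ['reporter']}
print(needs_revoking)  # {'insert': ['etl_user']}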
@@ -20,6 +20,8 @@
   -- BEGIN, in a separate transaction
   {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}
   {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
+  -- grab the current table's grants config for comparison later on
+  {% set grant_config = config.get('grants') %}
   {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
   {{ drop_relation_if_exists(preexisting_backup_relation) }}
 
@@ -59,6 +61,9 @@
     {% do to_drop.append(backup_relation) %}
   {% endif %}
 
+  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {% do persist_docs(target_relation, model) %}
 
   {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}
@@ -14,6 +14,8 @@
   {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
   -- as above, the backup_relation should not already exist
   {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
+  -- grab the current table's grants config for comparison later on
+  {% set grant_config = config.get('grants') %}
 
   -- drop the temp relations if they exist already in the database
   {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
@@ -40,6 +42,9 @@
 
   {{ run_hooks(post_hooks, inside_transaction=True) }}
 
+  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {% do persist_docs(target_relation, model) %}
 
   -- `COMMIT` happens here
@@ -13,12 +13,12 @@
   {%- set identifier = model['alias'] -%}
 
   {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}
   {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}
 
   {%- set target_relation = api.Relation.create(
       identifier=identifier, schema=schema, database=database,
       type='view') -%}
+  {% set grant_config = config.get('grants') %}
 
   {{ run_hooks(pre_hooks) }}
 
@@ -34,6 +34,9 @@
       {{ get_create_view_as_sql(target_relation, sql) }}
   {%- endcall %}
 
+  {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {{ run_hooks(post_hooks) }}
 
   {{ return({'relations': [target_relation]}) }}
@@ -25,6 +25,8 @@
   {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
   -- as above, the backup_relation should not already exist
   {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
+  -- grab the current table's grants config for comparison later on
+  {% set grant_config = config.get('grants') %}
 
   {{ run_hooks(pre_hooks, inside_transaction=False) }}
 
@@ -47,6 +49,9 @@
   {% endif %}
   {{ adapter.rename_relation(intermediate_relation, target_relation) }}
 
+  {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {% do persist_docs(target_relation, model) %}
 
   {{ run_hooks(post_hooks, inside_transaction=True) }}
@@ -8,7 +8,10 @@
   {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}
   {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}
 
+  -- grab the current table's grants config for comparison later on
+  {%- set grant_config = config.get('grants') -%}
+
   {%- set agate_table = load_agate_table() -%}
   {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}
 
   {{ run_hooks(pre_hooks, inside_transaction=False) }}
@@ -35,6 +38,10 @@
   {% endcall %}
 
   {% set target_relation = this.incorporate(type='table') %}
+
+  {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {% do persist_docs(target_relation, model) %}
 
   {% if full_refresh_mode or not exists_as_table %}
@@ -5,6 +5,8 @@
 
   {%- set strategy_name = config.get('strategy') -%}
   {%- set unique_key = config.get('unique_key') %}
+  -- grab the current table's grants config for comparison later on
+  {%- set grant_config = config.get('grants') -%}
 
   {% set target_relation_exists, target_relation = get_or_create_relation(
       database=model.database,
@@ -73,6 +75,9 @@
       {{ final_sql }}
   {% endcall %}
 
+  {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}
+  {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
+
   {% do persist_docs(target_relation, model) %}
 
   {% if not target_relation_exists %}
@@ -133,7 +133,7 @@
   {%- set intersection = [] -%}
   {%- for col in query_columns -%}
     {%- if col in existing_cols -%}
-      {%- do intersection.append(col) -%}
+      {%- do intersection.append(adapter.quote(col)) -%}
     {%- else -%}
       {% set ns.column_added = true %}
     {%- endif -%}
@@ -0,0 +1,9 @@
{% macro any_value(expression) -%}
    {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}
{% endmacro %}

{% macro default__any_value(expression) -%}

    any_value({{ expression }})

{%- endmacro %}
core/dbt/include/global_project/macros/utils/bool_or.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
{% macro bool_or(expression) -%}
    {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}
{% endmacro %}

{% macro default__bool_or(expression) -%}

    bool_or({{ expression }})

{%- endmacro %}
@@ -0,0 +1,7 @@
{% macro cast_bool_to_text(field) %}
    {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}
{% endmacro %}

{% macro default__cast_bool_to_text(field) %}
    cast({{ field }} as {{ api.Column.translate_type('string') }})
{% endmacro %}
core/dbt/include/global_project/macros/utils/concat.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
{% macro concat(fields) -%}
    {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}
{%- endmacro %}

{% macro default__concat(fields) -%}
    {{ fields|join(' || ') }}
{%- endmacro %}
core/dbt/include/global_project/macros/utils/data_types.sql (new file, 117 lines)
@@ -0,0 +1,117 @@
{# string ------------------------------------------------- #}

{%- macro type_string() -%}
    {{ return(adapter.dispatch('type_string', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_string() %}
    {{ return(api.Column.translate_type("string")) }}
{% endmacro %}

-- This will return 'text' by default
-- On Postgres + Snowflake, that's equivalent to varchar (no size)
-- Redshift will treat that as varchar(256)


{# timestamp ------------------------------------------------- #}

{%- macro type_timestamp() -%}
    {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_timestamp() %}
    {{ return(api.Column.translate_type("timestamp")) }}
{% endmacro %}

/*
POSTGRES
https://www.postgresql.org/docs/current/datatype-datetime.html:
The SQL standard requires that writing just `timestamp`
be equivalent to `timestamp without time zone`, and
PostgreSQL honors that behavior.
`timestamptz` is accepted as an abbreviation for `timestamp with time zone`;
this is a PostgreSQL extension.

SNOWFLAKE
https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp
The TIMESTAMP_* variation associated with TIMESTAMP is specified by the
TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ.

BIGQUERY
TIMESTAMP means 'timestamp with time zone'
DATETIME means 'timestamp without time zone'
TODO: shouldn't this return DATETIME instead of TIMESTAMP, for consistency with other databases?
e.g. dateadd returns a DATETIME
*/


{# float ------------------------------------------------- #}

{%- macro type_float() -%}
    {{ return(adapter.dispatch('type_float', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_float() %}
    {{ return(api.Column.translate_type("float")) }}
{% endmacro %}


{# numeric ------------------------------------------------ #}

{%- macro type_numeric() -%}
    {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}
{%- endmacro -%}

/*
This one can't be just translate_type, since precision/scale make it a bit more complicated.

On most databases, the default (precision, scale) is something like:
    Redshift: (18, 0)
    Snowflake: (38, 0)
    Postgres: (<=131072, 0)

https://www.postgresql.org/docs/current/datatype-numeric.html:
Specifying NUMERIC without any precision or scale creates an “unconstrained numeric”
column in which numeric values of any length can be stored, up to the implementation limits.
A column of this kind will not coerce input values to any particular scale,
whereas numeric columns with a declared scale will coerce input values to that scale.
(The SQL standard requires a default scale of 0, i.e., coercion to integer precision.
We find this a bit useless. If you're concerned about portability, always specify
the precision and scale explicitly.)
*/

{% macro default__type_numeric() %}
    {{ return(api.Column.numeric_type("numeric", 28, 6)) }}
{% endmacro %}


{# bigint ------------------------------------------------- #}

{%- macro type_bigint() -%}
    {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}
{%- endmacro -%}

-- We don't have a conversion type for 'bigint' in TYPE_LABELS,
-- so this actually just returns the string 'bigint'

{% macro default__type_bigint() %}
    {{ return(api.Column.translate_type("bigint")) }}
{% endmacro %}

-- Good news: BigQuery now supports 'bigint' (and 'int') as an alias for 'int64'


{# int ------------------------------------------------- #}

{%- macro type_int() -%}
    {{ return(adapter.dispatch('type_int', 'dbt')()) }}
{%- endmacro -%}

{%- macro default__type_int() -%}
    {{ return(api.Column.translate_type("integer")) }}
{%- endmacro -%}

-- returns 'int' everywhere, except BigQuery, where it returns 'int64'
-- (but BigQuery also now accepts 'int' as a valid alias for 'int64')
@@ -0,0 +1,7 @@
{% macro date_trunc(datepart, date) -%}
    {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}
{%- endmacro %}

{% macro default__date_trunc(datepart, date) -%}
    date_trunc('{{datepart}}', {{date}})
{%- endmacro %}
core/dbt/include/global_project/macros/utils/dateadd.sql (new file, 14 lines)
@@ -0,0 +1,14 @@
{% macro dateadd(datepart, interval, from_date_or_timestamp) %}
    {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}
{% endmacro %}


{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}

    dateadd(
        {{ datepart }},
        {{ interval }},
        {{ from_date_or_timestamp }}
        )

{% endmacro %}
core/dbt/include/global_project/macros/utils/datediff.sql (new file, 14 lines)
@@ -0,0 +1,14 @@
{% macro datediff(first_date, second_date, datepart) %}
    {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}
{% endmacro %}


{% macro default__datediff(first_date, second_date, datepart) -%}

    datediff(
        {{ datepart }},
        {{ first_date }},
        {{ second_date }}
        )

{%- endmacro %}
@@ -0,0 +1,8 @@
{% macro escape_single_quotes(expression) %}
    {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}
{% endmacro %}

{# /* Default to replacing a single apostrophe with two apostrophes: they're -> they''re */ #}
{% macro default__escape_single_quotes(expression) -%}
    {{ expression | replace("'", "''") }}
{%- endmacro %}
core/dbt/include/global_project/macros/utils/except.sql (new file, 9 lines)
@@ -0,0 +1,9 @@
{% macro except() %}
    {{ return(adapter.dispatch('except', 'dbt')()) }}
{% endmacro %}

{% macro default__except() %}

    except

{% endmacro %}
core/dbt/include/global_project/macros/utils/hash.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
{% macro hash(field) -%}
    {{ return(adapter.dispatch('hash', 'dbt') (field)) }}
{%- endmacro %}

{% macro default__hash(field) -%}
    md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))
{%- endmacro %}
@@ -0,0 +1,9 @@
{% macro intersect() %}
    {{ return(adapter.dispatch('intersect', 'dbt')()) }}
{% endmacro %}

{% macro default__intersect() %}

    intersect

{% endmacro %}
core/dbt/include/global_project/macros/utils/last_day.sql (new file, 15 lines)
@@ -0,0 +1,15 @@
{% macro last_day(date, datepart) %}
    {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}
{% endmacro %}

{%- macro default_last_day(date, datepart) -%}
    cast(
        {{ dbt.dateadd('day', '-1',
            dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))
        ) }}
        as date)
{%- endmacro -%}

{% macro default__last_day(date, datepart) -%}
    {{ dbt.default_last_day(date, datepart) }}
{%- endmacro %}
core/dbt/include/global_project/macros/utils/length.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
{% macro length(expression) -%}
    {{ return(adapter.dispatch('length', 'dbt') (expression)) }}
{% endmacro %}

{% macro default__length(expression) %}

    length(
        {{ expression }}
    )

{%- endmacro -%}
core/dbt/include/global_project/macros/utils/listagg.sql (new file, 30 lines)
@@ -0,0 +1,30 @@
{% macro listagg(measure, delimiter_text="','", order_by_clause=none, limit_num=none) -%}
    {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}
{%- endmacro %}

{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}

    {% if limit_num -%}
    array_to_string(
        array_slice(
            array_agg(
                {{ measure }}
                ){% if order_by_clause -%}
                within group ({{ order_by_clause }})
                {%- endif %}
            ,0
            ,{{ limit_num }}
            ),
        {{ delimiter_text }}
        )
    {%- else %}
    listagg(
        {{ measure }},
        {{ delimiter_text }}
        )
    {% if order_by_clause -%}
    within group ({{ order_by_clause }})
    {%- endif %}
    {%- endif %}

{%- endmacro %}
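Functionally, listagg aggregates a column's values into one delimited string, optionally ordered and truncated to the first limit_num elements (the array_slice branch above). The equivalent computation as a plain-data Python sketch, purely for illustration:

# Plain-Python sketch of what the listagg macro computes on a database.
def listagg(values, delimiter=",", order_by=None, limit_num=None):
    if order_by is not None:
        values = sorted(values, key=order_by)
    if limit_num is not None:
        values = values[:limit_num]
    return delimiter.join(str(v) for v in values)


print(listagg(["b", "a", "c"]))                                     # b,a,c
print(listagg(["b", "a", "c"], order_by=lambda v: v))               # a,b,c
print(listagg(["b", "a", "c"], order_by=lambda v: v, limit_num=2))  # a,b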
core/dbt/include/global_project/macros/utils/literal.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
{%- macro string_literal(value) -%}
    {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}
{%- endmacro -%}

{% macro default__string_literal(value) -%}
    '{{ value }}'
{%- endmacro %}
core/dbt/include/global_project/macros/utils/position.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
{% macro position(substring_text, string_text) -%}
    {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}
{% endmacro %}

{% macro default__position(substring_text, string_text) %}

    position(
        {{ substring_text }} in {{ string_text }}
    )

{%- endmacro -%}
core/dbt/include/global_project/macros/utils/replace.sql (new file, 14 lines)
@@ -0,0 +1,14 @@
{% macro replace(field, old_chars, new_chars) -%}
    {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}
{% endmacro %}

{% macro default__replace(field, old_chars, new_chars) %}

    replace(
        {{ field }},
        {{ old_chars }},
        {{ new_chars }}
    )


{% endmacro %}
core/dbt/include/global_project/macros/utils/right.sql (new file, 12 lines)
@@ -0,0 +1,12 @@
{% macro right(string_text, length_expression) -%}
    {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}
{% endmacro %}

{% macro default__right(string_text, length_expression) %}

    right(
        {{ string_text }},
        {{ length_expression }}
    )

{%- endmacro -%}
@@ -0,0 +1,9 @@
{% macro safe_cast(field, type) %}
    {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}
{% endmacro %}

{% macro default__safe_cast(field, type) %}
    {# most databases don't support this function yet
       so we just need to use cast #}
    cast({{field}} as {{type}})
{% endmacro %}
core/dbt/include/global_project/macros/utils/split_part.sql (new file, 26 lines)
@@ -0,0 +1,26 @@
{% macro split_part(string_text, delimiter_text, part_number) %}
    {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}
{% endmacro %}

{% macro default__split_part(string_text, delimiter_text, part_number) %}

    split_part(
        {{ string_text }},
        {{ delimiter_text }},
        {{ part_number }}
        )

{% endmacro %}

{% macro _split_part_negative(string_text, delimiter_text, part_number) %}

    split_part(
        {{ string_text }},
        {{ delimiter_text }},
        length({{ string_text }})
        - length(
            replace({{ string_text }}, {{ delimiter_text }}, '')
          ) + 2 {{ part_number }}
        )

{% endmacro %}
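The _split_part_negative variant targets databases whose split_part has no native negative indexing: it converts a negative part_number into an equivalent positive index. For a single-character delimiter, length(s) - length(replace(s, d, '')) counts delimiter occurrences, and parts = occurrences + 1, so the rendered expression occurrences + 2 + part_number (part_number carries its own minus sign, hence the "+ 2 {{ part_number }}" template) recovers the position counted from the end. Checking the arithmetic in Python:

# Verifying the positive-index arithmetic used by _split_part_negative
# (assumes a single-character delimiter, as the SQL length trick does).
def positive_index(s: str, delim: str, part_number: int) -> int:
    occurrences = len(s) - len(s.replace(delim, ""))
    # rendered SQL is: occurrences + 2 + part_number (part_number is negative)
    return occurrences + 2 + part_number


s = "a,b,c,d"
assert positive_index(s, ",", -1) == 4  # last part -> 'd'
assert positive_index(s, ",", -2) == 3  # second to last -> 'c'
assert s.split(",")[positive_index(s, ",", -1) - 1] == "d"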
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff