Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-19 13:11:27 +00:00

Compare commits (1 commit): generate_l… ... update-ind…
| Author | SHA1 | Date |
|---|---|---|
|  | 1552eccb05 |  |
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 1.10.0a1
+current_version = 1.9.0a1
 parse = (?P<major>[\d]+) # major version number
     \.(?P<minor>[\d]+) # minor version number
     \.(?P<patch>[\d]+) # patch version number
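As an aside, the `parse` entry above is an ordinary regex with named groups. A minimal sketch of how such a pattern splits a version string (plain Python `re`; the verbose per-line comments from the config are dropped here):

```python
import re

# Named groups mirror the parse= pattern shown above.
VERSION_RE = re.compile(r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)")

m = VERSION_RE.match("1.10.0")
assert m is not None
print(m.group("major"), m.group("minor"), m.group("patch"))  # -> 1 10 0
```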
.changes/unreleased/Dependencies-20240509-093717.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Dependencies
body: Remove logbook dependency
time: 2024-05-09T09:37:17.745129-05:00
custom:
  Author: emmyoop
  Issue: "8027"
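The `.changes/unreleased/` files throughout this diff are changie-style changelog fragments. A minimal sketch of reading one (assuming PyYAML is installed; the content is copied from the entry above):

```python
import yaml  # assumption: PyYAML is available

# Load one changie-style changelog entry like the file above.
raw = """\
kind: Dependencies
body: Remove logbook dependency
time: 2024-05-09T09:37:17.745129-05:00
custom:
  Author: emmyoop
  Issue: "8027"
"""

entry = yaml.safe_load(raw)
print(entry["kind"], "-", entry["body"], "(#%s)" % entry["custom"]["Issue"])
```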
@@ -1,6 +0,0 @@
kind: Dependencies
body: Upgrading dbt-semantic-interfaces to 0.8.3 for custom grain support in offset windows
time: 2024-11-12T16:38:15.351519-05:00
custom:
  Author: WilliamDee
  Issue: None

@@ -1,6 +0,0 @@
kind: "Dependencies"
body: "Bump codecov/codecov-action from 4 to 5"
time: 2024-11-18T00:11:13.00000Z
custom:
  Author: dependabot[bot]
  Issue: 11009

.changes/unreleased/Docs-20240522-174713.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Docs
body: Fix rendering docs with saved queries
time: 2024-05-22T17:47:13.414938-04:00
custom:
  Author: ChenyuLInx michelleark
  Issue: "10168"

.changes/unreleased/Features-20240506-175642.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Features
body: serialize inferred primary key
time: 2024-05-06T17:56:42.757673-05:00
custom:
  Author: dave-connors-3
  Issue: "9824"

.changes/unreleased/Features-20240507-162717.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Features
body: 'Add unit_test: selection method'
time: 2024-05-07T16:27:17.047585-04:00
custom:
  Author: michelleark
  Issue: "10053"

@@ -1,6 +0,0 @@
kind: Features
body: Add new hard_deletes="new_record" mode for snapshots.
time: 2024-11-04T12:00:53.95191-05:00
custom:
  Author: peterallenwebb
  Issue: "10235"

@@ -1,6 +0,0 @@
kind: Features
body: Add `batch` context object to model jinja context
time: 2024-11-21T12:56:30.715473-06:00
custom:
  Author: QMalcolm
  Issue: "11025"

@@ -1,7 +0,0 @@
kind: Features
body: Ensure pre/post hooks only run on first/last batch respectively for microbatch
  model batches
time: 2024-12-06T19:53:08.928793-06:00
custom:
  Author: MichelleArk QMalcolm
  Issue: 11094 11104

@@ -1,6 +0,0 @@
kind: Features
body: Generate latest model version view
time: 2024-12-10T13:07:34.723167-05:00
custom:
  Author: gshank
  Issue: "7442"

@@ -1,6 +0,0 @@
kind: Features
body: Support "tags" in Saved Queries
time: 2024-12-16T09:54:35.327675-08:00
custom:
  Author: theyostalservice
  Issue: "11155"

@@ -1,6 +0,0 @@
kind: Features
body: Calculate source freshness via a SQL query
time: 2024-12-17T17:16:31.841076-08:00
custom:
  Author: ChenyuLInx
  Issue: "8797"

@@ -1,6 +0,0 @@
kind: Features
body: Add freshness definition on model for adaptive job
time: 2024-12-18T17:07:29.55754-08:00
custom:
  Author: ChenyuLInx
  Issue: "11123"

@@ -1,6 +0,0 @@
kind: Features
body: Meta config for dimensions measures and entities
time: 2025-01-06T13:28:29.176439-06:00
custom:
  Author: DevonFulcher
  Issue: None

@@ -1,6 +0,0 @@
kind: Features
body: Add doc_blocks to manifest for nodes and columns
time: 2025-01-22T17:03:28.866522Z
custom:
  Author: aranke
  Issue: 11000 11001

@@ -1,6 +0,0 @@
kind: Features
body: Initial implementation of sample mode
time: 2025-02-02T14:00:54.074209-06:00
custom:
  Author: QMalcolm
  Issue: 11227 11230 11231 11248 11252 11254 11258
.changes/unreleased/Fixes-20230601-204157.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Remove unused check_new method
time: 2023-06-01T20:41:57.556342+02:00
custom:
  Author: kevinneville
  Issue: "7586"

.changes/unreleased/Fixes-20240508-151127.yaml | 7 (new file)
@@ -0,0 +1,7 @@
kind: Fixes
body: 'Restore previous behavior for --favor-state: only favor defer_relation if not
  selected in current command'
time: 2024-05-08T15:11:27.510912+02:00
custom:
  Author: jtcohen6
  Issue: "10107"

.changes/unreleased/Fixes-20240509-091411.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Unit test fixture (csv) returns null for empty value
time: 2024-05-09T09:14:11.772709-04:00
custom:
  Author: michelleark
  Issue: "9881"

.changes/unreleased/Fixes-20240516-153913.yaml | 7 (new file)
@@ -0,0 +1,7 @@
kind: Fixes
body: Fix json format log and --quiet for ls and jinja print by converting print call
  to fire events
time: 2024-05-16T15:39:13.896723-07:00
custom:
  Author: ChenyuLInx
  Issue: "8756"

.changes/unreleased/Fixes-20240516-223510.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Fixes
body: Add resource type to saved_query
time: 2024-05-16T22:35:10.287514-07:00
custom:
  Author: ChenyuLInx
  Issue: "10168"
@@ -1,6 +0,0 @@
kind: Fixes
body: dbt retry does not respect --threads
time: 2024-08-22T12:21:32.358066+05:30
custom:
  Author: donjin-master
  Issue: "10584"

@@ -1,6 +0,0 @@
kind: Fixes
body: update adapter version messages
time: 2024-10-25T10:43:39.274723-05:00
custom:
  Author: dave-connors-3
  Issue: "10230"

@@ -1,6 +0,0 @@
kind: Fixes
body: Catch DbtRuntimeError for hooks
time: 2024-11-21T18:17:39.753235Z
custom:
  Author: aranke
  Issue: "11012"

@@ -1,6 +0,0 @@
kind: Fixes
body: Access DBUG flag more consistently with the rest of the codebase in ManifestLoader
time: 2024-11-28T16:29:36.236729+01:00
custom:
  Author: Threynaud
  Issue: "11068"

@@ -1,6 +0,0 @@
kind: Fixes
body: Improve the performance characteristics of add_test_edges()
time: 2024-12-04T10:04:29.096231-05:00
custom:
  Author: peterallenwebb
  Issue: "10950"

@@ -1,6 +0,0 @@
kind: Fixes
body: Implement partial parsing for singular data test configs in yaml files
time: 2024-12-05T14:53:07.295536-05:00
custom:
  Author: gshank
  Issue: "10801"

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix debug log messages for microbatch batch execution information
time: 2024-12-09T11:38:06.972743-06:00
custom:
  Author: MichelleArk QMalcolm
  Issue: "11111"

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix running of extra "last" batch when there is only one batch
time: 2024-12-09T13:33:17.253326-06:00
custom:
  Author: QMalcolm
  Issue: "11112"

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix interpretation of `PartialSuccess` to result in non-zero exit code
time: 2024-12-09T15:07:11.391313-06:00
custom:
  Author: QMalcolm
  Issue: "11114"

@@ -1,6 +0,0 @@
kind: Fixes
body: Warn about invalid usages of `concurrent_batches` config
time: 2024-12-12T11:36:11.451962-06:00
custom:
  Author: QMalcolm
  Issue: "11122"

@@ -1,6 +0,0 @@
kind: Fixes
body: Error writing generic test at run time
time: 2024-12-16T13:46:45.936573-05:00
custom:
  Author: gshank
  Issue: "11110"

@@ -1,6 +0,0 @@
kind: Fixes
body: Run check_modified_contract for state:modified
time: 2024-12-17T15:48:48.053054-05:00
custom:
  Author: gshank
  Issue: "11034"

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix unrendered_config for tests from dbt_project.yml
time: 2024-12-18T11:26:40.270022-05:00
custom:
  Author: gshank
  Issue: "11146"

@@ -1,6 +0,0 @@
kind: Fixes
body: Make partial parsing reparse referencing nodes of newly versioned models.
time: 2025-01-02T14:05:43.629959-05:00
custom:
  Author: d-cole
  Issue: "8872"

@@ -1,6 +0,0 @@
kind: Fixes
body: Ensure warning about microbatch lacking filter inputs is always fired
time: 2025-01-07T17:37:19.373261-06:00
custom:
  Author: QMalcolm
  Issue: "11159"

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix microbatch dbt list --output json
time: 2025-01-09T12:33:09.958795+01:00
custom:
  Author: internetcoffeephone
  Issue: 10556 11098

@@ -1,6 +0,0 @@
kind: Fixes
body: Fix for custom fields in generic test config for not_null and unique tests
time: 2025-01-10T15:58:24.479245-05:00
custom:
  Author: gshank
  Issue: "11208"

@@ -1,6 +0,0 @@
kind: Fixes
body: Loosen validation on freshness to accommodate previously wrong but harmless config.
time: 2025-01-28T13:55:09.318833-08:00
custom:
  Author: ChenyuLInx peterallenwebb
  Issue: "11123"
.changes/unreleased/Security-20240522-094540.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Security
body: Explicitly bind to localhost in docs serve
time: 2024-05-22T09:45:40.748185-04:00
custom:
  Author: ChenyuLInx michelleark
  Issue: "10209"

.changes/unreleased/Under the Hood-20240502-154430.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Clear error message for Private package in dbt-core
time: 2024-05-02T15:44:30.713097-07:00
custom:
  Author: ChenyuLInx
  Issue: "10083"

.changes/unreleased/Under the Hood-20240506-145511.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Enable use of context in serialization
time: 2024-05-06T14:55:11.1812-04:00
custom:
  Author: gshank
  Issue: "10093"

.changes/unreleased/Under the Hood-20240519-155946.yaml | 6 (new file)
@@ -0,0 +1,6 @@
kind: Under the Hood
body: Make RSS high water mark measurement more accurate on Linux
time: 2024-05-19T15:59:46.700842315-04:00
custom:
  Author: peterallenwebb
  Issue: "10177"
@@ -1,6 +0,0 @@
kind: Under the Hood
body: Create a no-op exposure runner
time: 2024-12-02T16:47:15.766574Z
custom:
  Author: aranke
  Issue: ' '

@@ -1,7 +0,0 @@
kind: Under the Hood
body: Improve selection performance by optimizing the select_children() and select_parents()
  functions.
time: 2024-12-05T14:31:44.584216-05:00
custom:
  Author: peterallenwebb
  Issue: "11099"

@@ -1,7 +0,0 @@
kind: Under the Hood
body: Change exception type from DbtInternalException to UndefinedMacroError when
  macro not found in 'run operation' command
time: 2025-01-07T12:39:55.234321-05:00
custom:
  Author: michelleark
  Issue: "11192"

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Create LogNodeResult event
time: 2025-01-07T20:58:38.821036Z
custom:
  Author: aranke
  Issue: ' '

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Fix error counts for exposures
time: 2025-01-10T20:20:57.01632Z
custom:
  Author: aranke
  Issue: ' '

@@ -1,6 +0,0 @@
kind: Under the Hood
body: Misc fixes for group info in logging
time: 2025-01-17T15:22:15.497485Z
custom:
  Author: aranke
  Issue: '11218'
.flake8 | 1
@@ -7,7 +7,6 @@ ignore =
    W503 # makes Flake8 work like black
    W504
    E203 # makes Flake8 work like black
    E704 # makes Flake8 work like black
    E741
    E501 # long line checking is done in black
exclude = test/
.github/ISSUE_TEMPLATE/code-docs.yml | 18 (deleted)
@@ -1,18 +0,0 @@
name: 📄 Code docs
description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc.
title: "[Code docs] <title>"
labels: ["triage"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this code docs issue!
  - type: textarea
    attributes:
      label: Please describe the issue and your proposals.
      description: |
        Links? References? Anything that will give us more context about the issue you are encountering!

        Tip: You can attach images by clicking this area to highlight it and then dragging files in.
    validations:
      required: false
.github/ISSUE_TEMPLATE/config.yml | 3
@@ -1,8 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: Documentation
    url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose
    about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template.
  - name: Ask the community for help
    url: https://github.com/dbt-labs/docs.getdbt.com/discussions
    about: Need help troubleshooting? Check out our guide on how to ask
.github/actions/setup-postgres-linux/action.yml | 11
@@ -5,15 +5,6 @@ runs:
  steps:
    - shell: bash
      run: |
        sudo apt-get --purge remove postgresql postgresql-*
        sudo apt update -y
        sudo apt install gnupg2 wget vim -y
        sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
        curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg
        sudo apt update -y
        sudo apt install postgresql-16
        sudo apt-get -y install postgresql postgresql-contrib
        sudo systemctl start postgresql
        sudo systemctl enable postgresql
        sudo systemctl start postgresql.service
        pg_isready
        sudo -u postgres bash ${{ github.action_path }}/setup_db.sh
@@ -5,9 +5,7 @@ runs:
  steps:
    - shell: bash
      run: |
-        brew install postgresql@16
-        brew link postgresql@16 --force
-        brew services start postgresql@16
+        brew services start postgresql
        echo "Check PostgreSQL service is running"
        i=10
        COMMAND='pg_isready'
@@ -5,22 +5,8 @@ runs:
  steps:
    - shell: pwsh
      run: |
        Write-Host -Object "Installing PostgreSQL 16 as windows service..."
        $installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended")
        $filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe"
        Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru

        Write-Host -Object "Validating PostgreSQL 16 Install..."
        Get-Service -Name postgresql*
        $pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
        $exitCode = $pgReady.ExitCode
        if ($exitCode -ne 0) {
          Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode"
          exit $exitCode
        }

        Write-Host -Object "Starting PostgreSQL 16 Service..."
-        $pgService = Get-Service -Name postgresql-x64-16
+        $pgService = Get-Service -Name postgresql*
        Set-Service -InputObject $pgService -Status running -StartupType automatic
        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
        $env:Path += ";$env:PGBIN"
        bash ${{ github.action_path }}/setup_db.sh
.github/pull_request_template.md | 14
@@ -1,7 +1,7 @@
-Resolves #
+resolves #

<!---
-  Include the number of the issue addressed by this PR above, if applicable.
+  Include the number of the issue addressed by this PR above if applicable.
  PRs for code changes without an associated issue *will not be merged*.
  See CONTRIBUTING.md for more information.

@@ -26,8 +26,8 @@ Resolves #

### Checklist

-- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me.
-- [ ] I have run this code in development, and it appears to resolve the stated issue.
-- [ ] This PR includes tests, or tests are not required or relevant for this PR.
-- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX.
-- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions.
+- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me
+- [ ] I have run this code in development and it appears to resolve the stated issue
+- [ ] This PR includes tests, or tests are not required/relevant for this PR
+- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX
+- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions
.github/workflows/auto-respond-bug-reports.yml | 50 (deleted)
@@ -1,50 +0,0 @@
# **what?**
# Check if an issue is opened near or during an extended holiday period.
# If so, post an automatically-generated comment about the holiday for bug reports.
# Also provide specific information to customers of dbt Cloud.

# **why?**
# Explain why responses will be delayed during our holiday period.

# **when?**
# This will run when new issues are opened.

name: Auto-Respond to Bug Reports During Holiday Period

on:
  issues:
    types:
      - opened

permissions:
  contents: read
  issues: write

jobs:
  auto-response:
    runs-on: ubuntu-latest
    steps:
      - name: Check if current date is within holiday period
        id: date-check
        run: |
          current_date=$(date -u +"%Y-%m-%d")
          start_date="2024-12-23"
          end_date="2025-01-05"

          if [[ "$current_date" < "$start_date" || "$current_date" > "$end_date" ]]; then
            echo "outside_holiday=true" >> $GITHUB_ENV
          else
            echo "outside_holiday=false" >> $GITHUB_ENV
          fi

      - name: Post comment
        if: ${{ env.outside_holiday == 'false' && contains(github.event.issue.labels.*.name, 'bug') }}
        run: |
          gh issue comment ${{ github.event.issue.number }} --repo ${{ github.repository }} --body "Thank you for your bug report! Our team will be out of the office for [Christmas and our Global Week of Rest](https://handbook.getdbt.com/docs/time_off#2024-us-holidays), from December 25, 2024, through January 3, 2025.

          We will review your issue as soon as possible after returning.
          Thank you for your understanding, and happy holidays! 🎄🎉

          If you are a customer of dbt Cloud, please contact our Customer Support team via the dbt Cloud web interface or email **support@dbtlabs.com**."
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
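The date check in the deleted workflow relies on bash string comparison, which is safe here because ISO `YYYY-MM-DD` dates sort lexicographically. A minimal sketch of the same test in Python (dates copied from the workflow above):

```python
from datetime import datetime, timezone

# ISO dates compare correctly as strings, which is why the bash [[ < ]] test works.
current = datetime.now(timezone.utc).strftime("%Y-%m-%d")
outside_holiday = current < "2024-12-23" or current > "2025-01-05"
print("outside_holiday =", str(outside_holiday).lower())
```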
.github/workflows/check-artifact-changes.yml | 2
@@ -32,7 +32,7 @@ jobs:
        run: |
          echo "CI failure: Artifact changes checked in core/dbt/artifacts directory."
          echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}"
-          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema."
+          echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR."
          exit 1

      - name: CI check passed
.github/workflows/docs-issue.yml | 4
@@ -36,6 +36,6 @@ jobs:
    uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main
    with:
      issue_repository: "dbt-labs/docs.getdbt.com"
-      issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
-      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}"
+      issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}"
+      issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated."
    secrets: inherit
.github/workflows/main.yml | 15
@@ -52,14 +52,13 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
-          python-version: '3.9'
+          python-version: '3.8'

      - name: Install python dependencies
        run: |
          python -m pip install --user --upgrade pip
          python -m pip --version
-          make dev
+          make dev_req
          mypy --version
          dbt --version

@@ -75,7 +74,7 @@
    strategy:
      fail-fast: false
      matrix:
-        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]

    env:
      TOXENV: "unit"
@@ -112,7 +111,7 @@

      - name: Upload Unit Test Coverage to Codecov
        if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: unit
@@ -140,7 +139,7 @@
      - name: generate include
        id: generate-include
        run: |
-          INCLUDE=('"python-version":"3.9","os":"windows-latest"' '"python-version":"3.9","os":"macos-14"' )
+          INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-12"' )
          INCLUDE_GROUPS="["
          for include in ${INCLUDE[@]}; do
            for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do
@@ -162,7 +161,7 @@
    strategy:
      fail-fast: false
      matrix:
-        python-version: [ "3.9", "3.10", "3.11", "3.12" ]
+        python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
        os: [ubuntu-20.04]
        split-group: ${{ fromJson(needs.integration-metadata.outputs.split-groups) }}
        include: ${{ fromJson(needs.integration-metadata.outputs.include) }}
@@ -230,7 +229,7 @@

      - name: Upload Integration Test Coverage to Codecov
        if: ${{ matrix.python-version == '3.11' }}
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          flags: integration
@@ -264,7 +263,7 @@
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
-          python-version: '3.9'
+          python-version: '3.8'

      - name: Install python dependencies
        run: |
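For context, the generate-include step above builds a JSON matrix by crossing the pinned os/python pairs with the split-group numbers. A minimal sketch of the equivalent construction in Python (the worker count is an assumption standing in for `PYTHON_INTEGRATION_TEST_WORKERS`):

```python
import json

# Pinned entries mirror the INCLUDE array above (main-branch values shown).
include = [
    {"python-version": "3.9", "os": "windows-latest"},
    {"python-version": "3.9", "os": "macos-14"},
]
workers = 2  # assumption: stands in for env.PYTHON_INTEGRATION_TEST_WORKERS

# Cross each pinned entry with every split group, like the nested bash loops.
groups = [{**entry, "split-group": str(g)} for entry in include for g in range(1, workers + 1)]
print(json.dumps(groups))
```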
.github/workflows/model_performance.yml | 2
@@ -150,7 +150,7 @@ jobs:
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
-          python-version: "3.9"
+          python-version: "3.8"

      - name: Install dbt
        run: pip install dbt-postgres==${{ needs.set-variables.outputs.release_id }}
.github/workflows/release.yml | 21
@@ -247,24 +247,3 @@

    secrets:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }}

-  testing-slack-notification:
-    # sends notifications to #slackbot-test
-    name: Testing - Slack Notification
-    if: ${{ failure() && inputs.test_run && !inputs.nightly_release }}
-
-    needs:
-      [
-        bump-version-generate-changelog,
-        build-test-package,
-        github-release,
-        pypi-release,
-        docker-release,
-      ]
-
-    uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main
-    with:
-      status: "failure"
-
-    secrets:
-      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }}
.github/workflows/schema-check.yml | 4
@@ -30,14 +30,14 @@ env:

jobs:
  checking-schemas:
-    name: "Post-merge schema changes required"
+    name: "Checking schemas"
    runs-on: ubuntu-latest

    steps:
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
-          python-version: 3.9
+          python-version: 3.8

      - name: Checkout dbt repo
        uses: actions/checkout@v4

@@ -76,7 +76,7 @@ jobs:
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
-          python-version: "3.9"
+          python-version: "3.8"

      - name: Install python dependencies
        run: |
.github/workflows/test-repeater.yml | 5
@@ -27,6 +27,7 @@ on:
        description: 'Version of Python to Test Against'
        type: choice
        options:
+          - '3.8'
          - '3.9'
          - '3.10'
          - '3.11'
@@ -35,7 +36,7 @@ on:
        type: choice
        options:
          - 'ubuntu-latest'
-          - 'macos-14'
+          - 'macos-12'
          - 'windows-latest'
      num_runs_per_batch:
        description: 'Max number of times to run the test per batch. We always run 10 batches.'
@@ -100,7 +101,7 @@ jobs:

      # mac and windows don't use make due to limitations with docker with those runners in GitHub
      - name: "Set up postgres (macos)"
-        if: inputs.os == 'macos-14'
+        if: inputs.os == 'macos-12'
        uses: ./.github/actions/setup-postgres-macos

      - name: "Set up postgres (windows)"
.gitignore | 6
@@ -57,9 +57,6 @@ test.env
makefile.test.env
*.pytest_cache/

-# Unit test artifacts
-index.html
-
# Translations
*.mo
@@ -108,6 +105,3 @@ venv/

# poetry
poetry.lock
-
-# asdf
-.tool-versions
@@ -1,4 +1,4 @@
[settings]
profile=black
extend_skip_glob=.github/*,third-party-stubs/*,scripts/*
-known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces
+known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interface
@@ -3,7 +3,7 @@

exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py)

-# Force all unspecified python hooks to run python 3.9
+# Force all unspecified python hooks to run python 3.8
default_language_version:
  python: python3

@@ -15,19 +15,16 @@ repos:
      args: [--unsafe]
    - id: check-json
    - id: end-of-file-fixer
      exclude: schemas/dbt/manifest/
    - id: trailing-whitespace
      exclude_types:
        - "markdown"
    - id: check-case-conflict
  - repo: https://github.com/pycqa/isort
    # rev must match what's in dev-requirements.txt
-    rev: 5.13.2
+    rev: 5.12.0
    hooks:
      - id: isort
  - repo: https://github.com/psf/black
    # rev must match what's in dev-requirements.txt
-    rev: 24.3.0
+    rev: 22.3.0
    hooks:
      - id: black
      - id: black
@@ -37,7 +34,6 @@ repos:
        - "--check"
        - "--diff"
  - repo: https://github.com/pycqa/flake8
    # rev must match what's in dev-requirements.txt
    rev: 4.0.1
    hooks:
      - id: flake8
@@ -45,7 +41,6 @@
        alias: flake8-check
        stages: [manual]
  - repo: https://github.com/pre-commit/mirrors-mypy
    # rev must match what's in dev-requirements.txt
    rev: v1.4.1
    hooks:
      - id: mypy
@@ -32,7 +32,7 @@ This is the docs website code. It comes from the dbt-docs repository, and is gen
## Adapters

dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc.
-Note: dbt-postgres used to exist in dbt-core but is now in [a separate repo](https://github.com/dbt-labs/dbt-adapters/dbt-postgres)
+Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres)

Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it.
@@ -10,7 +10,6 @@
For information on prior major and minor releases, see their changelogs:

-* [1.9](https://github.com/dbt-labs/dbt-core/blob/1.9.latest/CHANGELOG.md)
* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md)
* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md)
* [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md)
@@ -170,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt

```sh
# run all unit tests in a file
-python3 -m pytest tests/unit/test_invocation_id.py
+python3 -m pytest tests/unit/test_base_column.py
# run a specific unit test
-python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id
+python3 -m pytest tests/unit/test_base_column.py::TestNumericType::test__numeric_type
# run specific Postgres functional tests
python3 -m pytest tests/functional/sources
```
@@ -33,6 +33,9 @@ RUN apt-get update \
    python-is-python3 \
    python-dev-is-python3 \
    python3-pip \
+    python3.8 \
+    python3.8-dev \
+    python3.8-venv \
    python3.9 \
    python3.9-dev \
    python3.9-venv \
Makefile | 4
@@ -144,7 +144,3 @@ help: ## Show this help message.
	@echo
	@echo 'options:'
	@echo 'use USE_DOCKER=true to run target in a docker container'

-.PHONY: json_schema
-json_schema: ## Update generated JSON schema using code changes.
-	scripts/collect-artifact-schema.py --path schemas
codecov.yml | 26
@@ -1,7 +1,6 @@
ignore:
  - ".github"
-  - ".changes"

coverage:
  status:
    project:
@@ -12,28 +11,3 @@ coverage:
      default:
        target: auto
        threshold: 80%

-comment:
-  layout: "header, diff, flags, components" # show component info in the PR comment
-
-component_management:
-  default_rules: # default rules that will be inherited by all components
-    statuses:
-      - type: project # in this case every component that doesn't have a status defined will have a project type one
-        target: auto
-        threshold: 0.1%
-      - type: patch
-        target: 80%
-  individual_components:
-    - component_id: unittests
-      name: "Unit Tests"
-      flag_regexes:
-        - "unit"
-      statuses:
-        - type: patch
-          target: 80%
-          threshold: 5%
-    - component_id: integrationtests
-      name: "Integration Tests"
-      flag_regexes:
-        - "integration"
@@ -29,10 +29,6 @@ All existing resources are defined under `dbt/artifacts/resources/v1`.

## Making changes to dbt/artifacts

-### All changes
-
-All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work.
-
### Non-breaking changes

Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are:
@@ -46,9 +42,9 @@ These types of minor, non-breaking changes are tested by [tests/unit/artifacts/t

#### Updating [schemas.getdbt.com](https://schemas.getdbt.com)
Non-breaking changes to artifact schemas require an update to the corresponding jsonschemas published to [schemas.getdbt.com](https://schemas.getdbt.com), which are defined in https://github.com/dbt-labs/schemas.getdbt.com. To do so:
Note this must be done AFTER the core pull request is merged, otherwise we may end up with unresolvable conflicts and schemas that are invalid prior to base pull request merge. You may create the schemas.getdbt.com pull request prior to merging the base pull request, but do not merge until afterward.
1. Create a PR in https://github.com/dbt-labs/schemas.getdbt.com which reflects the schema changes to the artifact. The schema can be updated in-place for non-breaking changes. Example PR: https://github.com/dbt-labs/schemas.getdbt.com/pull/39
2. Merge the https://github.com/dbt-labs/schemas.getdbt.com PR
3. Observe the `Artifact Schema Check` CI check pass on the `dbt-core` PR that updates the artifact schemas, and merge the `dbt-core` PR!

Note: Although `jsonschema` validation using the schemas in [schemas.getdbt.com](https://schemas.getdbt.com) is not encouraged or formally supported, `jsonschema` validation should still continue to work once the schemas are updated because they are forward-compatible and can therefore be used to validate previous minor versions of the schema.
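To make the README's note concrete, a minimal sketch of validating a locally generated manifest against a published artifact schema. This is illustrative only (the README itself says such validation is not formally supported); the file paths are assumptions, and the schema would be fetched from schemas.getdbt.com:

```python
import json

import jsonschema  # assumption: the jsonschema package is installed

# Paths are illustrative: a dbt-generated manifest and a schema file
# previously downloaded from schemas.getdbt.com.
with open("target/manifest.json") as f:
    manifest = json.load(f)

with open("manifest_schema.json") as f:
    schema = json.load(f)

jsonschema.validate(instance=manifest, schema=schema)  # raises on mismatch
print("manifest conforms to the schema")
```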
@@ -38,7 +38,6 @@ from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsO
from dbt.artifacts.resources.v1.metric import (
    ConstantPropertyInput,
    ConversionTypeParams,
-    CumulativeTypeParams,
    Metric,
    MetricConfig,
    MetricInput,
@@ -46,12 +45,7 @@ from dbt.artifacts.resources.v1.metric import (
    MetricTimeWindow,
    MetricTypeParams,
)
-from dbt.artifacts.resources.v1.model import (
-    Model,
-    ModelConfig,
-    ModelFreshness,
-    TimeSpine,
-)
+from dbt.artifacts.resources.v1.model import Model, ModelConfig
from dbt.artifacts.resources.v1.owner import Owner
from dbt.artifacts.resources.v1.saved_query import (
    Export,
@@ -68,13 +68,3 @@ class TimePeriod(StrEnum):

    def plural(self) -> str:
        return str(self) + "s"
-
-
-class BatchSize(StrEnum):
-    hour = "hour"
-    day = "day"
-    month = "month"
-    year = "year"
-
-    def plural(self) -> str:
-        return str(self) + "s"
@@ -10,7 +10,6 @@ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
from dbt_common.contracts.constraints import ColumnLevelConstraint
from dbt_common.contracts.util import Mergeable
from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin
from dbt_semantic_interfaces.type_enums import TimeGranularity

NodeVersion = Union[str, float]

@@ -67,8 +66,6 @@ class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    _extra: Dict[str, Any] = field(default_factory=dict)
-    granularity: Optional[TimeGranularity] = None
-    doc_blocks: List[str] = field(default_factory=list)


@dataclass
@@ -195,17 +192,13 @@ class ParsedResource(ParsedResourceMandatory):
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    created_at: float = field(default_factory=lambda: time.time())
    config_call_dict: Dict[str, Any] = field(default_factory=dict)
-    unrendered_config_call_dict: Dict[str, Any] = field(default_factory=dict)
    relation_name: Optional[str] = None
    raw_code: str = ""
-    doc_blocks: List[str] = field(default_factory=list)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
        if context and context.get("artifact") and "config_call_dict" in dct:
            del dct["config_call_dict"]
-        if context and context.get("artifact") and "unrendered_config_call_dict" in dct:
-            del dct["unrendered_config_call_dict"]
        return dct
@@ -80,9 +80,6 @@ class NodeConfig(NodeAndTestConfig):
    # 'mergebehavior' dictionary
    materialized: str = "view"
    incremental_strategy: Optional[str] = None
-    batch_size: Any = None
-    lookback: Any = 1
-    begin: Any = None
    persist_docs: Dict[str, Any] = field(default_factory=dict)
    post_hook: List[Hook] = field(
        default_factory=list,
@@ -125,8 +122,6 @@ class NodeConfig(NodeAndTestConfig):
        default_factory=ContractConfig,
        metadata=MergeBehavior.Update.meta(),
    )
-    event_time: Any = None
-    concurrent_batches: Any = None

    def __post_init__(self):
        # we validate that node_color has a suitable value to prevent dbt-docs from crashing
@@ -2,6 +2,13 @@ import time
from dataclasses import dataclass, field
from typing import Any, Dict, List, Literal, Optional

+from dbt_semantic_interfaces.references import MeasureReference, MetricReference
+from dbt_semantic_interfaces.type_enums import (
+    ConversionCalculationType,
+    MetricType,
+    TimeGranularity,
+)
+
from dbt.artifacts.resources.base import GraphResource
from dbt.artifacts.resources.types import NodeType
from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
@@ -11,13 +18,6 @@ from dbt.artifacts.resources.v1.semantic_layer_components import (
)
from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_semantic_interfaces.references import MeasureReference, MetricReference
-from dbt_semantic_interfaces.type_enums import (
-    ConversionCalculationType,
-    MetricType,
-    PeriodAggregation,
-    TimeGranularity,
-)

"""
The following classes are dataclasses which are used to construct the Metric
@@ -46,15 +46,7 @@ class MetricInputMeasure(dbtClassMixin):
@dataclass
class MetricTimeWindow(dbtClassMixin):
    count: int
-    granularity: str
-
-    @property
-    def window_string(self) -> str:  # noqa: D
-        return f"{self.count} {self.granularity}"
-
-    @property
-    def is_standard_granularity(self) -> bool:  # noqa: D
-        return self.granularity.casefold() in {item.value.casefold() for item in TimeGranularity}
+    granularity: TimeGranularity

@@ -63,7 +55,7 @@ class MetricInput(dbtClassMixin):
    filter: Optional[WhereFilterIntersection] = None
    alias: Optional[str] = None
    offset_window: Optional[MetricTimeWindow] = None
-    offset_to_grain: Optional[str] = None
+    offset_to_grain: Optional[TimeGranularity] = None

    def as_reference(self) -> MetricReference:
        return MetricReference(element_name=self.name)
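As context for the `MetricTimeWindow` hunk above, a minimal sketch of the main-branch behavior being removed here, with the `TimeGranularity` enum stubbed as a plain set of names (the real enum lives in dbt-semantic-interfaces):

```python
from dataclasses import dataclass

# Stub standing in for dbt-semantic-interfaces' TimeGranularity values.
STANDARD_GRANULARITIES = {"day", "week", "month", "quarter", "year"}

@dataclass
class MetricTimeWindow:
    count: int
    granularity: str  # main allows custom grains, hence a free-form string

    @property
    def window_string(self) -> str:
        return f"{self.count} {self.granularity}"

    @property
    def is_standard_granularity(self) -> bool:
        return self.granularity.casefold() in STANDARD_GRANULARITIES

w = MetricTimeWindow(count=7, granularity="day")
print(w.window_string, w.is_standard_granularity)  # 7 day True
```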
@@ -88,13 +80,6 @@ class ConversionTypeParams(dbtClassMixin):
    constant_properties: Optional[List[ConstantPropertyInput]] = None


-@dataclass
-class CumulativeTypeParams(dbtClassMixin):
-    window: Optional[MetricTimeWindow] = None
-    grain_to_date: Optional[str] = None
-    period_agg: PeriodAggregation = PeriodAggregation.FIRST
-
-
@dataclass
class MetricTypeParams(dbtClassMixin):
    measure: Optional[MetricInputMeasure] = None
@@ -103,12 +88,9 @@ class MetricTypeParams(dbtClassMixin):
    denominator: Optional[MetricInput] = None
    expr: Optional[str] = None
    window: Optional[MetricTimeWindow] = None
-    grain_to_date: Optional[TimeGranularity] = (
-        None  # legacy, use cumulative_type_params.grain_to_date
-    )
+    grain_to_date: Optional[TimeGranularity] = None
    metrics: Optional[List[MetricInput]] = None
    conversion_type_params: Optional[ConversionTypeParams] = None
-    cumulative_type_params: Optional[CumulativeTypeParams] = None


@dataclass
@@ -131,7 +113,6 @@ class Metric(GraphResource):
    type_params: MetricTypeParams
    filter: Optional[WhereFilterIntersection] = None
    metadata: Optional[SourceFileMetadata] = None
-    time_granularity: Optional[str] = None
    resource_type: Literal[NodeType.Metric]
    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
    tags: List[str] = field(default_factory=list)
@@ -1,9 +1,8 @@
-import enum
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, List, Literal, Optional

-from dbt.artifacts.resources.types import AccessType, NodeType, TimePeriod
+from dbt.artifacts.resources.types import AccessType, NodeType
from dbt.artifacts.resources.v1.components import (
    CompiledResource,
    DeferRelation,
@@ -12,7 +11,6 @@ from dbt.artifacts.resources.v1.components import (
from dbt.artifacts.resources.v1.config import NodeConfig
from dbt_common.contracts.config.base import MergeBehavior
from dbt_common.contracts.constraints import ModelLevelConstraint
from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin


@dataclass
@@ -21,36 +19,6 @@ class ModelConfig(NodeConfig):
        default=AccessType.Protected,
        metadata=MergeBehavior.Clobber.meta(),
    )
-    generate_latest: bool = False
-
-
-@dataclass
-class CustomGranularity(dbtClassMixin):
-    name: str
-    column_name: Optional[str] = None
-
-
-@dataclass
-class TimeSpine(dbtClassMixin):
-    standard_granularity_column: str
-    custom_granularities: List[CustomGranularity] = field(default_factory=list)
-
-
-class ModelFreshnessDependsOnOptions(enum.Enum):
-    all = "all"
-    any = "any"
-
-
-@dataclass
-class ModelBuildAfter(ExtensibleDbtClassMixin):
-    depends_on: ModelFreshnessDependsOnOptions = ModelFreshnessDependsOnOptions.any
-    count: int = 0
-    period: TimePeriod = TimePeriod.hour
-
-
-@dataclass
-class ModelFreshness(ExtensibleDbtClassMixin):
-    build_after: ModelBuildAfter = field(default_factory=ModelBuildAfter)


@dataclass
@@ -64,8 +32,6 @@ class Model(CompiledResource):
    deprecation_date: Optional[datetime] = None
    defer_relation: Optional[DeferRelation] = None
    primary_key: List[str] = field(default_factory=list)
-    time_spine: Optional[TimeSpine] = None
-    freshness: Optional[ModelFreshness] = None

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
@@ -1,10 +1,10 @@
from dataclasses import dataclass
-from typing import List, Optional, Union
+from typing import Optional

from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed


@dataclass
class Owner(AdditionalPropertiesAllowed):
-    email: Union[str, List[str], None] = None
+    email: Optional[str] = None
    name: Optional[str] = None
@@ -2,22 +2,21 @@ from __future__ import annotations

import time
from dataclasses import dataclass, field
-from typing import Any, Dict, List, Literal, Optional, Union
+from typing import Any, Dict, List, Literal, Optional

+from dbt_semantic_interfaces.type_enums.export_destination_type import (
+    ExportDestinationType,
+)
+
from dbt.artifacts.resources.base import GraphResource
from dbt.artifacts.resources.types import NodeType
from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
-from dbt.artifacts.resources.v1.config import list_str, metas
from dbt.artifacts.resources.v1.semantic_layer_components import (
    SourceFileMetadata,
    WhereFilterIntersection,
)
from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
from dbt_common.contracts.config.metadata import ShowBehavior
from dbt_common.dataclass_schema import dbtClassMixin
-from dbt_semantic_interfaces.type_enums.export_destination_type import (
-    ExportDestinationType,
-)


@dataclass
@@ -36,7 +35,6 @@ class Export(dbtClassMixin):

    name: str
    config: ExportConfig
-    unrendered_config: Dict[str, str] = field(default_factory=dict)


@dataclass
@@ -46,8 +44,6 @@ class QueryParams(dbtClassMixin):
    metrics: List[str]
    group_by: List[str]
    where: Optional[WhereFilterIntersection]
-    order_by: List[str] = field(default_factory=list)
-    limit: Optional[int] = None


@dataclass
@@ -97,10 +93,6 @@ class SavedQuery(SavedQueryMandatory):
    depends_on: DependsOn = field(default_factory=DependsOn)
    created_at: float = field(default_factory=lambda: time.time())
    refs: List[RefArgs] = field(default_factory=list)
-    tags: Union[List[str], str] = field(
-        default_factory=list_str,
-        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
-    )

    @property
    def metrics(self) -> List[str]:
@@ -1,32 +1,29 @@
from dataclasses import dataclass
from typing import List, Sequence, Tuple

-from dbt_common.dataclass_schema import dbtClassMixin
from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets
from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import (
    WhereFilterParser,
)

+from dbt_common.dataclass_schema import dbtClassMixin
+

@dataclass
class WhereFilter(dbtClassMixin):
    where_sql_template: str

-    def call_parameter_sets(
-        self, custom_granularity_names: Sequence[str]
-    ) -> FilterCallParameterSets:
-        return WhereFilterParser.parse_call_parameter_sets(
-            self.where_sql_template, custom_granularity_names=custom_granularity_names
-        )
+    @property
+    def call_parameter_sets(self) -> FilterCallParameterSets:
+        return WhereFilterParser.parse_call_parameter_sets(self.where_sql_template)


@dataclass
class WhereFilterIntersection(dbtClassMixin):
    where_filters: List[WhereFilter]

-    def filter_expression_parameter_sets(
-        self, custom_granularity_names: Sequence[str]
-    ) -> Sequence[Tuple[str, FilterCallParameterSets]]:
+    @property
+    def filter_expression_parameter_sets(self) -> Sequence[Tuple[str, FilterCallParameterSets]]:
        raise NotImplementedError
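The hunk above changes the call shape of `call_parameter_sets`: a method taking custom granularity names on main, a no-argument property on the older branch. A self-contained sketch of the difference using stub classes (not the real dbt classes, which delegate to `WhereFilterParser`):

```python
# Stubs illustrating the two API shapes shown in the diff above.
class WhereFilterMain:
    def __init__(self, where_sql_template: str) -> None:
        self.where_sql_template = where_sql_template

    def call_parameter_sets(self, custom_granularity_names):
        # main: callers must pass the custom granularities in scope
        return (self.where_sql_template, tuple(custom_granularity_names))

class WhereFilterOld:
    def __init__(self, where_sql_template: str) -> None:
        self.where_sql_template = where_sql_template

    @property
    def call_parameter_sets(self):
        # older branch: a plain property, no custom-grain awareness
        return (self.where_sql_template, ())

print(WhereFilterMain("{{ Dimension('x') }}").call_parameter_sets(["fiscal_year"]))
print(WhereFilterOld("{{ Dimension('x') }}").call_parameter_sets)
```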
@@ -2,11 +2,6 @@ import time
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional, Sequence

-from dbt.artifacts.resources import SourceFileMetadata
-from dbt.artifacts.resources.base import GraphResource
-from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
-from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
-from dbt_common.dataclass_schema import dbtClassMixin
from dbt_semantic_interfaces.references import (
    DimensionReference,
    EntityReference,
@@ -22,6 +17,12 @@ from dbt_semantic_interfaces.type_enums import (
    TimeGranularity,
)

+from dbt.artifacts.resources import SourceFileMetadata
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.v1.components import DependsOn, RefArgs
+from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.dataclass_schema import dbtClassMixin
+
"""
The classes in this file are dataclasses which are used to construct the Semantic
Model node in dbt-core. Additionally, these classes need to at a minimum support
@@ -31,14 +32,6 @@ https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_inter
"""


-@dataclass
-class SemanticLayerElementConfig(dbtClassMixin):
-    meta: Dict[str, Any] = field(
-        default_factory=dict,
-        metadata=MergeBehavior.Update.meta(),
-    )
-
-
@dataclass
class Defaults(dbtClassMixin):
    agg_time_dimension: Optional[str] = None
@@ -80,7 +73,6 @@ class Dimension(dbtClassMixin):
    type_params: Optional[DimensionTypeParams] = None
    expr: Optional[str] = None
    metadata: Optional[SourceFileMetadata] = None
-    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> DimensionReference:
@@ -115,7 +107,6 @@ class Entity(dbtClassMixin):
    label: Optional[str] = None
    role: Optional[str] = None
    expr: Optional[str] = None
-    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> EntityReference:
@@ -157,7 +148,6 @@ class Measure(dbtClassMixin):
    agg_params: Optional[MeasureAggregationParameters] = None
    non_additive_dimension: Optional[NonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
-    config: Optional[SemanticLayerElementConfig] = None

    @property
    def reference(self) -> MeasureReference:
@@ -1,74 +1,56 @@
|
||||
from dataclasses import dataclass, field
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, List, Literal, Optional, Union
|
||||
|
||||
from dbt.artifacts.resources.types import NodeType
|
||||
from dbt.artifacts.resources.v1.components import CompiledResource, DeferRelation
|
||||
from dbt.artifacts.resources.v1.config import NodeConfig
|
||||
from dbt_common.dataclass_schema import ValidationError, dbtClassMixin
|
||||
|
||||
|
||||
@dataclass
|
||||
class SnapshotMetaColumnNames(dbtClassMixin):
|
||||
dbt_valid_to: Optional[str] = None
|
||||
dbt_valid_from: Optional[str] = None
|
||||
dbt_scd_id: Optional[str] = None
|
||||
dbt_updated_at: Optional[str] = None
|
||||
dbt_is_deleted: Optional[str] = None
|
||||
from dbt_common.dataclass_schema import ValidationError
|
||||
|
||||
|
||||
@dataclass
|
||||
class SnapshotConfig(NodeConfig):
|
||||
materialized: str = "snapshot"
|
||||
strategy: Optional[str] = None
|
||||
unique_key: Union[str, List[str], None] = None
|
||||
unique_key: Optional[str] = None
|
||||
target_schema: Optional[str] = None
|
||||
target_database: Optional[str] = None
|
||||
updated_at: Optional[str] = None
|
||||
# Not using Optional because of serialization issues with a Union of str and List[str]
|
||||
check_cols: Union[str, List[str], None] = None
|
||||
snapshot_meta_column_names: SnapshotMetaColumnNames = field(
|
||||
default_factory=SnapshotMetaColumnNames
|
||||
)
|
||||
dbt_valid_to_current: Optional[str] = None
|
||||
|
||||
@property
|
||||
def snapshot_table_column_names(self):
|
||||
return {
|
||||
"dbt_valid_from": self.snapshot_meta_column_names.dbt_valid_from or "dbt_valid_from",
|
||||
"dbt_valid_to": self.snapshot_meta_column_names.dbt_valid_to or "dbt_valid_to",
|
||||
"dbt_scd_id": self.snapshot_meta_column_names.dbt_scd_id or "dbt_scd_id",
|
||||
"dbt_updated_at": self.snapshot_meta_column_names.dbt_updated_at or "dbt_updated_at",
|
||||
"dbt_is_deleted": self.snapshot_meta_column_names.dbt_is_deleted or "dbt_is_deleted",
|
||||
}

    def final_validate(self):
        if not self.strategy or not self.unique_key:
    @classmethod
    def validate(cls, data):
        super().validate(data)
        # Note: currently you can't just set these keys in schema.yml because this validation
        # will fail when parsing the snapshot node.
        if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"):
            raise ValidationError(
                "Snapshots must be configured with a 'strategy' and 'unique_key'."
                "Snapshots must be configured with a 'strategy', 'unique_key', "
                "and 'target_schema'."
            )
        if self.strategy == "check":
            if not self.check_cols:
        if data.get("strategy") == "check":
            if not data.get("check_cols"):
                raise ValidationError(
                    "A snapshot configured with the check strategy must "
                    "specify a check_cols configuration."
                )
            if isinstance(self.check_cols, str) and self.check_cols != "all":
            if isinstance(data["check_cols"], str) and data["check_cols"] != "all":
                raise ValidationError(
                    f"Invalid value for 'check_cols': {self.check_cols}. "
                    f"Invalid value for 'check_cols': {data['check_cols']}. "
                    "Expected 'all' or a list of strings."
                )
        elif self.strategy == "timestamp":
            if not self.updated_at:
        elif data.get("strategy") == "timestamp":
            if not data.get("updated_at"):
                raise ValidationError(
                    "A snapshot configured with the timestamp strategy "
                    "must specify an updated_at configuration."
                )
            if self.check_cols:
            if data.get("check_cols"):
                raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'")
        # If the strategy is not 'check' or 'timestamp' it's a custom strategy,
        # formerly supported with GenericSnapshotConfig

        if self.materialized and self.materialized != "snapshot":
        if data.get("materialized") and data.get("materialized") != "snapshot":
            raise ValidationError("A snapshot must have a materialized value of 'snapshot'")

    # Called by "calculate_node_config_dict" in ContextConfigGenerator

@@ -19,7 +19,6 @@ from dbt_common.exceptions import CompilationError
@dataclass
class SourceConfig(BaseConfig):
    enabled: bool = True
    event_time: Any = None


@dataclass
@@ -59,7 +58,6 @@ class ParsedSourceMandatory(GraphResource, HasRelationMetadata):
class SourceDefinition(ParsedSourceMandatory):
    quoting: Quoting = field(default_factory=Quoting)
    loaded_at_field: Optional[str] = None
    loaded_at_query: Optional[str] = None
    freshness: Optional[FreshnessThreshold] = None
    external: Optional[ExternalTable] = None
    description: str = ""
@@ -72,6 +70,3 @@ class SourceDefinition(ParsedSourceMandatory):
    unrendered_config: Dict[str, Any] = field(default_factory=dict)
    relation_name: Optional[str] = None
    created_at: float = field(default_factory=lambda: time.time())
    unrendered_database: Optional[str] = None
    unrendered_schema: Optional[str] = None
    doc_blocks: List[str] = field(default_factory=list)

@@ -20,7 +20,6 @@ class UnitTestConfig(BaseConfig):
        default_factory=dict,
        metadata=MergeBehavior.Update.meta(),
    )
    enabled: bool = True


class UnitTestFormat(StrEnum):

@@ -77,11 +77,8 @@ class BaseArtifactMetadata(dbtClassMixin):
# remote-compile-result
# remote-execution-result
# remote-run-result
S = TypeVar("S", bound="VersionedSchema")


def schema_version(name: str, version: int):
    def inner(cls: Type[S]):
    def inner(cls: Type[VersionedSchema]):
        cls.dbt_schema_version = SchemaVersion(
            name=name,
            version=version,

@@ -1,24 +0,0 @@
from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from typing import List, Tuple

from dbt_common.dataclass_schema import dbtClassMixin

BatchType = Tuple[datetime, datetime]


@dataclass
class BatchResults(dbtClassMixin):
    successful: List[BatchType] = field(default_factory=list)
    failed: List[BatchType] = field(default_factory=list)

    def __add__(self, other: BatchResults) -> BatchResults:
        return BatchResults(
            successful=self.successful + other.successful,
            failed=self.failed + other.failed,
        )

    def __len__(self):
        return len(self.successful) + len(self.failed)
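A short usage sketch of the BatchResults arithmetic above (assuming the class as defined in this diff is in scope):

from datetime import datetime

# Each batch is a (start, end) tuple, per the BatchType alias above.
jan = (datetime(2024, 1, 1), datetime(2024, 2, 1))
feb = (datetime(2024, 2, 1), datetime(2024, 3, 1))

a = BatchResults(successful=[jan])
b = BatchResults(successful=[feb], failed=[jan])

merged = a + b      # __add__ concatenates the successful and failed lists
print(len(merged))  # __len__ counts both lists: 3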
@@ -1,11 +1,2 @@
# alias to latest
from dbt.artifacts.schemas.catalog.v1.catalog import *  # noqa
from dbt_common.contracts.metadata import (
    CatalogKey,
    CatalogTable,
    ColumnMap,
    ColumnMetadata,
    StatsDict,
    StatsItem,
    TableMetadata,
)

@@ -1,18 +1,71 @@
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, Dict, List, Optional, Union
from typing import Any, Dict, List, NamedTuple, Optional, Union

from dbt.artifacts.schemas.base import (
    ArtifactMixin,
    BaseArtifactMetadata,
    schema_version,
)
from dbt_common.contracts.metadata import CatalogTable
from dbt_common.dataclass_schema import dbtClassMixin
from dbt_common.utils.formatting import lowercase

Primitive = Union[bool, str, float, None]
PrimitiveDict = Dict[str, Primitive]

CatalogKey = NamedTuple(
    "CatalogKey", [("database", Optional[str]), ("schema", str), ("name", str)]
)


@dataclass
class StatsItem(dbtClassMixin):
    id: str
    label: str
    value: Primitive
    include: bool
    description: Optional[str] = None


StatsDict = Dict[str, StatsItem]


@dataclass
class ColumnMetadata(dbtClassMixin):
    type: str
    index: int
    name: str
    comment: Optional[str] = None


ColumnMap = Dict[str, ColumnMetadata]


@dataclass
class TableMetadata(dbtClassMixin):
    type: str
    schema: str
    name: str
    database: Optional[str] = None
    comment: Optional[str] = None
    owner: Optional[str] = None


@dataclass
class CatalogTable(dbtClassMixin):
    metadata: TableMetadata
    columns: ColumnMap
    stats: StatsDict
    # the same table with two unique IDs will just be listed two times
    unique_id: Optional[str] = None

    def key(self) -> CatalogKey:
        return CatalogKey(
            lowercase(self.metadata.database),
            self.metadata.schema.lower(),
            self.metadata.name.lower(),
        )
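A usage sketch of the key() normalization above (assuming the classes in this diff are in scope; lowercase from dbt_common handles a None database):

table = CatalogTable(
    metadata=TableMetadata(type="BASE TABLE", schema="Analytics", name="ORDERS", database="PROD"),
    columns={},
    stats={},
)
print(table.key())  # CatalogKey(database='prod', schema='analytics', name='orders')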


@dataclass
class CatalogMetadata(BaseArtifactMetadata):

@@ -28,7 +28,6 @@ from dbt.artifacts.schemas.base import (
    schema_version,
)
from dbt.artifacts.schemas.upgrades import upgrade_manifest_json
from dbt_common.exceptions import DbtInternalError

NodeEdgeMap = Dict[str, List[str]]
UniqueID = str
@@ -181,13 +180,3 @@ class WritableManifest(ArtifactMixin):
        if manifest_schema_version < cls.dbt_schema_version.version:
            data = upgrade_manifest_json(data, manifest_schema_version)
        return cls.from_dict(data)

    @classmethod
    def validate(cls, _):
        # When dbt tries to load an artifact with additional optional fields
        # that are not present in the schema, from_dict will work fine.
        # As long as validate is not called, the schema will not be enforced.
        # This is intentional, as it allows for safer schema upgrades.
        raise DbtInternalError(
            "The WritableManifest should never be validated directly to allow for schema upgrades."
        )

@@ -10,12 +10,6 @@ from dbt_common.utils import cast_to_int, cast_to_str

@dataclass
class TimingInfo(dbtClassMixin):
    """
    Represents a step in the execution of a node.
    `name` should be one of: compile, execute, or other
    Do not call directly, use `collect_timing_info` instead.
    """

    name: str
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
@@ -27,7 +21,7 @@ class TimingInfo(dbtClassMixin):
        self.completed_at = datetime.utcnow()

    def to_msg_dict(self):
        msg_dict = {"name": str(self.name)}
        msg_dict = {"name": self.name}
        if self.started_at:
            msg_dict["started_at"] = datetime_to_json_string(self.started_at)
        if self.completed_at:
@@ -61,18 +55,14 @@ class NodeStatus(StrEnum):
    Fail = "fail"
    Warn = "warn"
    Skipped = "skipped"
    PartialSuccess = "partial success"
    Pass = "pass"
    RuntimeErr = "runtime error"
    NoOp = "no-op"


class RunStatus(StrEnum):
    Success = NodeStatus.Success
    Error = NodeStatus.Error
    Skipped = NodeStatus.Skipped
    PartialSuccess = NodeStatus.PartialSuccess
    NoOp = NodeStatus.NoOp
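Since these are string-valued enums, members compare equal to their underlying strings, which is what lets RunStatus reuse NodeStatus values directly. A toy sketch of the pattern (not the dbt classes; dbt ships its own StrEnum helper):

from enum import Enum


class StrEnum(str, Enum):
    pass


class NodeStatus(StrEnum):
    Success = "success"
    NoOp = "no-op"


class RunStatus(StrEnum):
    Success = NodeStatus.Success  # reuses the same underlying string
    NoOp = NodeStatus.NoOp


print(RunStatus.Success == "success")     # True
print(RunStatus.NoOp == NodeStatus.NoOp)  # True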


class TestStatus(StrEnum):

@@ -1,5 +1,3 @@
from __future__ import annotations

import copy
import threading
from dataclasses import dataclass, field
@@ -19,7 +17,6 @@ from dbt.artifacts.schemas.base import (
    get_artifact_schema_version,
    schema_version,
)
from dbt.artifacts.schemas.batch_results import BatchResults
from dbt.artifacts.schemas.results import (
    BaseResult,
    ExecutionResult,
@@ -37,7 +34,6 @@ class RunResult(NodeResult):
    agate_table: Optional["agate.Table"] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    batch_results: Optional[BatchResults] = None

    @property
    def skipped(self):
@@ -55,7 +51,6 @@ class RunResult(NodeResult):
            node=node,
            adapter_response={},
            failures=None,
            batch_results=None,
        )


@@ -72,7 +67,6 @@ class RunResultOutput(BaseResult):
    compiled: Optional[bool]
    compiled_code: Optional[str]
    relation_name: Optional[str]
    batch_results: Optional[BatchResults] = None


def process_run_result(result: RunResult) -> RunResultOutput:
@@ -88,7 +82,6 @@ def process_run_result(result: RunResult) -> RunResultOutput:
        message=result.message,
        adapter_response=result.adapter_response,
        failures=result.failures,
        batch_results=result.batch_results,
        compiled=result.node.compiled if compiled else None,  # type:ignore
        compiled_code=result.node.compiled_code if compiled else None,  # type:ignore
        relation_name=result.node.relation_name if compiled else None,  # type:ignore
@@ -165,8 +158,7 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
    @classmethod
    def upgrade_schema_version(cls, data):
        """This overrides the "upgrade_schema_version" call in VersionedSchema (via
        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
        """
        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
        run_results_schema_version = get_artifact_schema_version(data)
        # If less than the current version (v5), preprocess contents to match latest schema version
        if run_results_schema_version <= 5:

@@ -1,10 +1,7 @@
from typing import IO, List, Optional, Union
from typing import IO, Optional

from click.exceptions import ClickException

from dbt.artifacts.schemas.catalog import CatalogArtifact
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.results import RunExecutionResult
from dbt.utils import ExitCodes


@@ -26,7 +23,7 @@ class CliException(ClickException):

    # the typing of _file is to satisfy the signature of ClickException.show
    # overriding this method prevents click from printing any exceptions to stdout
    def show(self, _file: Optional[IO] = None) -> None:  # type: ignore[type-arg]
    def show(self, _file: Optional[IO] = None) -> None:
        pass


@@ -34,17 +31,7 @@ class ResultExit(CliException):
    """This class wraps any exception that contains results while invoking dbt, or the
    results of an invocation that did not succeed but did not throw any exceptions."""

    def __init__(
        self,
        result: Union[
            bool,  # debug
            CatalogArtifact,  # docs generate
            List[str],  # list/ls
            Manifest,  # parse
            None,  # clean, deps, init, source
            RunExecutionResult,  # build, compile, run, seed, snapshot, test, run-operation
        ] = None,
    ) -> None:
    def __init__(self, result) -> None:
        super().__init__(ExitCodes.ModelError)
        self.result = result


@@ -1,7 +1,6 @@
import os
import sys
from dataclasses import dataclass
from datetime import datetime
from importlib import import_module
from pathlib import Path
from pprint import pformat as pf
@@ -16,7 +15,7 @@ from dbt.cli.resolvers import default_log_path, default_project_dir
from dbt.cli.types import Command as CliCommand
from dbt.config.project import read_project_flags
from dbt.contracts.project import ProjectFlags
from dbt.deprecations import fire_buffered_deprecations, renamed_env_var
from dbt.deprecations import renamed_env_var
from dbt.events import ALL_EVENT_NAMES
from dbt_common import ui
from dbt_common.clients import jinja
@@ -38,7 +37,6 @@ FLAGS_DEFAULTS = {
    "STRICT_MODE": False,
    "STORE_FAILURES": False,
    "INTROSPECT": True,
    "STATE_MODIFIED_COMPARE_VARS": False,
}

DEPRECATED_PARAMS = {
@@ -59,7 +57,6 @@ def convert_config(config_name, config_value):
        ret = WarnErrorOptions(
            include=config_value.get("include", []),
            exclude=config_value.get("exclude", []),
            silence=config_value.get("silence", []),
            valid_error_names=ALL_EVENT_NAMES,
        )
        return ret
@@ -94,8 +91,6 @@ class Flags:
        # Set the default flags.
        for key, value in FLAGS_DEFAULTS.items():
            object.__setattr__(self, key, value)
        # Use to handle duplicate params in _assign_params
        flags_defaults_list = list(FLAGS_DEFAULTS.keys())

        if ctx is None:
            ctx = get_current_context()
@@ -177,29 +172,13 @@ class Flags:
                        old_name=dep_param.envvar,
                        new_name=new_param.envvar,
                    )
                # end deprecated_params

                # Set the flag value.
                is_duplicate = (
                    hasattr(self, param_name.upper())
                    and param_name.upper() not in flags_defaults_list
                )
                # First time through, set as though FLAGS_DEFAULTS hasn't been set, so not a duplicate.
                # Subsequent pass (to process "parent" params) should be treated as duplicates.
                if param_name.upper() in flags_defaults_list:
                    flags_defaults_list.remove(param_name.upper())
                # Note: the following determines whether parameter came from click default,
                # not from FLAGS_DEFAULTS in __init__.
                is_duplicate = hasattr(self, param_name.upper())
                is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT
                is_envvar = ctx.get_parameter_source(param_name) == ParameterSource.ENVIRONMENT

                flag_name = (new_name or param_name).upper()

                # envvar flags are assigned in either parent or child context if there
                # isn't an overriding cli command flag.
                # If the flag has been encountered as a child cli flag, we don't
                # want to overwrite with parent envvar, since the commandline flag takes precedence.
                if (is_duplicate and not (is_default or is_envvar)) or not is_duplicate:
                if (is_duplicate and not is_default) or not is_duplicate:
                    object.__setattr__(self, flag_name, param_value)
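The duplicate handling above encodes a precedence order; a standalone sketch of the same idea (toy function, not the Flags implementation): an already-assigned flag is only overwritten when the new value came from neither a click default nor an environment variable.

def should_assign(already_set: bool, source: str) -> bool:
    # Mirrors: (is_duplicate and not (is_default or is_envvar)) or not is_duplicate
    is_default = source == "default"
    is_envvar = source == "environment"
    return (already_set and not (is_default or is_envvar)) or not already_set


print(should_assign(already_set=True, source="commandline"))  # True: explicit CLI value overrides
print(should_assign(already_set=True, source="environment"))  # False: keep the earlier CLI value
print(should_assign(already_set=False, source="default"))     # True: first assignment always lands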

                # Track default assigned params.
@@ -310,13 +289,6 @@ class Flags:
            params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"]
        )

        # Handle arguments mutually exclusive with INLINE
        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECT", "INLINE"])
        self._assert_mutually_exclusive(params_assigned_from_default, ["SELECTOR", "INLINE"])

        # Check event_time configs for validity
        self._validate_event_time_configs()

        # Support lower cased access for legacy code.
        params = set(
            x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__")
@@ -343,9 +315,7 @@ class Flags:
        """
        set_flag = None
        for flag in group:
            flag_set_by_user = (
                hasattr(self, flag) and flag.lower() not in params_assigned_from_default
            )
            flag_set_by_user = flag.lower() not in params_assigned_from_default
            if flag_set_by_user and set_flag:
                raise DbtUsageException(
                    f"{flag.lower()}: not allowed with argument {set_flag.lower()}"
@@ -353,36 +323,6 @@ class Flags:
            elif flag_set_by_user:
                set_flag = flag

    def _validate_event_time_configs(self) -> None:
        event_time_start: datetime = (
            getattr(self, "EVENT_TIME_START") if hasattr(self, "EVENT_TIME_START") else None
        )
        event_time_end: datetime = (
            getattr(self, "EVENT_TIME_END") if hasattr(self, "EVENT_TIME_END") else None
        )

        # only do validations if at least one of `event_time_start` or `event_time_end` is specified
        if event_time_start is not None or event_time_end is not None:

            # These `ifs`, combined with the parent `if`, make it so that `event_time_start` and
            # `event_time_end` are mutually required
            if event_time_start is None:
                raise DbtUsageException(
                    "The flag `--event-time-end` was specified, but `--event-time-start` was not. "
                    "When specifying `--event-time-end`, `--event-time-start` must also be present."
                )
            if event_time_end is None:
                raise DbtUsageException(
                    "The flag `--event-time-start` was specified, but `--event-time-end` was not. "
                    "When specifying `--event-time-start`, `--event-time-end` must also be present."
                )

            # This `if` is just a sanity check that `event_time_start` is before `event_time_end`
            if event_time_start >= event_time_end:
                raise DbtUsageException(
                    "Value for `--event-time-start` must be less than `--event-time-end`"
                )
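A standalone sketch of the both-or-neither check above (toy function, not the Flags method):

from datetime import datetime
from typing import Optional


def validate_window(start: Optional[datetime], end: Optional[datetime]) -> None:
    # Mirrors the rules above: both flags or neither, and start must precede end.
    if start is None and end is None:
        return  # neither flag given: nothing to validate
    if start is None or end is None:
        raise ValueError("--event-time-start and --event-time-end must be given together")
    if start >= end:
        raise ValueError("Value for --event-time-start must be less than --event-time-end")


validate_window(datetime(2024, 1, 1), datetime(2024, 2, 1))  # passes
# validate_window(datetime(2024, 2, 1), None)  # would raise: end missing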

    def fire_deprecations(self):
        """Fires events for deprecated env_var usage."""
        [dep_fn() for dep_fn in self.deprecated_env_var_warnings]
@@ -390,8 +330,6 @@ class Flags:
        # not get pickled when written to disk as json.
        object.__delattr__(self, "deprecated_env_var_warnings")

        fire_buffered_deprecations()

    @classmethod
    def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags":
        command_arg_list = command_params(command, args_dict)

@@ -8,15 +8,12 @@ from click.exceptions import BadOptionUsage
from click.exceptions import Exit as ClickExit
from click.exceptions import NoSuchOption, UsageError

from dbt.adapters.factory import register_adapter
from dbt.artifacts.schemas.catalog import CatalogArtifact
from dbt.artifacts.schemas.run import RunExecutionResult
from dbt.cli import params as p
from dbt.cli import requires
from dbt.cli.exceptions import DbtInternalException, DbtUsageException
from dbt.cli.requires import setup_manifest
from dbt.contracts.graph.manifest import Manifest
from dbt.mp_context import get_mp_context
from dbt_common.events.base_types import EventMsg


@@ -140,7 +137,6 @@ def global_flags(func):
    @p.warn_error
    @p.warn_error_options
    @p.write_json
    @p.use_fast_test_edges
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
@@ -169,8 +165,6 @@ def cli(ctx, **kwargs):
@click.pass_context
@global_flags
@p.empty
@p.event_time_start
@p.event_time_end
@p.exclude
@p.export_saved_queries
@p.full_refresh
@@ -224,9 +218,10 @@ def clean(ctx, **kwargs):
    """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)"""
    from dbt.task.clean import CleanTask

    with CleanTask(ctx.obj["flags"], ctx.obj["project"]) as task:
        results = task.run()
        success = task.interpret_results(results)
    task = CleanTask(ctx.obj["flags"], ctx.obj["project"])

    results = task.run()
    success = task.interpret_results(results)
    return results, success


@@ -279,7 +274,6 @@ def docs_generate(ctx, **kwargs):
@click.pass_context
@global_flags
@p.browser
@p.host
@p.port
@p.profiles_dir
@p.project_dir
@@ -358,7 +352,6 @@ def compile(ctx, **kwargs):
@p.select
@p.selector
@p.inline
@p.inline_direct
@p.target_path
@p.threads
@p.vars
@@ -367,26 +360,17 @@ def compile(ctx, **kwargs):
@requires.profile
@requires.project
@requires.runtime_config
@requires.manifest
def show(ctx, **kwargs):
    """Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the
    results. Does not materialize anything to the warehouse."""
    from dbt.task.show import ShowTask, ShowTaskDirect
    from dbt.task.show import ShowTask

    if ctx.obj["flags"].inline_direct:
        # Issue the inline query directly, with no templating. Does not require
        # loading the manifest.
        register_adapter(ctx.obj["runtime_config"], get_mp_context())
        task = ShowTaskDirect(
            ctx.obj["flags"],
            ctx.obj["runtime_config"],
        )
    else:
        setup_manifest(ctx)
        task = ShowTask(
            ctx.obj["flags"],
            ctx.obj["runtime_config"],
            ctx.obj["manifest"],
        )
    task = ShowTask(
        ctx.obj["flags"],
        ctx.obj["runtime_config"],
        ctx.obj["manifest"],
    )

    results = task.run()
    success = task.interpret_results(results)
@@ -452,9 +436,9 @@ def deps(ctx, **kwargs):
            message=f"Version is required in --add-package when package source is {flags.SOURCE}",
            option_name="--add-package",
        )
    with DepsTask(flags, ctx.obj["project"]) as task:
        results = task.run()
        success = task.interpret_results(results)
    task = DepsTask(flags, ctx.obj["project"])
    results = task.run()
    success = task.interpret_results(results)
    return results, success


@@ -474,9 +458,10 @@ def init(ctx, **kwargs):
    """Initialize a new dbt project."""
    from dbt.task.init import InitTask

    with InitTask(ctx.obj["flags"]) as task:
        results = task.run()
        success = task.interpret_results(results)
    task = InitTask(ctx.obj["flags"])

    results = task.run()
    success = task.interpret_results(results)
    return results, success


@@ -553,10 +538,6 @@ def parse(ctx, **kwargs):
@p.profiles_dir
@p.project_dir
@p.empty
@p.event_time_start
@p.event_time_end
@p.sample
@p.sample_window
@p.select
@p.selector
@p.target_path
@@ -719,7 +700,6 @@ def seed(ctx, **kwargs):
@cli.command("snapshot")
@click.pass_context
@global_flags
@p.empty
@p.exclude
@p.profiles_dir
@p.project_dir
@@ -802,8 +782,6 @@ cli.commands["source"].add_command(snapshot_freshness, "snapshot-freshness")  #
@click.pass_context
@global_flags
@p.exclude
@p.resource_type
@p.exclude_resource_type
@p.profiles_dir
@p.project_dir
@p.select

@@ -1,10 +1,6 @@
from typing import Optional
from click import Choice, ParamType

import pytz
from click import Choice, Context, Parameter, ParamType

from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string
from dbt.event_time.sample_window import SampleWindow
from dbt.config.utils import exclusive_primary_alt_value_setting, parse_cli_yaml_string
from dbt.events import ALL_EVENT_NAMES
from dbt.exceptions import OptionNotYamlDictError, ValidationError
from dbt_common.exceptions import DbtValidationError
@@ -55,7 +51,12 @@ class WarnErrorOptionsType(YAML):
    def convert(self, value, param, ctx):
        # this function is being used by param in click
        include_exclude = super().convert(value, param, ctx)
        normalize_warn_error_options(include_exclude)
        exclusive_primary_alt_value_setting(
            include_exclude, "include", "error", "warn_error_options"
        )
        exclusive_primary_alt_value_setting(
            include_exclude, "exclude", "warn", "warn_error_options"
        )

        return WarnErrorOptions(
            include=include_exclude.get("include", []),
@@ -92,30 +93,3 @@ class ChoiceTuple(Choice):
            super().convert(value, param, ctx)

        return value


class SampleWindowType(ParamType):
    name = "SAMPLE_WINDOW"

    def convert(
        self, value, param: Optional[Parameter], ctx: Optional[Context]
    ) -> Optional[SampleWindow]:
        if value is None:
            return None

        if isinstance(value, str):
            try:
                # Try and identify if it's a "dict" or a "str"
                if value.lstrip()[0] == "{":
                    param_option_name: str = param.opts[0] if param.opts else param.name  # type: ignore
                    parsed_dict = parse_cli_yaml_string(value, param_option_name.strip("-"))
                    sample_window = SampleWindow.from_dict(parsed_dict)
                    sample_window.start = sample_window.start.replace(tzinfo=pytz.UTC)
                    sample_window.end = sample_window.end.replace(tzinfo=pytz.UTC)
                    return sample_window
                else:
                    return SampleWindow.from_relative_string(value)
            except Exception as e:
                self.fail(e.__str__(), param, ctx)
        else:
            self.fail(f"Cannot load SAMPLE_WINDOW from type {type(value)}", param, ctx)
Some files were not shown because too many files have changed in this diff.