mirror of https://github.com/dlt-hub/dlt.git (synced 2025-12-17 19:31:30 +00:00)
(chore) adds hub extra (#3428)
* adds hub extra
* makes hub module more user friendly when hub is not installed
* test and lint fixes
* adds plugin version check util function
* adds dlt-runtime to hub extra, minimal import tests
* bumps to dlthub 0.20.0 alpha
* lists pipelines with the cli using the same functions as the dashboard; `dlt pipeline` will list pipelines by default
* adds configured profiles method on context so only profiles with configs or pipelines are listed
* adds list of locations that contained actual configs to the provider interface
* improves workspace and profile commands
* test fixes
* fixes tests
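A minimal sketch of what the new extra enables, assuming `pip install "dlt[hub]"` has pulled in the dlthub and dlt-runtime plugin packages (the example itself is illustrative, not part of the commit):

    import dlt.hub

    if dlt.hub.__found__:
        # plugin installed: hub features are importable
        print(dlt.hub.__all__)  # ("transformation", "current", "runner", "data_quality")
    else:
        # plugin missing: the module still imports, and attribute access raises
        # MissingDependencyException with an install hint (see dlt/hub/__init__.py below)
        print(dlt.hub.__exception__)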
2 changes: .github/workflows/lint.yml (vendored)
@@ -42,7 +42,7 @@ jobs:
         run: uv lock --check

       - name: Install dependencies
-        run: uv sync --all-extras --group airflow --group providers --group pipeline --group sources --group sentry-sdk --group dbt --group streamlit
+        run: uv sync --all-extras --no-extra hub --group airflow --group providers --group pipeline --group sources --group sentry-sdk --group dbt --group streamlit

       - name: Run make lint
         run: |
11 changes: .github/workflows/test_hub.yml (vendored)
@@ -19,7 +19,7 @@ jobs:
       matrix:
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
         python-version: ["3.10", "3.11", "3.12", "3.13"]
-        dlthub_dep: ["dlthub", "https://dlt-packages.fra1.digitaloceanspaces.com/dlthub/dlthub-0.0.0+nightly-py3-none-any.whl"]
+        dlthub_dep: ["", "https://dlt-packages.fra1.digitaloceanspaces.com/dlthub/dlthub-0.0.0+nightly-py3-none-any.whl"]
         # Test all python versions on ubuntu only
         exclude:
           - os: "macos-latest"

@@ -56,6 +56,12 @@ jobs:
           activate-environment: true
           enable-cache: true

+      - name: Install min dependencies
+        run: uv sync
+
+      - name: run import tests
+        run: uv run pytest tests/hub/test_plugin_import.py
+
       # NOTE: needed for mssql source tests in plus
       - name: Install ODBC driver for SQL Server
         run: |

@@ -78,9 +84,10 @@ jobs:
         # odbcinst -q -d || true

       - name: Install all dependencies
-        run: make dev
+        run: make dev-hub

       - name: Install dlthub
+        if: matrix.dlthub_dep != ''
         run: uv run pip install --upgrade --force-reinstall --pre --no-cache-dir ${{ matrix.dlthub_dep }}

       - name: Run tests
7 changes: Makefile
@@ -44,10 +44,13 @@ has-uv:
 	uv --version

 dev: has-uv
-	uv sync --all-extras --group dev --group providers --group pipeline --group sources --group sentry-sdk --group ibis --group adbc --group dashboard-tests
+	uv sync --all-extras --no-extra hub --group dev --group providers --group pipeline --group sources --group sentry-sdk --group ibis --group adbc --group dashboard-tests

 dev-airflow: has-uv
-	uv sync --all-extras --group providers --group pipeline --group sources --group sentry-sdk --group ibis --group airflow
+	uv sync --all-extras --no-extra hub --group providers --group pipeline --group sources --group sentry-sdk --group ibis --group airflow
+
+dev-hub: has-uv
+	uv sync --all-extras --group dev --group providers --group pipeline --group sources --group sentry-sdk --group ibis --group adbc --group dashboard-tests

 lint: lint-core lint-security lint-docstrings
@@ -83,12 +83,7 @@ class WorkspaceRunContext(ProfilesRunContext):
         return os.environ.get(known_env.DLT_DATA_DIR, self._data_dir)

     def initial_providers(self) -> List[ConfigProvider]:
-        providers = [
-            EnvironProvider(),
-            ProfileSecretsTomlProvider(self.settings_dir, self.profile, self.global_dir),
-            ProfileConfigTomlProvider(self.settings_dir, self.profile, self.global_dir),
-        ]
-        return providers
+        return self._initial_providers(self.profile)

     def initialize_runtime(self, runtime_config: RuntimeConfiguration = None) -> None:
         if runtime_config is not None:
@@ -127,11 +122,8 @@ class WorkspaceRunContext(ProfilesRunContext):
         if self._config.settings.name:
             self._name = self._config.settings.name

-        self._data_dir = _to_run_dir(self._config.settings.working_dir) or default_working_dir(
-            self.settings_dir,
-            self.name,
-            self.profile,
-            DEFAULT_WORKSPACE_WORKING_FOLDER,
+        self._data_dir = (
+            _to_run_dir(self._config.settings.working_dir) or self._make_default_working_dir()
         )
         self._local_dir = _to_run_dir(self._config.settings.local_dir) or default_working_dir(
             self.run_dir,
@@ -192,6 +184,85 @@ class WorkspaceRunContext(ProfilesRunContext):
             profiles.append(pinned_profile)
         return profiles

+    def configured_profiles(self) -> List[str]:
+        """Returns profiles that have configuration or pipelines.
+
+        A profile is considered configured if:
+        - It is the current profile
+        - It is the pinned profile
+        - It has any toml configuration files (config.toml or secrets.toml with profile prefix)
+        - It has pipelines in its working directory
+
+        NOTE: calling this function is relatively expensive as it probes all available profiles
+        """
+        configured: set[str] = set()
+
+        # the current profile is always configured
+        configured.add(self.profile)
+
+        # the pinned profile is always configured
+        if pinned := read_profile_pin(self):
+            configured.add(pinned)
+
+        # probe all available profiles
+        for profile_name in self.available_profiles():
+            if profile_name in configured:
+                continue
+
+            # check if the profile has any toml config files
+            if self._profile_has_config(profile_name):
+                configured.add(profile_name)
+                continue
+
+            # check if the profile has any pipelines
+            if self._profile_has_pipelines(profile_name):
+                configured.add(profile_name)
+
+        return list(configured)
+
+    def _initial_providers(self, profile_name: str) -> List[ConfigProvider]:
+        providers = [
+            EnvironProvider(),
+            ProfileSecretsTomlProvider(self.settings_dir, profile_name, self.global_dir),
+            ProfileConfigTomlProvider(self.settings_dir, profile_name, self.global_dir),
+        ]
+        return providers
+
+    def _make_default_working_dir(self, profile_name: str = None) -> str:
+        return default_working_dir(
+            self.settings_dir,
+            self.name,
+            profile_name or self.profile,
+            DEFAULT_WORKSPACE_WORKING_FOLDER,
+        )
+
+    def _has_default_working_dir(self) -> bool:
+        """Checks if the current working dir has the default layout that includes profiles"""
+        return self._data_dir == self._make_default_working_dir()
+
+    def _profile_has_config(self, profile_name: str) -> bool:
+        """Check if a profile has any configuration files."""
+        # check if any profile-specific files were found
+        for provider in self._initial_providers(profile_name):
+            for location in provider.present_locations:
+                # check if it's a profile-specific file (starts with the profile name)
+                if os.path.basename(location).startswith(f"{profile_name}."):
+                    return True
+        return False
+
+    def _profile_has_pipelines(self, profile_name: str) -> bool:
+        """Check if a profile has any pipelines in its data directory."""
+        # non-default layouts cannot be probed
+        if not self._has_default_working_dir():
+            return False
+
+        working_dir = self._make_default_working_dir(profile_name)
+        pipelines_dir = os.path.join(working_dir, "pipelines")
+        try:
+            return os.path.isdir(pipelines_dir) and bool(os.listdir(pipelines_dir))
+        except OSError:
+            return False
+
     def switch_profile(self, new_profile: str) -> "WorkspaceRunContext":
         return switch_context(self.run_dir, new_profile, required="WorkspaceRunContext")
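The probing above is what the CLI and dashboard now consume; a hedged sketch of how it could be exercised inside a workspace (the profile names in the comments are invented):

    from dlt.common.configuration.container import Container
    from dlt.common.configuration.specs.pluggable_run_context import PluggableRunContext

    # assumed to resolve to the active WorkspaceRunContext inside a workspace
    ctx = Container()[PluggableRunContext].context
    print(ctx.available_profiles())   # e.g. ["dev", "prod", "tests", "access"]
    print(ctx.configured_profiles())  # e.g. ["dev", "prod"]: only profiles with configs or pipelines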
@@ -1,10 +1,12 @@
 import os
 from pathlib import Path
 import yaml
-from typing import Any, Sequence, Tuple
+from typing import Any, Dict, List, Sequence, Tuple, cast
 from inspect import signature
 import dlt

 from dlt.common.json import json
 from dlt.common.pendulum import pendulum
 from dlt.common.pipeline import get_dlt_pipelines_dir, TSourceState
 from dlt.common.destination.reference import TDestinationReferenceArg
 from dlt.common.runners import Venv
@@ -29,6 +31,41 @@ DLT_PIPELINE_COMMAND_DOCS_URL = (
 )


+def list_pipelines(pipelines_dir: str = None, verbosity: int = 1) -> None:
+    """List all pipelines in the given directory, sorted by last run time.
+
+    Args:
+        pipelines_dir: Directory containing pipeline folders. If None, uses the default
+            dlt pipelines directory.
+        verbosity: Controls output detail level:
+            - 0: Only show count summary
+            - 1+: Show full list with last run times
+    """
+    pipelines_dir, pipelines = utils.list_local_pipelines(pipelines_dir)
+
+    if len(pipelines) > 0:
+        if verbosity == 0:
+            fmt.echo(
+                "%s pipelines found in %s. Use %s to see the full list."
+                % (len(pipelines), fmt.bold(pipelines_dir), fmt.bold("-v"))
+            )
+            return
+        else:
+            fmt.echo("%s pipelines found in %s" % (len(pipelines), fmt.bold(pipelines_dir)))
+    else:
+        fmt.echo("No pipelines found in %s" % fmt.bold(pipelines_dir))
+        return
+
+    # pipelines are already sorted by timestamp (newest first) from list_local_pipelines
+    for pipeline_info in pipelines:
+        name = pipeline_info["name"]
+        timestamp = pipeline_info["timestamp"]
+        time_str = utils.date_from_timestamp_with_ago(timestamp)
+        fmt.echo(
+            "%s %s" % (fmt.style(name, fg="green"), fmt.style(f"(last run: {time_str})", fg="cyan"))
+        )
+
+
 def pipeline_command(
     operation: str,
     pipeline_name: str,
@@ -39,19 +76,7 @@ def pipeline_command(
     **command_kwargs: Any,
 ) -> None:
     if operation == "list":
-        pipelines_dir = pipelines_dir or get_dlt_pipelines_dir()
-        storage = FileStorage(pipelines_dir)
-        dirs = []
-        try:
-            dirs = storage.list_folder_dirs(".", to_root=False)
-        except FileNotFoundError:
-            pass
-        if len(dirs) > 0:
-            fmt.echo("%s pipelines found in %s" % (len(dirs), fmt.bold(pipelines_dir)))
-        else:
-            fmt.echo("No pipelines found in %s" % fmt.bold(pipelines_dir))
-        for _dir in dirs:
-            fmt.secho(_dir, fg="green")
+        list_pipelines(pipelines_dir)
         return

     # we may open the dashboard for a pipeline without checking if it exists
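With `list` delegating to `list_pipelines`, the listing can also be driven directly; a minimal sketch, assuming at least one pipeline has run locally (the printed names are invented):

    from dlt._workspace.cli._pipeline_command import list_pipelines

    # prints e.g. "2 pipelines found in ~/.dlt/pipelines" followed by lines like
    # "chess_pipeline (last run: 2 hours ago (2025-12-17 09:12:03))"
    list_pipelines(verbosity=1)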
@@ -20,8 +20,19 @@ def print_profile_info(workspace_run_context: WorkspaceRunContext) -> None:
 @utils.track_command("profile", track_before=False, operation="list")
 def list_profiles(workspace_run_context: WorkspaceRunContext) -> None:
     fmt.echo("Available profiles:")
     current_profile = workspace_run_context.profile
+    configured_profiles = workspace_run_context.configured_profiles()
     for profile in workspace_run_context.available_profiles():
         desc = BUILT_IN_PROFILES.get(profile, "Pinned custom profile")
+        markers = []
+        if profile == current_profile:
+            markers.append(fmt.bold("(current)"))
+        if profile in configured_profiles:
+            markers.append(fmt.bold("(configured)"))
+        marker_str = " ".join(markers)
+        if marker_str:
+            fmt.echo("* %s %s - %s" % (fmt.bold(profile), marker_str, desc))
+        else:
             fmt.echo("* %s - %s" % (fmt.bold(profile), desc))

@@ -39,7 +50,4 @@ def pin_profile(workspace_run_context: WorkspaceRunContext, profile_name: str) -
         fmt.echo("No pinned profile.")
     else:
         fmt.echo("Will pin the profile %s to current Workspace." % fmt.bold(profile_name))
-        if not fmt.confirm("Do you want to proceed?", default=True):
-            # TODO: raise exception that will exit with all required cleanups
-            exit(0)
         save_profile_pin(workspace_run_context, profile_name)
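A hedged sketch of the resulting `dlt profile` listing, with invented profile names and descriptions:

    Available profiles:
    * dev (current) (configured) - development profile
    * prod (configured) - production profile
    * tests - profile for tests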
@@ -9,11 +9,12 @@ from dlt.common.configuration.specs.pluggable_run_context import (
 from dlt._workspace.cli import echo as fmt, utils
 from dlt._workspace._workspace_context import WorkspaceRunContext
 from dlt._workspace.cli.utils import check_delete_local_data, delete_local_data
+from dlt._workspace.cli._pipeline_command import list_pipelines
 from dlt._workspace.profile import read_profile_pin


 @utils.track_command("workspace", track_before=False, operation="info")
-def print_workspace_info(run_context: WorkspaceRunContext) -> None:
+def print_workspace_info(run_context: WorkspaceRunContext, verbosity: int = 0) -> None:
     fmt.echo("Workspace %s:" % fmt.bold(run_context.name))
     fmt.echo("Workspace dir: %s" % fmt.bold(run_context.run_dir))
     fmt.echo("Settings dir: %s" % fmt.bold(run_context.settings_dir))

@@ -24,16 +25,41 @@ def print_workspace_info(run_context: WorkspaceRunContext, verbosity: int = 0) -> None:
     fmt.echo("  Locally loaded data: %s" % fmt.bold(run_context.local_dir))
     if run_context.profile == read_profile_pin(run_context):
         fmt.echo("  Profile is %s" % fmt.bold("pinned"))
+    configured_profiles = run_context.configured_profiles()
+    if configured_profiles:
+        fmt.echo(
+            "Profiles with configs or pipelines: %s" % fmt.bold(", ".join(configured_profiles))
+        )

     # provider info
     providers_context = Container()[PluggableRunContext].providers
     fmt.echo()
-    fmt.echo("dlt reads configuration from following locations:")
+    fmt.echo("dlt found configuration in following locations:")
+    total_not_found_count = 0
     for provider in providers_context.providers:
         fmt.echo("* %s" % fmt.bold(provider.name))
-        for location in provider.locations:
+        for location in provider.present_locations:
             fmt.echo("  %s" % location)
         if provider.is_empty:
             fmt.echo("  provider is empty")
+        # check for locations that were not found
+        not_found_locations = set(provider.locations).difference(provider.present_locations)
+        if not_found_locations:
+            if verbosity > 0:
+                # display details of the not found locations
+                for location in not_found_locations:
+                    fmt.echo("  %s (not found)" % fmt.style(location, fg="yellow"))
+            else:
+                total_not_found_count += len(not_found_locations)
+    # at verbosity 0, show a summary of not found locations
+    if verbosity == 0 and total_not_found_count > 0:
+        fmt.echo(
+            "%s location(s) were probed but not found. Use %s to see details."
+            % (fmt.bold(str(total_not_found_count)), fmt.bold("-v"))
+        )
+    # list pipelines in the workspace
+    fmt.echo()
+    list_pipelines(run_context.get_data_entity("pipelines"), verbosity)


 @utils.track_command("workspace", track_before=False, operation="clean")
@@ -431,8 +431,13 @@ list of all tables and columns created at the destination during the loading of
     def execute(self, args: argparse.Namespace) -> None:
         from dlt._workspace.cli._pipeline_command import pipeline_command_wrapper

-        if args.list_pipelines:
-            pipeline_command_wrapper("list", "-", args.pipelines_dir, args.verbosity)
+        if (
+            args.list_pipelines
+            or args.operation == "list"
+            or (not args.pipeline_name and not args.operation)
+        ):
+            # Always use max verbosity (1) for dlt pipeline list - show full details
+            pipeline_command_wrapper("list", "-", args.pipelines_dir, 1)
         else:
             command_kwargs = dict(args._get_kwargs())
             if not command_kwargs.get("pipeline_name"):

@@ -785,6 +790,15 @@ workspace info.
     def configure_parser(self, parser: argparse.ArgumentParser) -> None:
         self.parser = parser

+        parser.add_argument(
+            "--verbose",
+            "-v",
+            action="count",
+            default=0,
+            help="Provides more information for certain commands.",
+            dest="verbosity",
+        )
+
         subparsers = parser.add_subparsers(
             title="Available subcommands", dest="workspace_command", required=False
         )

@@ -844,7 +858,7 @@ workspace info.
         workspace_context = active()

         if args.workspace_command == "info" or not args.workspace_command:
-            print_workspace_info(workspace_context)
+            print_workspace_info(workspace_context, args.verbosity)
         elif args.workspace_command == "clean":
             clean_workspace(workspace_context, args)
         elif args.workspace_command == "show":
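The counted flag means each `-v` raises the verbosity by one; a standalone sketch of that argparse pattern (only the add_argument call mirrors the commit, the parser around it is illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog="dlt workspace")
    parser.add_argument("--verbose", "-v", action="count", default=0, dest="verbosity")
    print(parser.parse_args([]).verbosity)      # 0: not-found locations are summarized
    print(parser.parse_args(["-v"]).verbosity)  # 1: each missing location is printed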
@@ -1,10 +1,12 @@
 import ast
 import os
 import shutil
-from typing import Any, Callable, List
+from typing import Any, Callable, Dict, List, Tuple, cast

 import dlt
-from dlt.common.typing import TFun
+from dlt.common.pipeline import get_dlt_pipelines_dir
+from dlt.common.time import ensure_pendulum_datetime_non_utc
+from dlt.common.typing import TAnyDateTime, TFun
 from dlt.common.configuration.resolve import resolve_configuration
 from dlt.common.configuration.specs.pluggable_run_context import (
     RunContextBase,

@@ -19,6 +21,7 @@ from dlt.common.storages.file_storage import FileStorage
 from dlt._workspace.cli.exceptions import CliCommandException, CliCommandInnerException
 from dlt._workspace.cli import echo as fmt

+from dlt.pipeline.trace import get_trace_file_path
 from dlt.reflection.script_visitor import PipelineScriptVisitor

 REQUIREMENTS_TXT = "requirements.txt"
@@ -27,6 +30,64 @@ GITHUB_WORKFLOWS_DIR = os.path.join(".github", "workflows")
 AIRFLOW_DAGS_FOLDER = os.path.join("dags")
 AIRFLOW_BUILD_FOLDER = os.path.join("build")
 MODULE_INIT = "__init__.py"
+DATETIME_FORMAT = "YYYY-MM-DD HH:mm:ss"
+
+
+def get_pipeline_trace_mtime(pipelines_dir: str, pipeline_name: str) -> float:
+    """Get the mtime of the trace saved by the pipeline, which approximates its run time"""
+    trace_file = get_trace_file_path(pipelines_dir, pipeline_name)
+    if os.path.isfile(trace_file):
+        return os.path.getmtime(trace_file)
+    return 0
+
+
+def list_local_pipelines(
+    pipelines_dir: str = None, sort_by_trace: bool = True, additional_pipelines: List[str] = None
+) -> Tuple[str, List[Dict[str, Any]]]:
+    """Get the local pipelines directory and the list of pipelines in it.
+
+    Args:
+        pipelines_dir (str, optional): The local pipelines directory. Defaults to get_dlt_pipelines_dir().
+        sort_by_trace (bool, optional): Whether to sort the pipelines by the latest timestamp of the trace. Defaults to True.
+        additional_pipelines (List[str], optional): Extra pipeline names to include in the result.
+    Returns:
+        Tuple[str, List[Dict[str, Any]]]: The local pipelines directory and the list of pipelines in it.
+    """
+    pipelines_dir = pipelines_dir or get_dlt_pipelines_dir()
+    storage = FileStorage(pipelines_dir)
+
+    try:
+        pipelines = storage.list_folder_dirs(".", to_root=False)
+    except Exception:
+        pipelines = []
+
+    if additional_pipelines:
+        for pipeline in additional_pipelines:
+            if pipeline and pipeline not in pipelines:
+                pipelines.append(pipeline)
+
+    # check the last trace timestamp and create dicts
+    pipelines_with_timestamps = []
+    for pipeline in pipelines:
+        pipelines_with_timestamps.append(
+            {"name": pipeline, "timestamp": get_pipeline_trace_mtime(pipelines_dir, pipeline)}
+        )
+
+    if sort_by_trace:
+        pipelines_with_timestamps.sort(key=lambda x: cast(float, x["timestamp"]), reverse=True)
+
+    return pipelines_dir, pipelines_with_timestamps
+
+
+def date_from_timestamp_with_ago(
+    timestamp: TAnyDateTime, datetime_format: str = DATETIME_FORMAT
+) -> str:
+    """Return a formatted date with an "ago" section"""
+    if not timestamp or timestamp == 0:
+        return "never"
+    timestamp = ensure_pendulum_datetime_non_utc(timestamp)
+    time_formatted = timestamp.format(datetime_format)
+    ago = timestamp.diff_for_humans()
+    return f"{ago} ({time_formatted})"


 def display_run_context_info() -> None:
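A minimal usage sketch of the two new helpers, assuming at least one pipeline has run locally (the printed shape is an assumption):

    from dlt._workspace.cli.utils import date_from_timestamp_with_ago, list_local_pipelines

    pipelines_dir, pipelines = list_local_pipelines()  # newest trace first
    for p in pipelines:
        # e.g. "fruitshop 2 hours ago (2025-12-17 09:12:03)" or "fruitshop never"
        print(p["name"], date_from_timestamp_with_ago(p["timestamp"]))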
@@ -493,7 +493,7 @@ def section_browse_data_table_list(

     # we only show resource state if the table has resource set, child tables do not have a resource set
     _resource_name, _source_state, _resource_state = (
-        utils.get_source_and_resouce_state_for_table(
+        utils.get_source_and_resource_state_for_table(
             _schema_table, dlt_pipeline, dlt_selected_schema_name
         )
     )

@@ -998,6 +998,7 @@ def utils_discover_pipelines(
     """
     Discovers local pipelines and returns a multiselect widget to select one of the pipelines
    """
+    from dlt._workspace.cli.utils import list_local_pipelines

     # sync from runtime if enabled
     _tmp_config = utils.resolve_dashboard_config(None)

@@ -1017,7 +1018,7 @@ def utils_discover_pipelines(
     # discover pipelines and build selector
     dlt_pipelines_dir: str = ""
     dlt_all_pipelines: List[Dict[str, Any]] = []
-    dlt_pipelines_dir, dlt_all_pipelines = utils.get_local_pipelines(
+    dlt_pipelines_dir, dlt_all_pipelines = list_local_pipelines(
         mo_cli_arg_pipelines_dir,
         additional_pipelines=[mo_cli_arg_pipeline, mo_query_var_pipeline_name],
     )

@@ -1047,7 +1048,7 @@ def utils_discover_profiles(mo_query_var_profile: str, mo_cli_arg_profile: str):
     selected_profile = None

     if isinstance(run_context, ProfilesRunContext):
-        options = run_context.available_profiles() or []
+        options = run_context.configured_profiles() or []
         current = run_context.profile if options and run_context.profile in options else None

         selected_profile = current

@@ -1201,15 +1202,15 @@ def watch_changes(
     """
     Watch changes in the trace file and trigger reload in the home cell and all following cells on change
     """
+    from dlt.pipeline.trace import get_trace_file_path
+
     # provide pipeline object to the following cells
     dlt_pipeline_name: str = (
         str(dlt_pipeline_select.value[0]) if dlt_pipeline_select.value else None
     )
     dlt_file_watcher = None
     if dlt_pipeline_name:
-        dlt_file_watcher = mo.watch.file(
-            utils.get_trace_file_path(dlt_pipeline_name, dlt_pipelines_dir)
-        )
+        dlt_file_watcher = mo.watch.file(get_trace_file_path(dlt_pipelines_dir, dlt_pipeline_name))
     return dlt_pipeline_name, dlt_file_watcher
@@ -19,7 +19,7 @@ try:
 except ModuleNotFoundError:
     raise MissingDependencyException(
         "Workspace Dashboard",
-        ['dlt["workspace"]'],
+        ["dlt[workspace]"],
         "to install the dlt workspace extra.",
     )
@@ -26,55 +26,37 @@ import dlt
 import marimo as mo
 import pyarrow
 import traceback
 import datetime  # noqa: I251

 from dlt.common.configuration import resolve_configuration
 from dlt.common.configuration.specs import known_sections
 from dlt.common.destination.client import WithStateSync
 from dlt.common.json import json
 from dlt.common.pendulum import pendulum
-from dlt.common.pipeline import get_dlt_pipelines_dir, LoadInfo
+from dlt.common.pipeline import LoadInfo
 from dlt.common.schema import Schema
 from dlt.common.schema.typing import TTableSchema
-from dlt.common.storages import FileStorage, LoadPackageInfo
+from dlt.common.storages import LoadPackageInfo
 from dlt.common.storages.load_package import PackageStorage, TLoadPackageStatus
 from dlt.common.destination.client import DestinationClientConfiguration
 from dlt.common.destination.exceptions import SqlClientNotAvailable
 from dlt.common.storages.configuration import WithLocalFiles
 from dlt.common.configuration.exceptions import ConfigFieldMissingException
-from dlt.common.typing import DictStrAny, TypedDict
+from dlt.common.typing import DictStrAny
 from dlt.common.utils import map_nested_keys_in_place
+from dlt.common.pipeline import get_dlt_pipelines_dir

 from dlt._workspace.helpers.dashboard import ui_elements as ui
 from dlt._workspace.helpers.dashboard.config import DashboardConfiguration
+from dlt._workspace.cli import utils as cli_utils
 from dlt.destinations.exceptions import DatabaseUndefinedRelation, DestinationUndefinedEntity
 from dlt.pipeline.exceptions import PipelineConfigMissing
 from dlt.pipeline.exceptions import CannotRestorePipelineException
 from dlt.pipeline.trace import PipelineTrace, PipelineStepTrace

-PICKLE_TRACE_FILE = "trace.pickle"
-

 #
 # App helpers
 #


-def _exception_to_string(exception: Exception) -> str:
-    """Convert an exception to a string"""
-    if isinstance(exception, (PipelineConfigMissing, ConfigFieldMissingException)):
-        return "Could not connect to destination, configuration values are missing."
-    elif isinstance(exception, (SqlClientNotAvailable)):
-        return "The destination of this pipeline does not support querying data with sql."
-    elif isinstance(exception, (DestinationUndefinedEntity, DatabaseUndefinedRelation)):
-        return (
-            "Could connect to destination, but the required table or dataset does not exist in the"
-            " destination."
-        )
-    return str(exception)
-
-
 def get_dashboard_config_sections(p: Optional[dlt.Pipeline]) -> Tuple[str, ...]:
     """Find dashboard config section layout for a particular pipeline or for active
     run context type.
@@ -103,55 +85,6 @@ def resolve_dashboard_config(p: Optional[dlt.Pipeline]) -> DashboardConfiguration:
     )


-def get_trace_file_path(pipeline_name: str, pipelines_dir: str) -> Path:
-    """Get the path to the pickle file for a pipeline"""
-    return Path(pipelines_dir) / pipeline_name / PICKLE_TRACE_FILE
-
-
-def get_pipeline_last_run(pipeline_name: str, pipelines_dir: str) -> float:
-    """Get the last run of a pipeline"""
-    trace_file = get_trace_file_path(pipeline_name, pipelines_dir)
-    if trace_file.exists():
-        return os.path.getmtime(trace_file)
-    return 0
-
-
-def get_local_pipelines(
-    pipelines_dir: str = None, sort_by_trace: bool = True, additional_pipelines: List[str] = None
-) -> Tuple[str, List[Dict[str, Any]]]:
-    """Get the local pipelines directory and the list of pipeline names in it.
-
-    Args:
-        pipelines_dir (str, optional): The local pipelines directory. Defaults to get_dlt_pipelines_dir().
-        sort_by_trace (bool, optional): Whether to sort the pipelines by the latet timestamp of trace. Defaults to True.
-    Returns:
-        Tuple[str, List[str]]: The local pipelines directory and the list of pipeline names in it.
-    """
-    pipelines_dir = pipelines_dir or get_dlt_pipelines_dir()
-    storage = FileStorage(pipelines_dir)
-
-    try:
-        pipelines = storage.list_folder_dirs(".", to_root=False)
-    except Exception:
-        pipelines = []
-
-    if additional_pipelines:
-        for pipeline in additional_pipelines:
-            if pipeline and pipeline not in pipelines:
-                pipelines.append(pipeline)
-
-    # check last trace timestamp and create dict
-    pipelines_with_timestamps = []
-    for pipeline in pipelines:
-        pipelines_with_timestamps.append(
-            {"name": pipeline, "timestamp": get_pipeline_last_run(pipeline, pipelines_dir)}
-        )
-
-    pipelines_with_timestamps.sort(key=lambda x: cast(float, x["timestamp"]), reverse=True)
-
-    return pipelines_dir, pipelines_with_timestamps
-
-
 def get_pipeline(pipeline_name: str, pipelines_dir: str) -> dlt.Pipeline:
     """Get a pipeline by name. Attach exceptions must be handled by the caller
@@ -220,7 +153,7 @@ def pipeline_details(

     last_executed = "No trace found"
     if trace and hasattr(trace, "started_at"):
-        last_executed = _date_from_timestamp_with_ago(c, trace.started_at)
+        last_executed = cli_utils.date_from_timestamp_with_ago(trace.started_at, c.datetime_format)

     details_dict = {
         "pipeline_name": pipeline.pipeline_name,
@@ -362,7 +295,7 @@ def create_column_list(
     return _align_dict_keys(column_list)


-def get_source_and_resouce_state_for_table(
+def get_source_and_resource_state_for_table(
     table: TTableSchema, pipeline: dlt.Pipeline, schema_name: str
 ) -> Tuple[str, DictStrAny, DictStrAny]:
     if "resource" not in table:
@@ -674,7 +607,11 @@ def build_pipeline_link_list(
     link_list: str = ""
     for _p in pipelines:
         link = f"* [{_p['name']}](?pipeline={_p['name']})"
-        link = link + " - last executed: " + _date_from_timestamp_with_ago(config, _p["timestamp"])
+        link = (
+            link
+            + " - last executed: "
+            + cli_utils.date_from_timestamp_with_ago(_p["timestamp"], config.datetime_format)
+        )

         link_list += f"{link}\n"
         count += 1
@@ -750,19 +687,18 @@ def build_exception_section(p: dlt.Pipeline) -> List[Any]:
 #


-def _date_from_timestamp_with_ago(
-    config: DashboardConfiguration, timestamp: Union[int, float, datetime.datetime]
-) -> str:
-    """Return a date with ago section"""
-    if not timestamp or timestamp == 0:
-        return "never"
-    if isinstance(timestamp, datetime.datetime):
-        p_ts = pendulum.instance(timestamp)
-    else:
-        p_ts = pendulum.from_timestamp(timestamp)
-    time_formatted = p_ts.format(config.datetime_format)
-    ago = p_ts.diff_for_humans()
-    return f"{ago} ({time_formatted})"
+def _exception_to_string(exception: Exception) -> str:
+    """Convert an exception to a string"""
+    if isinstance(exception, (PipelineConfigMissing, ConfigFieldMissingException)):
+        return "Could not connect to destination, configuration values are missing."
+    elif isinstance(exception, (SqlClientNotAvailable)):
+        return "The destination of this pipeline does not support querying data with sql."
+    elif isinstance(exception, (DestinationUndefinedEntity, DatabaseUndefinedRelation)):
+        return (
+            "Could connect to destination, but the required table or dataset does not exist in the"
+            " destination."
+        )
+    return str(exception)


 def _without_none_or_empty_string(d: Mapping[Any, Any]) -> Mapping[Any, Any]:
@@ -91,6 +91,7 @@ def _write_to_bucket(
 ) -> None:
     # write to bucket using the config, same object may be written to multiple paths
+    logger.info(f"Will send run artifact to {bucket_url}: {paths}")
     for path in paths:
         with fs.open(f"{bucket_url}/{pipeline_name}/{path}", mode=mode) as f:
             f.write(data)
@@ -126,10 +126,23 @@ class ConfigFieldMissingException(KeyError, ConfigurationException):
         # print locations for config providers
         providers = Container()[PluggableRunContext].providers
         for provider in providers.providers:
-            if provider.locations:
-                locations = "\n".join([f"\t- {os.path.abspath(loc)}" for loc in provider.locations])
+            if provider.present_locations:
+                locations = "\n".join(
+                    [f"\t- {os.path.abspath(loc)}" for loc in provider.present_locations]
+                )
                 msg += f"Provider `{provider.name}` loaded values from locations:\n{locations}\n"

+            # inform on locations that were not found
+            not_found_locations = set(provider.locations).difference(provider.present_locations)
+            if not_found_locations:
+                locations = "\n".join(
+                    [f"\t- {os.path.abspath(loc)}" for loc in not_found_locations]
+                )
+                msg += (
+                    f"Provider `{provider.name}` probed but not found the following"
+                    f" locations:\n{locations}\n"
+                )
+
             if provider.is_empty:
                 msg += (
                     f"WARNING: provider `{provider.name}` is empty. Locations (i.e., files) are"
@@ -51,9 +51,14 @@ class ConfigProvider(abc.ABC):

     @property
     def locations(self) -> Sequence[str]:
-        """Returns a list of locations where secrets are stored, human readable"""
+        """Returns all possible locations where secrets may be stored, human readable"""
         return []

+    @property
+    def present_locations(self) -> Sequence[str]:
+        """Returns a list of locations that were present and contained secrets, human readable"""
+        return self.locations
+
     def __repr__(self) -> str:
         kwargs = {
             "is_empty": self.is_empty,
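A hedged sketch of the split between declared and present locations; the provider class and paths below are invented, only the two properties come from the commit:

    from typing import Sequence

    class ExampleTomlProvider:
        @property
        def locations(self) -> Sequence[str]:
            # every location the provider would probe
            return ["~/.dlt/config.toml", ".dlt/config.toml"]

        @property
        def present_locations(self) -> Sequence[str]:
            # only the files that actually existed and were read
            return [".dlt/config.toml"]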
@@ -74,7 +74,8 @@ class SettingsTomlProvider(CustomLoaderDocProvider):
         self._toml_paths = self._resolve_toml_paths(
             file_name, [d for d in resolvable_dirs if d is not None]
         )
-
+        # read toml files and set present locations
+        self._present_locations: List[str] = []
         self._config_toml = self._read_toml_files(name, file_name, self._toml_paths)

         super().__init__(

@@ -115,6 +116,10 @@ class SettingsTomlProvider(CustomLoaderDocProvider):
     def is_empty(self) -> bool:
         return len(self._config_toml.body) == 0 and super().is_empty

+    @property
+    def present_locations(self) -> List[str]:
+        return self._present_locations
+
     def set_fragment(
         self, key: Optional[str], value_or_fragment: str, pipeline_name: str, *sections: str
     ) -> None:

@@ -207,6 +212,8 @@ class SettingsTomlProvider(CustomLoaderDocProvider):
                 result_toml = loaded_toml
             else:
                 result_toml = update_dict_nested(loaded_toml, result_toml)
+            # store as present location
+            self._present_locations.append(path)

         # if nothing was found, try to load from google colab or streamlit
         if result_toml is None:
@@ -607,8 +607,8 @@ def _emit_placeholder_warning(
         "Most likely, this comes from `init`-command, which creates basic templates for "
         f"non-complex configs and secrets. The provider to adjust is {provider.name}"
     )
-    if bool(provider.locations):
-        locations = "\n".join([f"\t- {os.path.abspath(loc)}" for loc in provider.locations])
+    if bool(provider.present_locations):
+        locations = "\n".join([f"\t- {os.path.abspath(loc)}" for loc in provider.present_locations])
         msg += f" at one of these locations:\n{locations}"
     logger.warning(msg=msg)
@@ -159,6 +159,10 @@ class ProfilesRunContext(RunContextBase):
     def available_profiles(self) -> List[str]:
         """Returns available profiles"""

+    def configured_profiles(self) -> List[str]:
+        """Returns profiles with configurations or dlt entities, same as available by default"""
+        return self.available_profiles()
+
     @abstractmethod
     def switch_profile(self, new_profile: str) -> Self:
         """Switches current profile and returns new run context"""
@@ -6,6 +6,8 @@ from types import ModuleType
 from typing import Any, Dict, Iterator, List, Optional
 from urllib.parse import urlencode

+from packaging.version import Version
+
 from dlt.common import known_env
 from dlt.common.configuration.container import Container
 from dlt.common.configuration.providers import (

@@ -238,6 +240,51 @@ def get_plugin_modules() -> List[str]:
     return plugin_modules


+def ensure_plugin_version_match(
+    pkg_name: str,
+    dlt_version: str,
+    plugin_version: str,
+    plugin_module_name: str,
+    dlt_extra: str,
+) -> None:
+    """Ensures that the installed dlt version matches the plugin version. Plugins are tightly bound to `dlt`
+    and released together. Both major and minor versions must match. For alpha plugins the major version may be 0.
+
+    Args:
+        pkg_name: Name of the plugin package (e.g., "dlthub")
+        dlt_version: The installed dlt version string
+        plugin_version: The installed plugin version string
+        plugin_module_name: The module name for MissingDependencyException (e.g., "dlthub")
+        dlt_extra: The dlt extra that installs the plugin (e.g., "hub")
+
+    Raises:
+        MissingDependencyException: If a version mismatch is detected
+    """
+    installed = Version(plugin_version)
+    dlt_installed = Version(dlt_version)
+
+    # currently packages must match on the minor version
+    if installed.minor != dlt_installed.minor or (
+        installed.major != dlt_installed.major and installed.major != 0
+    ):
+        from dlt.common.exceptions import MissingDependencyException
+
+        custom_msg = (
+            f"`{pkg_name}` is a `dlt` plugin and must be installed together with `dlt` with a "
+            f"matching version. `dlt` {dlt_installed.major}.{dlt_installed.minor}.x requires "
+            f"`{pkg_name}` 0.{dlt_installed.minor}.x but you have "
+            f"{plugin_version}. Please install the right version of {pkg_name} with:\n\n"
+            f'pip install "dlt[{dlt_extra}]=={dlt_version}"\n\n'
+            "or if you are upgrading the plugin:\n\n"
+            f'pip install "dlt[{dlt_extra}]=={dlt_version}" -U {pkg_name}'
+        )
+        missing_dep_ex = MissingDependencyException(plugin_module_name, [])
+        # ImportError uses the `msg` attribute for __str__, not just args
+        missing_dep_ex.args = (custom_msg,)
+        missing_dep_ex.msg = custom_msg
+        raise missing_dep_ex
+
+
 def context_uri(name: str, run_dir: str, runtime_kwargs: Optional[Dict[str, Any]]) -> str:
     from dlt.common.storages.configuration import FilesystemConfiguration
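A hedged illustration of the matching rule above (the version strings are invented):

    from dlt.common.runtime.run_context import ensure_plugin_version_match

    # ok: minor versions match, and an alpha plugin (major 0) matches any dlt major
    ensure_plugin_version_match("dlthub", "1.20.0", "0.20.1", "dlthub", "hub")
    # raises MissingDependencyException: minor versions differ
    ensure_plugin_version_match("dlthub", "1.20.0", "1.19.0", "dlthub", "hub")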
@@ -1,11 +1,38 @@
 """A collection of dltHub Features"""
+from typing import Any
+
+
+__found__ = False
+__exception__ = None
+

 try:
     from dlthub import transformation, runner, data_quality
     from . import current

+    __found__ = True
     __all__ = ("transformation", "current", "runner", "data_quality")
-except ImportError:
-    pass
+except ImportError as import_exc:
+    __exception__ = import_exc
+
+
+def __getattr__(name: str) -> Any:
+    """Provide useful info on missing attributes"""
+
+    # hub was found; this is just a regular missing attribute
+    if __found__:
+        raise AttributeError(f"module 'dlt.hub' has no attribute '{name}'")
+
+    from dlt.common.exceptions import MissingDependencyException
+
+    if isinstance(__exception__, MissingDependencyException):
+        # plugins will raise MissingDependencyException if they are not installed with the right version
+        # in that case just re-raise the original message
+        raise __exception__
+
+    raise MissingDependencyException(
+        "dlt.hub",
+        ["dlt[hub]"],
+        "This will install the `dlthub` plugin package with a matching "
+        f"version.\nfrom:\n({str(__exception__)})",
+    )
@@ -347,6 +347,11 @@ def load_trace(trace_dir: str, ignore_errors: bool = True) -> PipelineTrace:
         return None


+def get_trace_file_path(pipelines_dir: str, pipeline_name: str) -> str:
+    """Get the path to the pickle file for a pipeline"""
+    return os.path.join(pipelines_dir, pipeline_name, TRACE_FILE_NAME)
+
+
 def get_exception_traces(exc: BaseException, container: Container = None) -> List[ExceptionTrace]:
     """Gets exception trace chain and extend it with data available in Container context"""
     traces = get_exception_trace_chain(exc)
@@ -1,6 +1,6 @@
 [project]
 name = "dlt"
-version = "1.19.1"
+version = "1.20.0a0"
 description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run."
 authors = [{ name = "dltHub Inc.", email = "services@dlthub.com" }]
 requires-python = ">=3.9.2, <3.15"

@@ -187,6 +187,11 @@ workspace = [
     "mcp>=1.2.1 ; python_version >= '3.10'",
     "pathspec>=0.11.2",
 ]
+hub = [
+    "dlthub>=0.19.0a0,<0.21 ; python_version >= '3.10'",
+    "dlt-runtime>=0.19.0a0,<0.21 ; python_version >= '3.10'",
+]

 dbml = [
     "pydbml"
 ]

@@ -309,6 +314,7 @@ dlt = "dlt.__plugins__"

 [tool.uv.sources]
 flake8-encodings = { git = "https://github.com/dlt-hub/flake8-encodings.git", branch = "disable_jedi_support" }
+# dlthub = { path = "../dlt-plus/packages/dlthub", editable = true }

 [tool.hatch.build.targets.sdist]
 packages = ["dlt"]
@@ -286,18 +286,25 @@ def test_toml_global_config() -> None:
     # project overwrites
     v, _ = config.get_value("param1", bool, None, "api", "params")
     assert v == "a"
-    # verify locations
+    # verify global location
     assert os.path.join(global_dir, "config.toml") in config.locations
+    assert os.path.join(global_dir, "config.toml") in config.present_locations
+    # verify local location
     assert os.path.join(settings_dir, "config.toml") in config.locations
+    assert os.path.join(settings_dir, "config.toml") in config.present_locations

     secrets = SecretsTomlProvider(settings_dir=settings_dir, global_dir=global_dir)
     assert secrets._toml_paths[1] == os.path.join(global_dir, SECRETS_TOML)
     # check if values from project exist
     secrets_project = SecretsTomlProvider(settings_dir=settings_dir)
     assert secrets._config_doc == secrets_project._config_doc
-    # verify locations
+    # verify global location (secrets not present)
     assert os.path.join(global_dir, "secrets.toml") in secrets.locations
+    assert os.path.join(global_dir, "secrets.toml") not in secrets.present_locations
+    # verify local location (secrets not present)
     assert os.path.join(settings_dir, "secrets.toml") in secrets.locations
+    # CI creates secrets.toml so actually those are sometimes present
+    # assert os.path.join(settings_dir, "secrets.toml") not in secrets.present_locations


 def test_write_value(toml_providers: ConfigProvidersContainer) -> None:
22 changes: tests/common/runtime/test_known_plugins.py (new file)
@@ -0,0 +1,22 @@
+"""Tests behavior of known plugins when they are not installed"""
+import pytest
+
+from dlt.common.exceptions import MissingDependencyException
+
+
+def test_hub_fallback() -> None:
+    import dlt.hub
+
+    if dlt.hub.__found__ or not isinstance(dlt.hub.__exception__, ModuleNotFoundError):
+        pytest.skip(
+            "Skip test due to hub being present or partially loaded: " + str(dlt.hub.__exception__)
+        )
+
+    assert isinstance(dlt.hub.__exception__, ModuleNotFoundError)
+
+    # accessing attributes generates an import error
+    with pytest.raises(MissingDependencyException) as missing_ex:
+        dlt.hub.transformation
+
+    assert missing_ex.value.dependencies[0] == "dlt[hub]"
@@ -10,10 +10,12 @@ from dlt.common.runtime.init import restore_run_context
 from dlt.common.runtime.run_context import (
     DOT_DLT,
     RunContext,
+    ensure_plugin_version_match,
     get_plugin_modules,
     is_folder_writable,
     switched_run_context,
 )
+from dlt.common.exceptions import MissingDependencyException
 from dlt.common.storages.configuration import _make_file_url
 from dlt.common.utils import set_working_dir
@@ -168,3 +170,53 @@ def test_context_with_xdg_dir(mocker) -> None:
     ctx = PluggableRunContext()
     run_context = ctx.context
     assert run_context.global_dir == dlt_home
+
+
+def test_ensure_plugin_version_match_same_versions() -> None:
+    """test that matching versions pass without error."""
+    # exact same version
+    ensure_plugin_version_match("dlthub", "1.19.0", "1.19.0", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.5", "1.19.2", "dlthub", "hub")
+    # different patch versions are ok
+    ensure_plugin_version_match("dlthub", "2.5.0", "2.5.10", "dlthub", "hub")
+    # alpha specifiers (e.g. 1.19.0a1)
+    ensure_plugin_version_match("dlthub", "1.19.0a1", "1.19.0a2", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.0a1", "1.19.0", "dlthub", "hub")
+    # dev specifiers (e.g. 1.19.0.dev1)
+    ensure_plugin_version_match("dlthub", "1.19.0.dev1", "1.19.0.dev2", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.0.dev1", "1.19.0", "dlthub", "hub")
+    # post release specifiers
+    ensure_plugin_version_match("dlthub", "1.19.0.post1", "1.19.0.post2", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.0.post1", "1.19.0", "dlthub", "hub")
+
+
+def test_ensure_plugin_version_match_alpha_plugin() -> None:
+    """test that alpha plugins (major=0) match any dlt major version with the same minor."""
+    # an alpha plugin (0.x.y) should match dlt 1.x.y with the same minor
+    ensure_plugin_version_match("dlthub", "1.19.0", "0.19.0", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.5", "0.19.2", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "2.19.0", "0.19.0", "dlthub", "hub")
+    # alpha plugin with alpha/dev specifiers
+    ensure_plugin_version_match("dlthub", "1.19.0a1", "0.19.0a2", "dlthub", "hub")
+    ensure_plugin_version_match("dlthub", "1.19.0.dev1", "0.19.0.dev2", "dlthub", "hub")
+
+
+@pytest.mark.parametrize(
+    "dlt_version,plugin_version",
+    [
+        # minor mismatch
+        ("1.19.0", "1.18.0"),
+        ("1.19.0", "0.18.0"),
+        ("1.19.0a1", "1.18.0a1"),
+        ("1.19.0.dev1", "1.18.0.dev1"),
+        # major mismatch (non-alpha plugin)
+        ("1.19.0", "2.19.0"),
+        ("1.19.0a1", "2.19.0a1"),
+        ("1.19.0.dev1", "2.19.0.dev1"),
+    ],
+)
+def test_ensure_plugin_version_match_mismatch(dlt_version: str, plugin_version: str) -> None:
+    """test that mismatched versions raise MissingDependencyException."""
+    with pytest.raises(MissingDependencyException) as exc_info:
+        ensure_plugin_version_match("dlthub", dlt_version, plugin_version, "dlthub", "hub")
+    assert "dlthub" in str(exc_info.value)
@@ -2,6 +2,7 @@ import pathlib
 import sys
 from typing import Any
 import pickle
+import os

 import pytest

@@ -11,6 +12,7 @@ from dlt._workspace._templates._single_file_templates.fruitshop_pipeline import (
     fruitshop as fruitshop_source,
 )
 from dlt._workspace.helpers.dashboard import utils as dashboard_utils
+from dlt.pipeline.trace import get_trace_file_path


 def _normpath(path: str) -> str:
@@ -94,9 +96,9 @@ def broken_trace_pipeline() -> Any:
     )
     bp.run(fruitshop_source())

-    trace_file = dashboard_utils.get_trace_file_path(bp.pipeline_name, bp.pipelines_dir)
-    trace_file.parent.mkdir(parents=True, exist_ok=True)
-    with trace_file.open("wb") as f:
+    trace_file = get_trace_file_path(bp.pipelines_dir, bp.pipeline_name)
+    os.makedirs(os.path.dirname(trace_file), exist_ok=True)
+    with open(trace_file, mode="wb") as f:
         pickle.dump({"not": "a real PipelineTrace"}, f)

     return bp
36 changes: tests/hub/test_plugin_import.py (new file)
@@ -0,0 +1,36 @@
+import pytest
+from pytest_console_scripts import ScriptRunner
+
+from tests.workspace.utils import isolated_workspace
+
+
+def test_import_props() -> None:
+    import dlt.hub
+
+    # hub plugin found
+    assert dlt.hub.__found__
+    assert len(dlt.hub.__all__) > 0
+
+    # no exception
+    assert dlt.hub.__exception__ is None
+
+    # a regular attribute error is raised
+    with pytest.raises(AttributeError) as attr_err:
+        dlt.hub._unknown_feature
+
+    assert "_unknown_feature" in str(attr_err.value)
+
+
+def test_runtime_client_imports(script_runner: ScriptRunner) -> None:
+    pytest.importorskip("dlt_runtime")
+
+    import dlt_runtime  # type: ignore[import-untyped,import-not-found,unused-ignore]
+
+    print(dlt_runtime.__version__)
+
+    # check command activation
+    with isolated_workspace("pipelines"):
+        result = script_runner.run(["dlt", "runtime", "-h"])
+        assert result.returncode == 0
@@ -45,7 +45,7 @@ def test_pipeline_command_operations(repo_dir: str) -> None:
         _pipeline_command.pipeline_command("list", "-", None, 0)
         _out = buf.getvalue()
         # do we have chess pipeline in the list
-        assert "chess_pipeline" in _out.splitlines()
+        assert _out.splitlines()[1].startswith("chess_pipeline")
         print(_out)

     with io.StringIO() as buf, contextlib.redirect_stdout(buf):
@@ -28,7 +28,13 @@ def no_destination_pipeline():
 @pytest.fixture(scope="session")
 def success_pipeline_duckdb():
     with tempfile.TemporaryDirectory() as temp_dir:
-        yield create_success_pipeline_duckdb(temp_dir, ":memory:")
+        import duckdb
+
+        db_conn = duckdb.connect()
+        try:
+            yield create_success_pipeline_duckdb(temp_dir, db_conn=db_conn)
+        finally:
+            db_conn.close()


 @pytest.fixture(scope="session")
@@ -4,6 +4,7 @@
 # TODO: consolidate these test pipelines with the ones in tests/e2e/helpers/dashboard
 #

+from typing import Any
 import dlt
 import pytest
 from dlt._workspace._templates._single_file_templates.fruitshop_pipeline import (

@@ -13,8 +14,6 @@ from dlt.common.destination.exceptions import (
     DestinationTerminalException,
 )

-import tempfile
-
 SUCCESS_PIPELINE_DUCKDB = "success_pipeline_duckdb"
 SUCCESS_PIPELINE_FILESYSTEM = "success_pipeline_filesystem"
 EXTRACT_EXCEPTION_PIPELINE = "extract_exception_pipeline"
@@ -95,16 +94,14 @@ def run_success_pipeline(pipeline: dlt.Pipeline):
     )


-def create_success_pipeline_duckdb(pipelines_dir: str = None, db_location: str = None):
+def create_success_pipeline_duckdb(pipelines_dir: str = None, db_conn: Any = None):
     """Create a test pipeline with in memory duckdb destination, properties see `run_success_pipeline`"""
-    import duckdb
-
     pipeline = dlt.pipeline(
         pipeline_name=SUCCESS_PIPELINE_DUCKDB,
         pipelines_dir=pipelines_dir,
-        destination=dlt.destinations.duckdb(
-            credentials=duckdb.connect(db_location) if db_location else None
-        ),
+        destination=dlt.destinations.duckdb(credentials=db_conn if db_conn else None),
     )

     run_success_pipeline(pipeline)
@@ -215,12 +212,8 @@ def create_no_destination_pipeline(pipelines_dir: str = None):
     )
-    return pipeline
-
     pipeline.extract(fruitshop_source())

     return pipeline


-# NOTE: this sript can be run to create the test pipelines globally for manual testing of the dashboard app and cli
+# NOTE: this script can be run to create the test pipelines globally for manual testing of the dashboard app and cli
 if __name__ == "__main__":
     create_success_pipeline_duckdb()
     create_success_pipeline_filesystem()
@@ -11,13 +11,13 @@ import pytest
 import dlt
 from dlt.common import pendulum

+from dlt._workspace.cli import utils as cli_utils
 from dlt._workspace.helpers.dashboard.config import DashboardConfiguration
 from dlt._workspace.helpers.dashboard.utils import (
-    PICKLE_TRACE_FILE,
     get_dashboard_config_sections,
     get_query_result_cached,
     resolve_dashboard_config,
-    get_local_pipelines,
     get_pipeline,
     pipeline_details,
     create_table_list,

@@ -38,10 +38,9 @@ from dlt._workspace.helpers.dashboard.utils import (
     get_local_data_path,
     remote_state_details,
     sanitize_trace_for_display,
-    get_pipeline_last_run,
     trace_resolved_config_values,
     trace_step_details,
-    get_source_and_resouce_state_for_table,
+    get_source_and_resource_state_for_table,
     get_default_query_for_table,
     get_example_query_for_dataset,
     _get_steps_data_and_status,

@@ -53,6 +52,7 @@ from dlt._workspace.helpers.dashboard.utils import (
     TVisualPipelineStep,
 )

+from dlt.pipeline.trace import TRACE_FILE_NAME
 from tests.workspace.helpers.dashboard.example_pipelines import (
     SUCCESS_PIPELINE_DUCKDB,
     SUCCESS_PIPELINE_FILESYSTEM,
@@ -84,12 +84,12 @@ def temp_pipelines_dir():
     (pipelines_dir / "_dlt_internal").mkdir()

     # Create trace files with different timestamps
-    trace_file_1 = pipelines_dir / "success_pipeline_1" / PICKLE_TRACE_FILE
+    trace_file_1 = pipelines_dir / "success_pipeline_1" / TRACE_FILE_NAME
     trace_file_1.touch()
     # Set modification time to 2 days ago
     os.utime(trace_file_1, (1000000, 1000000))

-    trace_file_2 = pipelines_dir / "success_pipeline_2" / PICKLE_TRACE_FILE
+    trace_file_2 = pipelines_dir / "success_pipeline_2" / TRACE_FILE_NAME
     trace_file_2.touch()
     # Set modification time to 1 day ago (more recent)
     os.utime(trace_file_2, (2000000, 2000000))
@@ -97,13 +97,94 @@ def temp_pipelines_dir():
|
||||
yield str(pipelines_dir)
|
||||
|
||||
|
||||
#
|
||||
# cli utils tests
|
||||
# TODO: move to test_cli_utils.py - pipeline fixtures should be unified for cli, dashboard and mcp tests
|
||||
#
|
||||
|
||||
|
||||
@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
|
||||
def test_get_pipelines(pipeline: dlt.Pipeline):
|
||||
"""Test getting local pipelines"""
|
||||
pipelines_dir, pipelines = cli_utils.list_local_pipelines(pipeline.pipelines_dir)
|
||||
assert pipelines_dir == pipeline.pipelines_dir
|
||||
assert len(pipelines) == 1
|
||||
assert pipelines[0]["name"] == pipeline.pipeline_name


def test_get_local_pipelines_with_temp_dir(temp_pipelines_dir):
    """Test getting local pipelines with temporary directory"""
    pipelines_dir, pipelines = cli_utils.list_local_pipelines(temp_pipelines_dir)

    assert pipelines_dir == temp_pipelines_dir
    assert len(pipelines) == 3  # success_pipeline_1, success_pipeline_2, _dlt_internal

    # Should be sorted by timestamp (descending)
    pipeline_names = [p["name"] for p in pipelines]
    assert "success_pipeline_2" in pipeline_names
    assert "success_pipeline_1" in pipeline_names
    assert "_dlt_internal" in pipeline_names

    # Check timestamps are present
    for pipeline in pipelines:
        assert "timestamp" in pipeline
        assert isinstance(pipeline["timestamp"], (int, float))


def test_get_local_pipelines_empty_dir():
    """Test getting local pipelines from empty directory"""
    with tempfile.TemporaryDirectory() as temp_dir:
        pipelines_dir, pipelines = cli_utils.list_local_pipelines(temp_dir)

        assert pipelines_dir == temp_dir
        assert pipelines == []


def test_get_local_pipelines_nonexistent_dir():
    """Test getting local pipelines from nonexistent directory"""
    nonexistent_dir = "/nonexistent/directory"
    pipelines_dir, pipelines = cli_utils.list_local_pipelines(nonexistent_dir)

    assert pipelines_dir == nonexistent_dir
    assert pipelines == []


@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
def test_get_pipeline_last_run(pipeline: dlt.Pipeline):
    """Test getting the last run of a pipeline"""
    if pipeline.pipeline_name in [NEVER_RAN_PIPELINE, NO_DESTINATION_PIPELINE]:
        assert get_pipeline_last_run(pipeline.pipeline_name, pipeline.pipelines_dir) == 0
        assert (
            cli_utils.get_pipeline_trace_mtime(pipeline.pipelines_dir, pipeline.pipeline_name) == 0
        )
    else:
        assert get_pipeline_last_run(pipeline.pipeline_name, pipeline.pipelines_dir) > 1000000
        assert (
            cli_utils.get_pipeline_trace_mtime(pipeline.pipelines_dir, pipeline.pipeline_name)
            > 1000000
        )
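(Annotation, not part of the diff: together with the fixture above, these assertions imply that the "last run" value is the mtime of the pipeline's trace file, and 0 when no trace exists. An assumed equivalent of cli_utils.get_pipeline_trace_mtime, not the actual implementation:)

    import os

    def trace_mtime(pipelines_dir: str, pipeline_name: str) -> float:
        # trace file lives under <pipelines_dir>/<pipeline_name>/, as set up in the fixture
        trace_path = os.path.join(pipelines_dir, pipeline_name, TRACE_FILE_NAME)
        return os.path.getmtime(trace_path) if os.path.isfile(trace_path) else 0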


def test_integration_get_local_pipelines_with_sorting(temp_pipelines_dir):
    """Test integration scenario with multiple pipelines sorted by timestamp"""
    pipelines_dir, pipelines = cli_utils.list_local_pipelines(
        temp_pipelines_dir, sort_by_trace=True
    )

    assert pipelines_dir == temp_pipelines_dir
    assert len(pipelines) == 3

    # Should be sorted by timestamp (descending - most recent first)
    timestamps = [p["timestamp"] for p in pipelines]
    assert timestamps == sorted(timestamps, reverse=True)

    # Verify the most recent pipeline is first
    most_recent = pipelines[0]
    assert most_recent["name"] == "success_pipeline_2"
    assert most_recent["timestamp"] == 2000000


#
# dashboard utils tests
#


@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
@@ -159,20 +240,11 @@ def test_resolve_dashboard_config(success_pipeline_duckdb) -> None:
    assert config.datetime_format == "workspace format"


@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
def test_get_pipelines(pipeline: dlt.Pipeline):
    """Test getting local pipelines"""
    pipelines_dir, pipelines = get_local_pipelines(pipeline.pipelines_dir)
    assert pipelines_dir == pipeline.pipelines_dir
    assert len(pipelines) == 1
    assert pipelines[0]["name"] == pipeline.pipeline_name


@pytest.mark.parametrize("pipeline", PIPELINES_WITH_LOAD, indirect=True)
def test_get_source_and_resouce_state_for_table(pipeline: dlt.Pipeline):
def test_get_source_and_resource_state_for_table(pipeline: dlt.Pipeline):
    """Test getting source and resource state for a table"""
    table = pipeline.default_schema.tables["purchases"]
    resource_name, source_state, resource_state = get_source_and_resouce_state_for_table(
    resource_name, source_state, resource_state = get_source_and_resource_state_for_table(
        table, pipeline, pipeline.default_schema_name
    )
    assert resource_name
@@ -184,43 +256,6 @@ def test_get_source_and_resouce_state_for_table(pipeline: dlt.Pipeline):
    assert mo.json(source_state).text


def test_get_local_pipelines_with_temp_dir(temp_pipelines_dir):
    """Test getting local pipelines with temporary directory"""
    pipelines_dir, pipelines = get_local_pipelines(temp_pipelines_dir)

    assert pipelines_dir == temp_pipelines_dir
    assert len(pipelines) == 3  # success_pipeline_1, success_pipeline_2, _dlt_internal

    # Should be sorted by timestamp (descending)
    pipeline_names = [p["name"] for p in pipelines]
    assert "success_pipeline_2" in pipeline_names
    assert "success_pipeline_1" in pipeline_names
    assert "_dlt_internal" in pipeline_names

    # Check timestamps are present
    for pipeline in pipelines:
        assert "timestamp" in pipeline
        assert isinstance(pipeline["timestamp"], (int, float))


def test_get_local_pipelines_empty_dir():
    """Test getting local pipelines from empty directory"""
    with tempfile.TemporaryDirectory() as temp_dir:
        pipelines_dir, pipelines = get_local_pipelines(temp_dir)

        assert pipelines_dir == temp_dir
        assert pipelines == []


def test_get_local_pipelines_nonexistent_dir():
    """Test getting local pipelines from nonexistent directory"""
    nonexistent_dir = "/nonexistent/directory"
    pipelines_dir, pipelines = get_local_pipelines(nonexistent_dir)

    assert pipelines_dir == nonexistent_dir
    assert pipelines == []


@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
def test_get_pipeline(pipeline: dlt.Pipeline):
    """Test getting a real pipeline by name"""
@@ -681,23 +716,6 @@ def test_dict_to_table_items():
    assert result_sorted == expected_sorted


def test_integration_get_local_pipelines_with_sorting(temp_pipelines_dir):
    """Test integration scenario with multiple pipelines sorted by timestamp"""
    pipelines_dir, pipelines = get_local_pipelines(temp_pipelines_dir, sort_by_trace=True)

    assert pipelines_dir == temp_pipelines_dir
    assert len(pipelines) == 3

    # Should be sorted by timestamp (descending - most recent first)
    timestamps = [p["timestamp"] for p in pipelines]
    assert timestamps == sorted(timestamps, reverse=True)

    # Verify the most recent pipeline is first
    most_recent = pipelines[0]
    assert most_recent["name"] == "success_pipeline_2"
    assert most_recent["timestamp"] == 2000000


@pytest.mark.parametrize("pipeline", ALL_PIPELINES, indirect=True)
def test_integration_pipeline_workflow(pipeline, temp_pipelines_dir):
    """Test integration scenario with complete pipeline workflow"""
@@ -791,8 +809,11 @@ def test_get_steps_data_and_status(

def test_get_migrations_count(temp_pipelines_dir) -> None:
    """Test getting migrations count from the pipeline's last load info"""
    import duckdb

    pipeline = create_success_pipeline_duckdb(temp_pipelines_dir)
    db_conn = duckdb.connect()
    try:
        pipeline = create_success_pipeline_duckdb(temp_pipelines_dir, db_conn=db_conn)

        migrations_count = _get_migrations_count(pipeline.last_trace.last_load_info)
        assert migrations_count == 1
@@ -808,6 +829,8 @@ def test_get_migrations_count(temp_pipelines_dir) -> None:
        pipeline.load()
        migrations_count = _get_migrations_count(pipeline.last_trace.last_load_info)
        assert migrations_count == 3
    finally:
        db_conn.close()
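(Annotation, not part of the diff: the rewrite above threads one externally owned DuckDB connection through the fixture via the new db_conn keyword and closes it in finally, so repeated loads hit the same database and the handle is released even when an assertion fails. The general shape of that pattern:)

    import duckdb

    db_conn = duckdb.connect()  # caller owns the connection
    try:
        ...  # run pipelines against db_conn (hypothetical work)
    finally:
        db_conn.close()  # released exactly once, even on failure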


@pytest.mark.parametrize(

@@ -13,6 +13,10 @@ def test_secrets_toml() -> None:
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/access.secrets.toml")),
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/secrets.toml")),
    ]
    assert provider.present_locations == [
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/access.secrets.toml")),
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/secrets.toml")),
    ]
    # overrides secrets.toml with profile
    assert provider.get_value("api_key", str, None) == ("PASS", "api_key")
    # still has secrets.toml keys
@@ -21,3 +25,13 @@ def test_secrets_toml() -> None:
    # dev profile will load just secrets.toml
    provider = ProfileSecretsTomlProvider(os.path.join(TESTS_CASES_DIR, ".dlt"), "dev")
    assert provider.get_value("api_key", str, None) == ("X", "api_key")


def test_secrets_not_present() -> None:
    provider = ProfileSecretsTomlProvider(os.path.join(TESTS_CASES_DIR, ".dlt"), "unknown")
    # first access profile, global comes second
    assert provider.locations == [
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/unknown.secrets.toml")),
        str(Path(TESTS_CASES_DIR).joinpath(".dlt/secrets.toml")),
    ]
    assert provider.present_locations == [str(Path(TESTS_CASES_DIR).joinpath(".dlt/secrets.toml"))]
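(Annotation, not part of the diff: the two tests fix the provider's lookup order — the profile-specific "<profile>.secrets.toml" is consulted first, the shared "secrets.toml" second — while present_locations keeps only files that actually exist. A toy first-hit resolver under those assumptions, handling flat top-level keys only:)

    import os
    import tomllib  # Python 3.11+

    def resolve_secret(settings_dir: str, profile: str, key: str):
        for name in (f"{profile}.secrets.toml", "secrets.toml"):
            path = os.path.join(settings_dir, name)
            if not os.path.isfile(path):
                continue  # missing files also stay out of present_locations
            with open(path, "rb") as f:
                data = tomllib.load(f)
            if key in data:
                return data[key]
        return None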

@@ -43,6 +43,12 @@ def test_workspace_settings() -> None:
    with isolated_workspace("default") as ctx:
        assert_workspace_context(ctx, "default", DEFAULT_PROFILE)
        assert_dev_config()
        assert ctx.configured_profiles() == [DEFAULT_PROFILE]
        # has dev config
        assert ctx._profile_has_config(DEFAULT_PROFILE) is True
        assert ctx._profile_has_config("unknown") is False
        # no pipelines
        assert ctx._profile_has_pipelines(DEFAULT_PROFILE) is False
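(Annotation, not part of the diff: taken together, the workspace tests suggest configured_profiles() reports the default and current profiles plus any profile that owns config files or pipelines, probed via _profile_has_config and _profile_has_pipelines. A hypothetical reduction of that rule, not the actual method:)

    def configured_profiles(ctx, candidates):
        # candidates: profile names discovered in settings files and pipeline storage
        names = {ctx.default_profile, ctx.profile}
        names |= {
            p for p in candidates
            if ctx._profile_has_config(p) or ctx._profile_has_pipelines(p)
        }
        return sorted(names)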


def test_workspace_profile() -> None:
@@ -50,6 +56,9 @@ def test_workspace_profile() -> None:
        assert_workspace_context(ctx, "default", "prod")
        # mocked global dir
        assert ctx.global_dir.endswith(".global_dir")
        assert set(ctx.configured_profiles()) == {"dev", "prod"}
        assert ctx._profile_has_config("prod") is False
        assert ctx._profile_has_config("dev") is True

        # files for dev profile will be ignored
        assert dlt.config["config_val"] == "config.toml"
@@ -69,6 +78,8 @@ def test_workspace_profile() -> None:
        # standard global dir
        assert ctx.global_dir == global_dir()
        assert_dev_config()
        assert ctx.configured_profiles() == ["dev"]
        assert ctx._profile_has_config("dev") is True


def test_profile_switch_no_workspace():
@@ -95,8 +106,15 @@ def test_workspace_configuration():

def test_pinned_profile() -> None:
    with isolated_workspace("default") as ctx:
        assert ctx.profile == "dev"
        assert ctx.configured_profiles() == ["dev"]
        # we pin prod
        save_profile_pin(ctx, "prod")
        assert read_profile_pin(ctx) == "prod"
        # prod is a configured profile now
        assert set(ctx.configured_profiles()) == {"prod", "dev"}
        # because it is pinned, we still do not see it as special config
        assert ctx._profile_has_config("prod") is False

        # this is the new default profile
        ctx = switch_context(ctx.run_dir)
@@ -114,6 +132,13 @@ def test_workspace_pipeline() -> None:
    pytest.importorskip("duckdb", minversion="1.3.2")

    with isolated_workspace("pipelines", profile="tests") as ctx:
        # prod and tests have explicit config for profiles
        assert set(ctx.configured_profiles()) == {"tests", "prod"}
        assert ctx._profile_has_config("tests") is True
        assert ctx._profile_has_config("prod") is True
        assert ctx._profile_has_pipelines("tests") is False
        assert ctx._profile_has_pipelines("prod") is False

        # `ducklake_pipeline` configured in config.toml
        pipeline = dlt.pipeline(pipeline_name="ducklake_pipeline")
        assert pipeline.run_context is ctx
@@ -128,6 +153,7 @@ def test_workspace_pipeline() -> None:
        assert os.path.isdir(os.path.join(ctx.local_dir, "test_ducklake.files"))
        # make sure that working folder got created
        assert os.path.isdir(os.path.join(ctx.get_data_entity("pipelines"), "ducklake_pipeline"))
        assert ctx._profile_has_pipelines("tests") is True

        # test wipe function
        with always_choose(always_choose_default=False, always_choose_value=True):
@@ -148,6 +174,14 @@ def test_workspace_pipeline() -> None:
        # local files point to prod
        assert os.path.isfile(os.path.join(ctx.local_dir, "prod_ducklake.sqlite"))
        assert os.path.isdir(os.path.join(ctx.local_dir, "prod_ducklake.files"))
        # both profiles have pipelines
        assert ctx._profile_has_pipelines("tests") is True
        assert ctx._profile_has_pipelines("prod") is True
        assert set(ctx.configured_profiles()) == {"prod", "tests"}

        # switch to dev
        ctx = ctx.switch_profile("dev")
        assert set(ctx.configured_profiles()) == {"dev", "prod", "tests"}


def test_workspace_send_artifacts() -> None:
@@ -202,6 +236,8 @@ def assert_workspace_context(context: WorkspaceRunContext, name_prefix: str, pro
    # basic properties must be set
    assert context.name.startswith(name_prefix)
    assert context.profile == profile
    assert context.default_profile == "dev"
    assert context.profile in context.configured_profiles()

    expected_settings = os.path.join(context.run_dir, DOT_DLT)
    assert context.settings_dir == expected_settings
@@ -213,6 +249,8 @@ def assert_workspace_context(context: WorkspaceRunContext, name_prefix: str, pro
    assert context.data_dir == expected_data_dir
    # got created
    assert os.path.isdir(context.data_dir)
    # is a default dir
    assert context._has_default_working_dir() is True

    # local files
    expected_local_dir = os.path.join(context.run_dir, DEFAULT_LOCAL_FOLDER, profile)
210
uv.lock
generated
@@ -246,7 +246,8 @@ dependencies = [
    { name = "aiohappyeyeballs" },
    { name = "aiosignal" },
    { name = "async-timeout", marker = "python_full_version < '3.11'" },
    { name = "attrs" },
    { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
    { name = "frozenlist" },
    { name = "multidict" },
    { name = "propcache" },
@@ -459,7 +460,8 @@ dependencies = [
    { name = "apache-airflow-providers-sqlite", marker = "python_full_version < '3.12'" },
    { name = "argcomplete", marker = "python_full_version < '3.12'" },
    { name = "asgiref", marker = "python_full_version < '3.12'" },
    { name = "attrs", marker = "python_full_version < '3.12'" },
    { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10' and python_full_version < '3.12'" },
    { name = "blinker", marker = "python_full_version < '3.12'" },
    { name = "colorlog", marker = "python_full_version < '3.12'" },
    { name = "configupdater", marker = "python_full_version < '3.12'" },
@@ -745,11 +747,60 @@ wheels = [
name = "attrs"
version = "25.3.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
    "python_full_version < '3.10' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version < '3.10' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version < '3.10' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version < '3.10' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version < '3.10' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version < '3.10' and os_name != 'nt' and sys_platform == 'emscripten'",
]
sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" },
]

[[package]]
name = "attrs"
version = "25.4.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
    "python_full_version >= '3.14' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version >= '3.14' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version >= '3.14' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version >= '3.14' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version >= '3.14' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version >= '3.14' and os_name != 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.13.*' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.13.*' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.12.*' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.11.*' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.10.*' and os_name == 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.12.*' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.11.*' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.10.*' and os_name != 'nt' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.13.*' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.13.*' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.12.*' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.11.*' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.10.*' and os_name == 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.12.*' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.11.*' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.10.*' and os_name != 'nt' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
    "python_full_version == '3.13.*' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.13.*' and os_name != 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.12.*' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.11.*' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.10.*' and os_name == 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.12.*' and os_name != 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.11.*' and os_name != 'nt' and sys_platform == 'emscripten'",
    "python_full_version == '3.10.*' and os_name != 'nt' and sys_platform == 'emscripten'",
]
sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
]

[[package]]
name = "authlib"
version = "1.3.1"
@@ -2007,7 +2058,7 @@ wheels = [

[[package]]
name = "dlt"
version = "1.19.1"
version = "1.20.0a0"
source = { editable = "." }
dependencies = [
    { name = "click" },
@@ -2108,6 +2159,10 @@ gs = [
http = [
    { name = "aiohttp" },
]
hub = [
    { name = "dlt-runtime", marker = "python_full_version >= '3.10'" },
    { name = "dlthub", marker = "python_full_version >= '3.10'" },
]
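(Annotation, not part of the diff: the new "hub" extra pulls in the dlthub and dlt-runtime distributions on Python 3.10+, constrained to >=0.19.0a0,<0.21 in requires-dist further down. A hedged runtime probe using only the distribution names recorded in this lock, not a dlt API:)

    from importlib.metadata import PackageNotFoundError, version

    def hub_extra_installed() -> bool:
        try:
            version("dlthub")
            version("dlt-runtime")
            return True
        except PackageNotFoundError:
            return False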
lancedb = [
    { name = "lancedb", marker = "python_full_version < '3.13'" },
    { name = "pyarrow" },
@@ -2301,6 +2356,8 @@ requires-dist = [
    { name = "db-dtypes", marker = "extra == 'bigquery'", specifier = ">=1.2.0" },
    { name = "db-dtypes", marker = "extra == 'gcp'", specifier = ">=1.2.0" },
    { name = "deltalake", marker = "extra == 'deltalake'", specifier = ">=0.25.1" },
    { name = "dlt-runtime", marker = "python_full_version >= '3.10' and extra == 'hub'", specifier = ">=0.19.0a0,<0.21" },
    { name = "dlthub", marker = "python_full_version >= '3.10' and extra == 'hub'", specifier = ">=0.19.0a0,<0.21" },
    { name = "duckdb", marker = "extra == 'duckdb'", specifier = ">=0.9" },
    { name = "duckdb", marker = "extra == 'ducklake'", specifier = ">=1.2.0" },
    { name = "duckdb", marker = "extra == 'motherduck'", specifier = ">=0.9" },
@@ -2381,7 +2438,7 @@ requires-dist = [
    { name = "weaviate-client", marker = "extra == 'weaviate'", specifier = ">=3.26.7,<4.0.0" },
    { name = "win-precise-time", marker = "python_full_version < '3.13' and os_name == 'nt'", specifier = ">=1.4.2" },
]
provides-extras = ["gcp", "bigquery", "postgres", "redshift", "parquet", "duckdb", "ducklake", "filesystem", "s3", "gs", "az", "sftp", "http", "snowflake", "motherduck", "cli", "athena", "weaviate", "mssql", "synapse", "qdrant", "databricks", "clickhouse", "dremio", "lancedb", "deltalake", "sql-database", "sqlalchemy", "pyiceberg", "postgis", "workspace", "dbml"]
provides-extras = ["gcp", "bigquery", "postgres", "redshift", "parquet", "duckdb", "ducklake", "filesystem", "s3", "gs", "az", "sftp", "http", "snowflake", "motherduck", "cli", "athena", "weaviate", "mssql", "synapse", "qdrant", "databricks", "clickhouse", "dremio", "lancedb", "deltalake", "sql-database", "sqlalchemy", "pyiceberg", "postgis", "workspace", "hub", "dbml"]

[package.metadata.requires-dev]
adbc = [
@@ -2476,6 +2533,36 @@ sources = [
]
streamlit = [{ name = "streamlit", marker = "python_full_version >= '3.9' and python_full_version < '3.14'", specifier = ">=1.40.0,<2" }]

[[package]]
name = "dlt-runtime"
version = "0.20.0a0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
    { name = "cron-descriptor", marker = "python_full_version >= '3.10'" },
    { name = "httpx", marker = "python_full_version >= '3.10'" },
    { name = "pathspec", marker = "python_full_version >= '3.10'" },
    { name = "python-jose", marker = "python_full_version >= '3.10'" },
    { name = "tabulate", marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1d/f5/d5c74ba2560507493b9d5d0c98a8482e15f036f338bb2832730065679e22/dlt_runtime-0.20.0a0.tar.gz", hash = "sha256:3e9d5df91f03152c251e5f874e5a13ac1bd66d5fbac0357c11996bf4e8279c8c", size = 48300, upload-time = "2025-12-04T15:51:51.192Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/21/1f/ebcfea0c69a697d64f836140bfdb0f73d0be2880547dc00ff267eaad7569/dlt_runtime-0.20.0a0-py3-none-any.whl", hash = "sha256:d6498d4078980c833ea9c5cbfc8b7146488beddb77146ae6d3a2e7d7345dfb5a", size = 118233, upload-time = "2025-12-04T15:51:49.67Z" },
]

[[package]]
name = "dlthub"
version = "0.20.0a0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "python-jose", marker = "python_full_version >= '3.10'" },
    { name = "ruamel-yaml", marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/ff/3484810a588516053ead898e86cf05bc416ca0a06035cfe11440153a518c/dlthub-0.20.0a0.tar.gz", hash = "sha256:34aa26c8103e54913f92bcbbbc55df516e32cfcaf24f176e7562141fabc8c1e6", size = 154938, upload-time = "2025-12-04T18:55:23.034Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1c/a8/2116d92df6fa1c660eb3ae699f9409bfe968700d6842ad9c7a4c53ce7326/dlthub-0.20.0a0-py3-none-any.whl", hash = "sha256:5c3abc352b5525d5f84f744b511339ff9d728234812824dc6065b29b86615aee", size = 205718, upload-time = "2025-12-04T18:55:20.143Z" },
]

[[package]]
name = "dnspython"
version = "2.7.0"
@@ -2607,6 +2694,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/2e/97/2b09ad149081d75534fe063ff6a1b4b91fffe7e17816a7d9261aa7456788/duckdb-1.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4389fc3812e26977034fe3ff08d1f7dbfe6d2d8337487b4686f2b50e254d7ee3", size = 22723577, upload-time = "2025-07-08T10:41:10.392Z" },
]

[[package]]
name = "ecdsa"
version = "0.19.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "six", marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" },
]

[[package]]
name = "email-validator"
version = "2.2.0"
@@ -2723,7 +2822,8 @@ name = "flake8-bugbear"
version = "22.12.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "attrs" },
    { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
    { name = "flake8" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/b5/83a89114a82a2764ddfe27451df6b69c46513f88a3f66206514265b6f447/flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0", size = 44094, upload-time = "2022-12-06T19:07:02.569Z" }
@@ -4135,7 +4235,8 @@ name = "jsonschema"
version = "4.24.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "attrs" },
    { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
    { name = "jsonschema-specifications" },
    { name = "referencing" },
    { name = "rpds-py" },
@@ -7503,6 +7604,20 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]

[[package]]
name = "python-jose"
version = "3.5.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "ecdsa", marker = "python_full_version >= '3.10'" },
    { name = "pyasn1", marker = "python_full_version >= '3.10'" },
    { name = "rsa", marker = "python_full_version >= '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" },
]

[[package]]
name = "python-multipart"
version = "0.0.20"
@@ -7683,7 +7798,8 @@ name = "referencing"
version = "0.36.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "attrs" },
    { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
    { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
    { name = "rpds-py" },
    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
@@ -8015,6 +8131,86 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" },
]

[[package]]
name = "ruamel-yaml"
version = "0.18.16"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "ruamel-yaml-clib", marker = "python_full_version >= '3.10' and python_full_version < '3.14' and platform_python_implementation == 'CPython'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9f/c7/ee630b29e04a672ecfc9b63227c87fd7a37eb67c1bf30fe95376437f897c/ruamel.yaml-0.18.16.tar.gz", hash = "sha256:a6e587512f3c998b2225d68aa1f35111c29fad14aed561a26e73fab729ec5e5a", size = 147269, upload-time = "2025-10-22T17:54:02.346Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/0f/73/bb1bc2529f852e7bf64a2dec885e89ff9f5cc7bbf6c9340eed30ff2c69c5/ruamel.yaml-0.18.16-py3-none-any.whl", hash = "sha256:048f26d64245bae57a4f9ef6feb5b552a386830ef7a826f235ffb804c59efbba", size = 119858, upload-time = "2025-10-22T17:53:59.012Z" },
]

[[package]]
name = "ruamel-yaml-clib"
version = "0.2.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/97/60fda20e2fb54b83a61ae14648b0817c8f5d84a3821e40bfbdae1437026a/ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600", size = 225794, upload-time = "2025-11-16T16:12:59.761Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/f7/5a/4ab767cd42dcd65b83c323e1620d7c01ee60a52f4032fb7b61501f45f5c2/ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88eea8baf72f0ccf232c22124d122a7f26e8a24110a0273d9bcddcb0f7e1fa03", size = 147454, upload-time = "2025-11-16T16:13:02.54Z" },
    { url = "https://files.pythonhosted.org/packages/40/44/184173ac1e74fd35d308108bcbf83904d6ef8439c70763189225a166b238/ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b6f7d74d094d1f3a4e157278da97752f16ee230080ae331fcc219056ca54f77", size = 132467, upload-time = "2025-11-16T16:13:03.539Z" },
    { url = "https://files.pythonhosted.org/packages/49/1b/2d2077a25fe682ae335007ca831aff42e3cbc93c14066675cf87a6c7fc3e/ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4be366220090d7c3424ac2b71c90d1044ea34fca8c0b88f250064fd06087e614", size = 693454, upload-time = "2025-11-16T20:22:41.083Z" },
    { url = "https://files.pythonhosted.org/packages/90/16/e708059c4c429ad2e33be65507fc1730641e5f239fb2964efc1ba6edea94/ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f66f600833af58bea694d5892453f2270695b92200280ee8c625ec5a477eed3", size = 700345, upload-time = "2025-11-16T16:13:04.771Z" },
    { url = "https://files.pythonhosted.org/packages/d9/79/0e8ef51df1f0950300541222e3332f20707a9c210b98f981422937d1278c/ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da3d6adadcf55a93c214d23941aef4abfd45652110aed6580e814152f385b862", size = 731306, upload-time = "2025-11-16T16:13:06.312Z" },
    { url = "https://files.pythonhosted.org/packages/a6/f4/2cdb54b142987ddfbd01fc45ac6bd882695fbcedb9d8bbf796adc3fc3746/ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9fde97ecb7bb9c41261c2ce0da10323e9227555c674989f8d9eb7572fc2098d", size = 692415, upload-time = "2025-11-16T16:13:07.465Z" },
    { url = "https://files.pythonhosted.org/packages/a0/07/40b5fc701cce8240a3e2d26488985d3bbdc446e9fe397c135528d412fea6/ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:05c70f7f86be6f7bee53794d80050a28ae7e13e4a0087c1839dcdefd68eb36b6", size = 705007, upload-time = "2025-11-16T20:22:42.856Z" },
    { url = "https://files.pythonhosted.org/packages/82/19/309258a1df6192fb4a77ffa8eae3e8150e8d0ffa56c1b6fa92e450ba2740/ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f1d38cbe622039d111b69e9ca945e7e3efebb30ba998867908773183357f3ed", size = 723974, upload-time = "2025-11-16T16:13:08.72Z" },
    { url = "https://files.pythonhosted.org/packages/67/3a/d6ee8263b521bfceb5cd2faeb904a15936480f2bb01c7ff74a14ec058ca4/ruamel_yaml_clib-0.2.15-cp310-cp310-win32.whl", hash = "sha256:fe239bdfdae2302e93bd6e8264bd9b71290218fff7084a9db250b55caaccf43f", size = 102836, upload-time = "2025-11-16T16:13:10.27Z" },
    { url = "https://files.pythonhosted.org/packages/ed/03/92aeb5c69018387abc49a8bb4f83b54a0471d9ef48e403b24bac68f01381/ruamel_yaml_clib-0.2.15-cp310-cp310-win_amd64.whl", hash = "sha256:468858e5cbde0198337e6a2a78eda8c3fb148bdf4c6498eaf4bc9ba3f8e780bd", size = 121917, upload-time = "2025-11-16T16:13:12.145Z" },
    { url = "https://files.pythonhosted.org/packages/2c/80/8ce7b9af532aa94dd83360f01ce4716264db73de6bc8efd22c32341f6658/ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd", size = 147998, upload-time = "2025-11-16T16:13:13.241Z" },
    { url = "https://files.pythonhosted.org/packages/53/09/de9d3f6b6701ced5f276d082ad0f980edf08ca67114523d1b9264cd5e2e0/ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137", size = 132743, upload-time = "2025-11-16T16:13:14.265Z" },
    { url = "https://files.pythonhosted.org/packages/0e/f7/73a9b517571e214fe5c246698ff3ed232f1ef863c8ae1667486625ec688a/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401", size = 731459, upload-time = "2025-11-16T20:22:44.338Z" },
    { url = "https://files.pythonhosted.org/packages/9b/a2/0dc0013169800f1c331a6f55b1282c1f4492a6d32660a0cf7b89e6684919/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262", size = 749289, upload-time = "2025-11-16T16:13:15.633Z" },
    { url = "https://files.pythonhosted.org/packages/aa/ed/3fb20a1a96b8dc645d88c4072df481fe06e0289e4d528ebbdcc044ebc8b3/ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f", size = 777630, upload-time = "2025-11-16T16:13:16.898Z" },
    { url = "https://files.pythonhosted.org/packages/60/50/6842f4628bc98b7aa4733ab2378346e1441e150935ad3b9f3c3c429d9408/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d", size = 744368, upload-time = "2025-11-16T16:13:18.117Z" },
    { url = "https://files.pythonhosted.org/packages/d3/b0/128ae8e19a7d794c2e36130a72b3bb650ce1dd13fb7def6cf10656437dcf/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922", size = 745233, upload-time = "2025-11-16T20:22:45.833Z" },
    { url = "https://files.pythonhosted.org/packages/75/05/91130633602d6ba7ce3e07f8fc865b40d2a09efd4751c740df89eed5caf9/ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490", size = 770963, upload-time = "2025-11-16T16:13:19.344Z" },
    { url = "https://files.pythonhosted.org/packages/fd/4b/fd4542e7f33d7d1bc64cc9ac9ba574ce8cf145569d21f5f20133336cdc8c/ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c", size = 102640, upload-time = "2025-11-16T16:13:20.498Z" },
    { url = "https://files.pythonhosted.org/packages/bb/eb/00ff6032c19c7537371e3119287999570867a0eafb0154fccc80e74bf57a/ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e", size = 121996, upload-time = "2025-11-16T16:13:21.855Z" },
    { url = "https://files.pythonhosted.org/packages/72/4b/5fde11a0722d676e469d3d6f78c6a17591b9c7e0072ca359801c4bd17eee/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff", size = 149088, upload-time = "2025-11-16T16:13:22.836Z" },
    { url = "https://files.pythonhosted.org/packages/85/82/4d08ac65ecf0ef3b046421985e66301a242804eb9a62c93ca3437dc94ee0/ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2", size = 134553, upload-time = "2025-11-16T16:13:24.151Z" },
    { url = "https://files.pythonhosted.org/packages/b9/cb/22366d68b280e281a932403b76da7a988108287adff2bfa5ce881200107a/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1", size = 737468, upload-time = "2025-11-16T20:22:47.335Z" },
    { url = "https://files.pythonhosted.org/packages/71/73/81230babf8c9e33770d43ed9056f603f6f5f9665aea4177a2c30ae48e3f3/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60", size = 753349, upload-time = "2025-11-16T16:13:26.269Z" },
    { url = "https://files.pythonhosted.org/packages/61/62/150c841f24cda9e30f588ef396ed83f64cfdc13b92d2f925bb96df337ba9/ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9", size = 788211, upload-time = "2025-11-16T16:13:27.441Z" },
    { url = "https://files.pythonhosted.org/packages/30/93/e79bd9cbecc3267499d9ead919bd61f7ddf55d793fb5ef2b1d7d92444f35/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642", size = 743203, upload-time = "2025-11-16T16:13:28.671Z" },
    { url = "https://files.pythonhosted.org/packages/8d/06/1eb640065c3a27ce92d76157f8efddb184bd484ed2639b712396a20d6dce/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690", size = 747292, upload-time = "2025-11-16T20:22:48.584Z" },
    { url = "https://files.pythonhosted.org/packages/a5/21/ee353e882350beab65fcc47a91b6bdc512cace4358ee327af2962892ff16/ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a", size = 771624, upload-time = "2025-11-16T16:13:29.853Z" },
    { url = "https://files.pythonhosted.org/packages/57/34/cc1b94057aa867c963ecf9ea92ac59198ec2ee3a8d22a126af0b4d4be712/ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144", size = 100342, upload-time = "2025-11-16T16:13:31.067Z" },
    { url = "https://files.pythonhosted.org/packages/b3/e5/8925a4208f131b218f9a7e459c0d6fcac8324ae35da269cb437894576366/ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc", size = 119013, upload-time = "2025-11-16T16:13:32.164Z" },
    { url = "https://files.pythonhosted.org/packages/17/5e/2f970ce4c573dc30c2f95825f2691c96d55560268ddc67603dc6ea2dd08e/ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb", size = 147450, upload-time = "2025-11-16T16:13:33.542Z" },
    { url = "https://files.pythonhosted.org/packages/d6/03/a1baa5b94f71383913f21b96172fb3a2eb5576a4637729adbf7cd9f797f8/ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471", size = 133139, upload-time = "2025-11-16T16:13:34.587Z" },
    { url = "https://files.pythonhosted.org/packages/dc/19/40d676802390f85784235a05788fd28940923382e3f8b943d25febbb98b7/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25", size = 731474, upload-time = "2025-11-16T20:22:49.934Z" },
    { url = "https://files.pythonhosted.org/packages/ce/bb/6ef5abfa43b48dd55c30d53e997f8f978722f02add61efba31380d73e42e/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a", size = 748047, upload-time = "2025-11-16T16:13:35.633Z" },
    { url = "https://files.pythonhosted.org/packages/ff/5d/e4f84c9c448613e12bd62e90b23aa127ea4c46b697f3d760acc32cb94f25/ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf", size = 782129, upload-time = "2025-11-16T16:13:36.781Z" },
    { url = "https://files.pythonhosted.org/packages/de/4b/e98086e88f76c00c88a6bcf15eae27a1454f661a9eb72b111e6bbb69024d/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d", size = 736848, upload-time = "2025-11-16T16:13:37.952Z" },
    { url = "https://files.pythonhosted.org/packages/0c/5c/5964fcd1fd9acc53b7a3a5d9a05ea4f95ead9495d980003a557deb9769c7/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf", size = 741630, upload-time = "2025-11-16T20:22:51.718Z" },
    { url = "https://files.pythonhosted.org/packages/07/1e/99660f5a30fceb58494598e7d15df883a07292346ef5696f0c0ae5dee8c6/ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51", size = 766619, upload-time = "2025-11-16T16:13:39.178Z" },
    { url = "https://files.pythonhosted.org/packages/36/2f/fa0344a9327b58b54970e56a27b32416ffbcfe4dcc0700605516708579b2/ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec", size = 100171, upload-time = "2025-11-16T16:13:40.456Z" },
    { url = "https://files.pythonhosted.org/packages/06/c4/c124fbcef0684fcf3c9b72374c2a8c35c94464d8694c50f37eef27f5a145/ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6", size = 118845, upload-time = "2025-11-16T16:13:41.481Z" },
    { url = "https://files.pythonhosted.org/packages/3e/bd/ab8459c8bb759c14a146990bf07f632c1cbec0910d4853feeee4be2ab8bb/ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef", size = 147248, upload-time = "2025-11-16T16:13:42.872Z" },
    { url = "https://files.pythonhosted.org/packages/69/f2/c4cec0a30f1955510fde498aac451d2e52b24afdbcb00204d3a951b772c3/ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf", size = 133764, upload-time = "2025-11-16T16:13:43.932Z" },
    { url = "https://files.pythonhosted.org/packages/82/c7/2480d062281385a2ea4f7cc9476712446e0c548cd74090bff92b4b49e898/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000", size = 730537, upload-time = "2025-11-16T20:22:52.918Z" },
    { url = "https://files.pythonhosted.org/packages/75/08/e365ee305367559f57ba6179d836ecc3d31c7d3fdff2a40ebf6c32823a1f/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4", size = 746944, upload-time = "2025-11-16T16:13:45.338Z" },
    { url = "https://files.pythonhosted.org/packages/a1/5c/8b56b08db91e569d0a4fbfa3e492ed2026081bdd7e892f63ba1c88a2f548/ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c", size = 778249, upload-time = "2025-11-16T16:13:46.871Z" },
    { url = "https://files.pythonhosted.org/packages/6a/1d/70dbda370bd0e1a92942754c873bd28f513da6198127d1736fa98bb2a16f/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043", size = 737140, upload-time = "2025-11-16T16:13:48.349Z" },
    { url = "https://files.pythonhosted.org/packages/5b/87/822d95874216922e1120afb9d3fafa795a18fdd0c444f5c4c382f6dac761/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524", size = 741070, upload-time = "2025-11-16T20:22:54.151Z" },
    { url = "https://files.pythonhosted.org/packages/b9/17/4e01a602693b572149f92c983c1f25bd608df02c3f5cf50fd1f94e124a59/ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e", size = 765882, upload-time = "2025-11-16T16:13:49.526Z" },
    { url = "https://files.pythonhosted.org/packages/9f/17/7999399081d39ebb79e807314de6b611e1d1374458924eb2a489c01fc5ad/ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa", size = 102567, upload-time = "2025-11-16T16:13:50.78Z" },
    { url = "https://files.pythonhosted.org/packages/d2/67/be582a7370fdc9e6846c5be4888a530dcadd055eef5b932e0e85c33c7d73/ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467", size = 122847, upload-time = "2025-11-16T16:13:51.807Z" },
    { url = "https://files.pythonhosted.org/packages/99/85/6eefb87f379dd7de0f84b5b0613fbeb62b77c3b3fea0bcc39d1645af82fb/ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:923816815974425fbb1f1bf57e85eca6e14d8adc313c66db21c094927ad01815", size = 148240, upload-time = "2025-11-16T16:13:53.911Z" },
    { url = "https://files.pythonhosted.org/packages/52/9b/daff728d384563063cfcc08b67178ae0974e4bf8a239a11c826098c325e7/ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dcc7f3162d3711fd5d52e2267e44636e3e566d1e5675a5f0b30e98f2c4af7974", size = 132980, upload-time = "2025-11-16T16:13:55.059Z" },
    { url = "https://files.pythonhosted.org/packages/e7/a5/e9e14332cb5144df205cb8a705edf6af9360a751a3e5801e1fa0bb8fa30d/ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d3c9210219cbc0f22706f19b154c9a798ff65a6beeafbf77fc9c057ec806f7d", size = 695861, upload-time = "2025-11-16T20:22:55.659Z" },
    { url = "https://files.pythonhosted.org/packages/45/a1/ac45c5c0a406edbdfedb4148f06125bf994a378082bf85e13ace9b2f2a0f/ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bb7b728fd9f405aa00b4a0b17ba3f3b810d0ccc5f77f7373162e9b5f0ff75d5", size = 703585, upload-time = "2025-11-16T16:13:56.515Z" },
    { url = "https://files.pythonhosted.org/packages/ca/20/3e7b0d26261c2ac0c272f42f21408bf2d01aaa08cddd378a51056b3f5fbc/ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3cb75a3c14f1d6c3c2a94631e362802f70e83e20d1f2b2ef3026c05b415c4900", size = 734102, upload-time = "2025-11-16T16:13:57.812Z" },
    { url = "https://files.pythonhosted.org/packages/00/3d/231c8902ef8e35c5dd31bb484d3b807aee952b53ee434c0ebb3609ced170/ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:badd1d7283f3e5894779a6ea8944cc765138b96804496c91812b2829f70e18a7", size = 695072, upload-time = "2025-11-16T16:13:59.295Z" },
    { url = "https://files.pythonhosted.org/packages/dc/fb/f97f98e6d82e07f1b499fb251505d2fda30da318cc7f8708c22397bbb265/ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ba6604bbc3dfcef844631932d06a1a4dcac3fee904efccf582261948431628a", size = 707541, upload-time = "2025-11-16T20:22:56.84Z" },
    { url = "https://files.pythonhosted.org/packages/99/3e/dcb5158d09dc028ada88555a22eeaace8717f4e7bd077d8bfa22ce07e02b/ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8220fd4c6f98485e97aea65e1df76d4fed1678ede1fe1d0eed2957230d287c4", size = 726459, upload-time = "2025-11-16T16:14:00.443Z" },
    { url = "https://files.pythonhosted.org/packages/28/e8/4752698aada0a6fef4cef3346d61b5de11304950886cc03354d9af554082/ruamel_yaml_clib-0.2.15-cp39-cp39-win32.whl", hash = "sha256:04d21dc9c57d9608225da28285900762befbb0165ae48482c15d8d4989d4af14", size = 103348, upload-time = "2025-11-16T16:14:01.964Z" },
    { url = "https://files.pythonhosted.org/packages/f8/3b/5db40ece46cff9d9a265fb636722f1f5c1a027c1da09921a6c5693d920be/ruamel_yaml_clib-0.2.15-cp39-cp39-win_amd64.whl", hash = "sha256:27dc656e84396e6d687f97c6e65fb284d100483628f02d95464fd731743a4afe", size = 122500, upload-time = "2025-11-16T16:14:03.018Z" },
]

[[package]]
name = "ruff"
version = "0.3.7"