(fix) 3351 fixes default type var (#3373)

* tests minimal typing extensions in alpine docker

* keeps typevar default but does not use it in the code for backward compat
This commit is contained in:
rudolfix
2025-11-26 09:26:52 +01:00
committed by GitHub
parent 7d7b7af00c
commit cc3b88d73a
4 changed files with 18 additions and 17 deletions

View File

@@ -16,7 +16,7 @@ RUN apk update &&\
apk add --no-cache ca-certificates curl &&\
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && python3 get-pip.py &&\
rm get-pip.py &&\
pip install --upgrade setuptools wheel pip
pip install --upgrade setuptools wheel pip uv
# add build labels and envs
@@ -29,28 +29,28 @@ ENV IMAGE_VERSION=${IMAGE_VERSION}
# install exactly the same version of the library we used to build
COPY dist/dlt-${IMAGE_VERSION}.tar.gz .
RUN pip install /tmp/pydlt/dlt-${IMAGE_VERSION}.tar.gz
# create app dir to run simple test
RUN mkdir -p /app
WORKDIR /app
RUN uv venv && uv pip install /tmp/pydlt/dlt-${IMAGE_VERSION}.tar.gz --resolution lowest-direct && uv pip install typing-extensions==4.8.0
RUN rm -r /tmp/pydlt
# make sure dlt can be actually imported
RUN python -c 'import dlt;import pendulum;'
RUN uv run python -c 'import dlt;import pendulum;'
# check excluded imports
COPY deploy/dlt/restrict_imports.py .
RUN python restrict_imports.py
RUN uv run python restrict_imports.py
# run simple pipeline
COPY deploy/dlt/minimal_pipeline.py .
RUN python minimal_pipeline.py
RUN dlt pipeline fruit_pipeline info
RUN uv run python minimal_pipeline.py
RUN uv run dlt pipeline fruit_pipeline info
# enable workspace
RUN mkdir -p .dlt && touch .dlt/.workspace
# RUN dlt pipeline fruit_pipeline info
RUN dlt workspace info
RUN python minimal_pipeline.py
RUN dlt pipeline fruit_pipeline info
RUN uv run dlt workspace info
RUN uv run python minimal_pipeline.py
RUN uv run dlt pipeline fruit_pipeline info

View File

@@ -80,7 +80,7 @@ class DatabricksLoadJob(RunnableLoadJob, HasFollowupJobs):
backend.staging_allowed_local_path = file_dir
else:
# thrift backend discontinued on newer databricks connector clients
conn_.staging_allowed_local_path = file_dir
conn_.staging_allowed_local_path = file_dir # type: ignore[attr-defined,unused-ignore]
# local file by uploading to a temporary volume on Databricks
from_clause, file_name, volume_path, volume_file_path = self._handle_local_file_upload(
self._file_path

View File

@@ -73,7 +73,7 @@ class ItemTransform(BaseItemTransform[TCustomMetrics], ABC, Generic[TAny, TCusto
pass
class FilterItem(ItemTransform[bool]):
class FilterItem(ItemTransform[bool, Dict[str, Any]]):
# mypy needs those to type correctly
_f_meta: ItemTransformFunctionWithMeta[bool]
_f: ItemTransformFunctionNoMeta[bool]
@@ -99,7 +99,7 @@ class FilterItem(ItemTransform[bool]):
return item if self._f(item) else None
class MapItem(ItemTransform[TDataItem]):
class MapItem(ItemTransform[TDataItem, Dict[str, Any]]):
# mypy needs those to type correctly
_f_meta: ItemTransformFunctionWithMeta[TDataItem]
_f: ItemTransformFunctionNoMeta[TDataItem]
@@ -121,7 +121,7 @@ class MapItem(ItemTransform[TDataItem]):
return self._f(item)
class YieldMapItem(ItemTransform[Iterator[TDataItem]]):
class YieldMapItem(ItemTransform[Iterator[TDataItem], Dict[str, Any]]):
# mypy needs those to type correctly
_f_meta: ItemTransformFunctionWithMeta[TDataItem]
_f: ItemTransformFunctionNoMeta[TDataItem]
@@ -144,7 +144,7 @@ class YieldMapItem(ItemTransform[Iterator[TDataItem]]):
yield from self._f(item)
class ValidateItem(ItemTransform[TDataItem]):
class ValidateItem(ItemTransform[TDataItem, Dict[str, Any]]):
"""Base class for validators of data items.
Subclass should implement the `__call__` method to either return the data item(s) or raise `extract.exceptions.ValidationError`.
@@ -160,7 +160,7 @@ class ValidateItem(ItemTransform[TDataItem]):
return self
class LimitItem(ItemTransform[TDataItem]):
class LimitItem(ItemTransform[TDataItem, Dict[str, Any]]):
placement_affinity: ClassVar[float] = 1.1 # stick to end right behind incremental
def __init__(
@@ -227,7 +227,7 @@ class LimitItem(ItemTransform[TDataItem]):
return item
class MetricsItem(ItemTransform[None]):
class MetricsItem(ItemTransform[None, Dict[str, Any]]):
"""Collects custom metrics from data flowing through the pipe without modifying items.
The metrics function receives items, optionally meta, and a metrics dictionary that it can

View File

@@ -4,7 +4,6 @@ import os
from typing import Awaitable, Callable, List, Optional, Dict, Iterator, Any, cast
import pytest
from pydantic import BaseModel
import dlt
from dlt.common.configuration import known_sections
@@ -386,6 +385,8 @@ def test_nested_hints_decorator() -> None:
def test_columns_from_pydantic() -> None:
from pydantic import BaseModel
class Columns(BaseModel):
tags: List[str]
name: Optional[str]