Model Deprecation (#7562)

* CT-2461: Work toward model deprecation

* CT-2461: Remove unneeded conversions

* CT-2461: Fix up unit tests for new fields, correct a couple oversights

* CT-2461: Remaining implementation and tests for model/ref deprecation warnings

* CT-2461: Changelog entry for deprecation warnings

* CT-2461: Refine datetime handling and tests

* CT-2461: Fix up unit test data

* CT-2461: Fix some more unit test data.

* CT-2461: Fix merge issues

* CT-2461: Code review items.

* CT-2461: Improve version -> str conversion
This commit is contained in:
Peter Webb
2023-05-23 09:30:32 -04:00
committed by GitHub
parent 265e09dc93
commit 4a4b7beeb9
24 changed files with 1031 additions and 442 deletions

View File

@@ -0,0 +1,6 @@
kind: Features
body: Added warnings for model and ref deprecations
time: 2023-05-09T23:33:29.679333-04:00
custom:
Author: peterallenwebb
Issue: "7433"

View File

@@ -1,6 +1,7 @@
from typing import Dict, Any, Tuple, Optional, Union, Callable
import re
import os
from datetime import date
from dbt.clients.jinja import get_rendered, catch_jinja
from dbt.constants import SECRET_ENV_PREFIX
@@ -33,10 +34,10 @@ class BaseRenderer:
return self.render_value(value, keypath)
def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any:
# keypath is ignored.
# if it wasn't read as a string, ignore it
# keypath is ignored (and someone who knows should explain why here)
if not isinstance(value, str):
return value
return value if not isinstance(value, date) else value.isoformat()
try:
with catch_jinja():
return get_rendered(value, self.context, native=True)

View File

@@ -965,6 +965,23 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
self._analysis_lookup = AnalysisLookup(self)
return self._analysis_lookup
def resolve_refs(
    self, source_node: GraphMemberNode, current_project: str
) -> List[MaybeNonSource]:
    """Resolve every ref declared on source_node.

    Returns one entry per ref, in the same order as source_node.refs.
    Entries are whatever resolve_ref yields (a MaybeNonSource — presumably
    None/unresolved markers are possible; confirm against resolve_ref).
    """
    return [
        self.resolve_ref(
            source_node,
            ref.name,
            ref.package,
            ref.version,
            current_project,
            source_node.package_name,
        )
        for ref in source_node.refs
    ]
# Called by dbt.parser.manifest._process_refs_for_exposure, _process_refs_for_metric,
# and dbt.parser.manifest._process_refs_for_node
def resolve_ref(

View File

@@ -1,4 +1,5 @@
import os
from datetime import datetime
import time
from dataclasses import dataclass, field
from enum import Enum
@@ -568,6 +569,7 @@ class ModelNode(CompiledNode):
constraints: List[ModelLevelConstraint] = field(default_factory=list)
version: Optional[NodeVersion] = None
latest_version: Optional[NodeVersion] = None
deprecation_date: Optional[datetime] = None
state_relation: Optional[StateRelation] = None
@property
@@ -1415,6 +1417,7 @@ class ParsedNodePatch(ParsedPatch):
version: Optional[NodeVersion]
latest_version: Optional[NodeVersion]
constraints: List[Dict[str, Any]]
deprecation_date: Optional[datetime]
@dataclass

View File

@@ -1,3 +1,4 @@
import datetime
import re
from dbt import deprecations
@@ -154,6 +155,7 @@ class UnparsedVersion(dbtClassMixin):
columns: Sequence[Union[dbt.helper_types.IncludeExclude, UnparsedColumn]] = field(
default_factory=list
)
deprecation_date: Optional[datetime.datetime] = None
def __lt__(self, other):
try:
@@ -192,6 +194,8 @@ class UnparsedVersion(dbtClassMixin):
else:
self._unparsed_columns.append(column)
self.deprecation_date = normalize_date(self.deprecation_date)
@dataclass
class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasColumnProps, HasYamlMetadata):
@@ -210,6 +214,7 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
access: Optional[str] = None
latest_version: Optional[NodeVersion] = None
versions: Sequence[UnparsedVersion] = field(default_factory=list)
deprecation_date: Optional[datetime.datetime] = None
def __post_init__(self):
if self.latest_version:
@@ -229,6 +234,8 @@ class UnparsedModelUpdate(UnparsedNodeUpdate):
self._version_map = {version.v: version for version in self.versions}
self.deprecation_date = normalize_date(self.deprecation_date)
def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
if version not in self._version_map:
raise DbtInternalError(
@@ -652,3 +659,18 @@ class UnparsedGroup(dbtClassMixin, Replaceable):
super(UnparsedGroup, cls).validate(data)
if data["owner"].get("name") is None and data["owner"].get("email") is None:
raise ValidationError("Group owner must have at least one of 'name' or 'email'.")
def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
    """Convert date to datetime (at midnight), and add local time zone if naive.

    - None passes through unchanged.
    - A plain date becomes a datetime at local midnight.
    - A naive datetime is re-interpreted in the system time zone via astimezone().
    - An aware datetime is returned unchanged.
    """
    if d is None:
        return None
    # isinstance (not `type(d) ==`) so datetime *subclasses* keep their time
    # component instead of being flattened to midnight.
    if isinstance(d, datetime.datetime):
        dt = d
    else:
        # convert plain date to datetime at midnight
        dt = datetime.datetime(d.year, d.month, d.day)
    if dt.tzinfo is None:
        # naive: re-interpret as system time zone
        dt = dt.astimezone()
    return dt

View File

@@ -45,6 +45,7 @@ class PublicModel(dbtClassMixin, ManifestOrPublicNode):
# list of model unique_ids
public_node_dependencies: List[str] = field(default_factory=list)
generated_at: datetime = field(default_factory=datetime.utcnow)
deprecation_date: Optional[datetime] = None
@property
def is_latest_version(self) -> bool:

View File

@@ -1174,6 +1174,49 @@ message UnpinnedRefNewVersionAvailableMsg {
UnpinnedRefNewVersionAvailable data = 2;
}
// I065
// Payload for the DeprecatedModel warning: a model whose deprecation_date
// has passed. deprecation_date is serialized as an ISO-8601 string.
message DeprecatedModel {
  string model_name = 1;
  string model_version = 2;
  string deprecation_date = 3;
}

message DeprecatedModelMsg {
  EventInfo info = 1;
  DeprecatedModel data = 2;
}

// I066
// Payload for a warning about a ref to a model that is scheduled for
// deprecation in the future.
message UpcomingReferenceDeprecation {
  string model_name = 1;
  string ref_model_package = 2;
  string ref_model_name = 3;
  string ref_model_version = 4;
  string ref_model_latest_version = 5;
  string ref_model_deprecation_date = 6;
}

message UpcomingReferenceDeprecationMsg {
  EventInfo info = 1;
  UpcomingReferenceDeprecation data = 2;
}

// I067
// Payload for a warning about a ref to a model whose deprecation date has
// already passed. Field layout mirrors UpcomingReferenceDeprecation.
message DeprecatedReference {
  string model_name = 1;
  string ref_model_package = 2;
  string ref_model_name = 3;
  string ref_model_version = 4;
  string ref_model_latest_version = 5;
  string ref_model_deprecation_date = 6;
}

message DeprecatedReferenceMsg {
  EventInfo info = 1;
  DeprecatedReference data = 2;
}
// M - Deps generation
// M001

View File

@@ -1146,6 +1146,62 @@ class UnpinnedRefNewVersionAvailable(InfoLevel):
return msg
class DeprecatedModel(WarnLevel):
    """Warning event I065: a model's deprecation date has passed."""

    def code(self):
        return "I065"

    def message(self) -> str:
        # Versioned models get a ".v<version>" suffix; unversioned get none.
        suffix = f".v{self.model_version}" if self.model_version else ""
        return (
            f"Model {self.model_name}{suffix} has passed its deprecation date of {self.deprecation_date}. "
            "This model should be disabled or removed."
        )
class UpcomingReferenceDeprecation(WarnLevel):
    """Warning event I066: a node references a model slated for deprecation."""

    def code(self):
        return "I066"

    def message(self) -> str:
        suffix = f".v{self.ref_model_version}" if self.ref_model_version else ""
        msg = (
            f"While compiling '{self.model_name}': Found a reference to {self.ref_model_name}{suffix}, "
            f"which is slated for deprecation on '{self.ref_model_deprecation_date}'. "
        )
        # Suggest the newer version only when the pinned ref is behind latest.
        newer_available = (
            self.ref_model_version and self.ref_model_version != self.ref_model_latest_version
        )
        if newer_available:
            msg += (
                f"A new version of '{self.ref_model_name}' is available. Try it out: "
                f"{{{{ ref('{self.ref_model_package}', '{self.ref_model_name}', "
                f"v='{self.ref_model_latest_version}') }}}}."
            )
        return msg
class DeprecatedReference(WarnLevel):
    """Warning event I067: a node references an already-deprecated model."""

    def code(self):
        return "I067"

    def message(self) -> str:
        suffix = f".v{self.ref_model_version}" if self.ref_model_version else ""
        msg = (
            f"While compiling '{self.model_name}': Found a reference to {self.ref_model_name}{suffix}, "
            f"which was deprecated on '{self.ref_model_deprecation_date}'. "
        )
        # Suggest migrating only when the pinned ref is behind latest.
        newer_available = (
            self.ref_model_version and self.ref_model_version != self.ref_model_latest_version
        )
        if newer_available:
            msg += (
                f"A new version of '{self.ref_model_name}' is available. Migrate now: "
                f"{{{{ ref('{self.ref_model_package}', '{self.ref_model_name}', "
                f"v='{self.ref_model_latest_version}') }}}}."
            )
        return msg
# =======================================================
# M - Deps generation
# =======================================================

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
from copy import deepcopy
from dataclasses import dataclass
from dataclasses import field
from datetime import datetime
import datetime
import os
import traceback
from typing import (
@@ -22,6 +22,7 @@ import time
from dbt.events.base_types import EventLevel
import json
import pprint
import msgpack
import dbt.exceptions
import dbt.tracking
@@ -51,6 +52,9 @@ from dbt.events.types import (
StateCheckVarsHash,
Note,
PublicationArtifactChanged,
DeprecatedModel,
DeprecatedReference,
UpcomingReferenceDeprecation,
)
from dbt.logger import DbtProcessState
from dbt.node_types import NodeType, AccessType
@@ -131,6 +135,45 @@ PARSING_STATE = DbtProcessState("parsing")
PERF_INFO_FILE_NAME = "perf_info.json"
def extended_mashumaro_encoder(data):
    # Encoder hook for Manifest.to_msgpack: packs `data` with a `default`
    # callback that turns date/datetime values into msgpack ExtTypes
    # (see extended_msgpack_encoder).
    return msgpack.packb(data, default=extended_msgpack_encoder, use_bin_type=True)
def extended_msgpack_encoder(obj):
    """msgpack `default` hook: encode date as ExtType 1 and datetime as
    ExtType 2, both carrying ISO-format text; anything else passes through.

    Exact-type dispatch (not isinstance) is deliberate: datetime subclasses
    date, and each must round-trip to its own ext code.
    """
    ext_codes = {datetime.date: 1, datetime.datetime: 2}
    code = ext_codes.get(type(obj))
    if code is not None:
        return msgpack.ExtType(code, obj.isoformat().encode())
    return obj
def extended_mashumuro_decoder(data):
    # Decoder counterpart of extended_mashumaro_encoder, used as the `decoder`
    # for Manifest.from_msgpack; restores date/datetime from ExtTypes 1/2.
    # NOTE(review): "mashumuro" is a misspelling of "mashumaro", kept because
    # callers reference this exact name.
    return msgpack.unpackb(data, ext_hook=extended_msgpack_decoder, raw=False)
def extended_msgpack_decoder(code, data):
    """msgpack ext_hook: decode ExtType 1 back to a date and ExtType 2 back
    to a datetime (ISO-format payloads); unknown codes stay as ExtType."""
    decoders = {
        1: datetime.date.fromisoformat,
        2: datetime.datetime.fromisoformat,
    }
    decode = decoders.get(code)
    if decode is None:
        return msgpack.ExtType(code, data)
    return decode(data.decode())
def version_to_str(version: Optional[Union[str, int]]) -> str:
    """Render a model version as a string.

    Strings pass through, ints are stringified, and None (or any other
    unexpected type) becomes the empty string.
    """
    if isinstance(version, str):
        return version
    if isinstance(version, int):
        return str(version)
    return ""
class ReparseReason(StrEnum):
version_mismatch = "01_version_mismatch"
file_not_found = "02_file_not_found"
@@ -511,8 +554,46 @@ class ManifestLoader:
# write out the fully parsed manifest
self.write_manifest_for_partial_parse()
self.check_for_model_deprecations()
return self.manifest
def check_for_model_deprecations(self):
    """Fire deprecation warning events after parsing.

    For every ModelNode in the manifest:
    - DeprecatedModel if the node's own deprecation_date has passed;
    - for each of its refs that resolves to a ModelNode with a
      deprecation_date: DeprecatedReference if that date has passed,
      otherwise UpcomingReferenceDeprecation.
    """
    # Sample the clock once so every node and ref is judged against the same
    # instant (and we avoid a now()/astimezone() call per loop iteration).
    now = datetime.datetime.now().astimezone()
    for node in self.manifest.nodes.values():
        if isinstance(node, ModelNode):
            if node.deprecation_date and node.deprecation_date < now:
                fire_event(
                    DeprecatedModel(
                        model_name=node.name,
                        model_version=version_to_str(node.version),
                        deprecation_date=node.deprecation_date.isoformat(),
                    )
                )

            resolved_refs = self.manifest.resolve_refs(node, self.root_project.project_name)
            resolved_model_refs = [r for r in resolved_refs if isinstance(r, ModelNode)]
            for resolved_ref in resolved_model_refs:
                if resolved_ref.deprecation_date:
                    # Past date -> already deprecated; future date -> upcoming.
                    if resolved_ref.deprecation_date < now:
                        event_cls = DeprecatedReference
                    else:
                        event_cls = UpcomingReferenceDeprecation

                    fire_event(
                        event_cls(
                            model_name=node.name,
                            ref_model_package=resolved_ref.package_name,
                            ref_model_name=resolved_ref.name,
                            ref_model_version=version_to_str(resolved_ref.version),
                            ref_model_latest_version=str(resolved_ref.latest_version),
                            ref_model_deprecation_date=resolved_ref.deprecation_date.isoformat(),
                        )
                    )
def load_and_parse_macros(self, project_parser_files):
for project in self.all_projects.values():
if project.project_name not in project_parser_files:
@@ -658,7 +739,7 @@ class ManifestLoader:
UnableToPartialParse(reason="saved manifest contained the wrong version")
)
self.manifest.metadata.dbt_version = __version__
manifest_msgpack = self.manifest.to_msgpack()
manifest_msgpack = self.manifest.to_msgpack(extended_mashumaro_encoder)
make_directory(os.path.dirname(path))
with open(path, "wb") as fp:
fp.write(manifest_msgpack)
@@ -872,14 +953,14 @@ class ManifestLoader:
try:
with open(path, "rb") as fp:
manifest_mp = fp.read()
manifest: Manifest = Manifest.from_msgpack(manifest_mp) # type: ignore
manifest: Manifest = Manifest.from_msgpack(manifest_mp, decoder=extended_mashumuro_decoder) # type: ignore
# keep this check inside the try/except in case something about
# the file has changed in weird ways, perhaps due to being a
# different version of dbt
is_partial_parsable, reparse_reason = self.is_partial_parsable(manifest)
if is_partial_parsable:
# We don't want to have stale generated_at dates
manifest.metadata.generated_at = datetime.utcnow()
manifest.metadata.generated_at = datetime.datetime.utcnow()
# or invocation_ids
manifest.metadata.invocation_id = get_invocation_id()
return manifest
@@ -1718,6 +1799,7 @@ def write_publication_artifact(root_project: RuntimeConfig, manifest: Manifest):
latest_version=model.latest_version,
public_node_dependencies=list(public_node_dependencies),
generated_at=metadata.generated_at,
deprecation_date=model.deprecation_date,
)
public_models[unique_id] = public_model

View File

@@ -1,7 +1,8 @@
import datetime
import time
from abc import ABCMeta, abstractmethod
from typing import Iterable, Dict, Any, List, Generic, TypeVar, Type, Callable
from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar
from dataclasses import dataclass, field
from dbt.dataclass_schema import ValidationError, dbtClassMixin
@@ -515,6 +516,10 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
# We're not passing the ParsedNodePatch around anymore, so we
# could possibly skip creating one. Leaving here for now for
# code consistency.
deprecation_date: Optional[datetime.datetime] = None
if isinstance(block.target, UnparsedModelUpdate):
deprecation_date = block.target.deprecation_date
patch = ParsedNodePatch(
name=block.target.name,
original_file_path=block.target.original_file_path,
@@ -529,6 +534,7 @@ class NodePatchParser(PatchParser[NodeTarget, ParsedNodePatch], Generic[NodeTarg
version=None,
latest_version=None,
constraints=block.target.constraints,
deprecation_date=deprecation_date,
)
assert isinstance(self.yaml.file, SchemaSourceFile)
source_file: SchemaSourceFile = self.yaml.file
@@ -761,6 +767,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
version=unparsed_version.v,
latest_version=latest_version,
constraints=unparsed_version.constraints or target.constraints,
deprecation_date=unparsed_version.deprecation_date,
)
# Node patched before config because config patching depends on model name,
# which may have been updated in the version patch
@@ -782,6 +789,7 @@ class ModelPatchParser(NodePatchParser[UnparsedModelUpdate]):
super().patch_node_properties(node, patch)
node.version = patch.version
node.latest_version = patch.latest_version
node.deprecation_date = patch.deprecation_date
if patch.access:
if AccessType.is_valid(patch.access):
node.access = AccessType(patch.access)

View File

@@ -185,7 +185,7 @@ def _deep_map_render(
value: Any,
keypath: Tuple[Union[str, int], ...],
) -> Any:
atomic_types: Tuple[Type[Any], ...] = (int, float, str, type(None), bool)
atomic_types: Tuple[Type[Any], ...] = (int, float, str, type(None), bool, datetime.date)
ret: Any

View File

@@ -7,3 +7,4 @@ env_files =
testpaths =
test/unit
tests/functional
tests/unit

View File

@@ -1977,6 +1977,16 @@
"type": "null"
}
]
},
"deprecation_date": {
"oneOf": [
{
"type": "string"
},
{
"type": "null"
}
]
}
},
"additionalProperties": false,

View File

@@ -93,6 +93,7 @@ REQUIRED_PARSED_NODE_KEYS = frozenset(
"version",
"latest_version",
"constraints",
"deprecation_date",
"state_relation",
}
)

View File

@@ -263,6 +263,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
"refs": [{"name": "seed", "package": None, "version": None}],
"sources": [],
"depends_on": {"nodes": ["seed.test.seed"], "macros": [], "public_nodes": []},
"deprecation_date": None,
"unique_id": "model.test.model",
"fqn": ["test", "model"],
"metrics": [],
@@ -355,6 +356,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False):
"refs": [{"name": "seed", "package": None, "version": None}],
"sources": [],
"depends_on": {"nodes": ["seed.test.seed"], "macros": [], "public_nodes": []},
"deprecation_date": None,
"unique_id": "model.test.second_model",
"fqn": ["test", "second_model"],
"metrics": [],
@@ -925,6 +927,7 @@ def expected_references_manifest(project):
"nodes": ["source.test.my_source.my_table"],
"public_nodes": [],
},
"deprecation_date": None,
"deferred": False,
"description": "",
"docs": {"node_color": None, "show": True},
@@ -991,6 +994,7 @@ def expected_references_manifest(project):
"nodes": ["model.test.ephemeral_copy"],
"public_nodes": [],
},
"deprecation_date": None,
"deferred": False,
"description": "A summmary table of the ephemeral copy of the seed data",
"docs": {"node_color": None, "show": True},
@@ -1060,6 +1064,7 @@ def expected_references_manifest(project):
"nodes": ["model.test.ephemeral_summary"],
"public_nodes": [],
},
"deprecation_date": None,
"deferred": False,
"description": "A view of the summary of the ephemeral copy of the seed data",
"docs": {"node_color": None, "show": True},
@@ -1509,6 +1514,7 @@ def expected_versions_manifest(project):
"depends_on": {"macros": [], "nodes": [], "public_nodes": []},
"deferred": False,
"description": "A versioned model",
"deprecation_date": ANY,
"docs": {"node_color": None, "show": True},
"fqn": ["test", "versioned_model", "v1"],
"group": "test_group",
@@ -1579,6 +1585,7 @@ def expected_versions_manifest(project):
"depends_on": {"macros": [], "nodes": [], "public_nodes": []},
"deferred": False,
"description": "A versioned model",
"deprecation_date": None,
"docs": {"node_color": None, "show": True},
"fqn": ["test", "versioned_model", "v2"],
"group": "test_group",
@@ -1630,6 +1637,7 @@ def expected_versions_manifest(project):
],
"public_nodes": [],
},
"deprecation_date": None,
"deferred": False,
"description": "",
"docs": {"node_color": None, "show": True},

View File

@@ -360,6 +360,7 @@ models:
versions:
- v: 1
defined_in: arbitrary_file_name
deprecation_date: 2022-07-11
- v: 2
config:
materialized: view

View File

@@ -0,0 +1,78 @@
import pytest
from dbt.cli.main import dbtRunner
deprecated_model__yml = """
version: 2
models:
- name: my_model
description: deprecated
deprecation_date: 1999-01-01
"""
deprecating_model__yml = """
version: 2
models:
- name: my_model
description: deprecating in the future
deprecation_date: 2999-01-01
"""
model__sql = """
select 1 as Id
"""
dependant_model__sql = """
select * from {{ ref("my_model") }}
"""
class TestModelDeprecationWarning:
    """A model whose deprecation_date is in the past fires DeprecatedModel at parse time."""

    @pytest.fixture(scope="class")
    def models(self):
        return {"my_model.sql": model__sql, "my_schema.yml": deprecated_model__yml}

    def test_deprecation_warning(self, project):
        events = []
        dbtRunner(callbacks=[events.append]).invoke(["parse"])
        # A comprehension is already a list; the original list([...]) wrapper was redundant.
        matches = [e for e in events if e.info.name == "DeprecatedModel"]
        assert len(matches) == 1
        assert matches[0].data.model_name == "my_model"
class TestReferenceDeprecatingWarning:
    """A ref to a model with a *future* deprecation_date fires UpcomingReferenceDeprecation."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": model__sql,
            "my_dependant_model.sql": dependant_model__sql,
            "my_schema.yml": deprecating_model__yml,
        }

    def test_deprecation_warning(self, project):
        events = []
        dbtRunner(callbacks=[events.append]).invoke(["parse"])
        # A comprehension is already a list; the original list([...]) wrapper was redundant.
        matches = [e for e in events if e.info.name == "UpcomingReferenceDeprecation"]
        assert len(matches) == 1
        assert matches[0].data.model_name == "my_dependant_model"
        assert matches[0].data.ref_model_name == "my_model"
class TestReferenceDeprecatedWarning:
    """A ref to a model with a *past* deprecation_date fires DeprecatedReference."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": model__sql,
            "my_dependant_model.sql": dependant_model__sql,
            "my_schema.yml": deprecated_model__yml,
        }

    def test_deprecation_warning(self, project):
        events = []
        dbtRunner(callbacks=[events.append]).invoke(["parse"])
        # A comprehension is already a list; the original list([...]) wrapper was redundant.
        matches = [e for e in events if e.info.name == "DeprecatedReference"]
        assert len(matches) == 1
        assert matches[0].data.model_name == "my_dependant_model"
        assert matches[0].data.ref_model_name == "my_model"

View File

@@ -233,6 +233,21 @@ sample_values = [
types.UnpinnedRefNewVersionAvailable(
ref_node_name="", ref_node_package="", ref_node_version="", ref_max_version=""
),
types.DeprecatedModel(model_name="", model_version="", deprecation_date=""),
types.DeprecatedReference(
model_name="",
ref_model_name="",
ref_model_package="",
ref_model_deprecation_date="",
ref_model_latest_version="",
),
types.UpcomingReferenceDeprecation(
model_name="",
ref_model_name="",
ref_model_package="",
ref_model_deprecation_date="",
ref_model_latest_version="",
),
# M - Deps generation ======================
types.GitSparseCheckoutSubdirectory(subdir=""),
types.GitProgressCheckoutRevision(revision=""),

View File

@@ -0,0 +1,99 @@
from __future__ import annotations
from typing import Any, Callable, Dict, List, Optional, Tuple
from msgpack.exceptions import (
BufferFull,
ExtraData,
FormatError,
OutOfData,
PackException,
PackOverflowError,
PackValueError,
StackError,
UnpackException,
UnpackValueError,
)
from typing_extensions import Protocol
from msgpack.fallback import Packer, Unpacker, unpackb
from msgpack import exceptions
from msgpack.ext import ExtType
from msgpack import ext
# Typing stubs for the top-level msgpack API.

class _Stream(Protocol):
    # Minimal protocol: any object exposing a no-argument read() -> bytes.
    def read(self) -> bytes: ...

class _FileLike(Protocol):
    # Minimal protocol: any object exposing read(n) -> bytes.
    def read(self, n: int) -> bytes: ...

# Serialize `o` and write the result to `stream`.
def pack(
    o: Any,
    stream: _Stream,
    default: Optional[Callable[[Any], Any]] = ...,
    use_single_float: bool = ...,
    autoreset: bool = ...,
    use_bin_type: bool = ...,
    strict_types: bool = ...,
    datetime: bool = ...,
    unicode_errors: Optional[str] = ...,
) -> None: ...

# Serialize `o` and return the packed bytes.
def packb(
    o: Any,
    default: Optional[Callable[[Any], Any]] = ...,
    use_single_float: bool = ...,
    autoreset: bool = ...,
    use_bin_type: bool = ...,
    strict_types: bool = ...,
    datetime: bool = ...,
    unicode_errors: Optional[str] = ...,
) -> Any: ...

# Deserialize a single object from `stream`.
def unpack(
    stream: _Stream,
    file_like: Optional[_FileLike] = ...,
    read_size: int = ...,
    use_list: bool = ...,
    raw: bool = ...,
    timestamp: int = ...,
    strict_map_key: bool = ...,
    object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ...,
    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
    list_hook: Optional[Callable[[List[Any]], Any]] = ...,
    unicode_errors: Optional[str] = ...,
    max_buffer_size: int = ...,
    ext_hook: Callable[[int, bytes], Any] = ...,
    max_str_len: int = ...,
    max_bin_len: int = ...,
    max_array_len: int = ...,
    max_map_len: int = ...,
    max_ext_len: int = ...,
) -> Any: ...

# json/pickle-style aliases.
load = unpack
loads = unpackb
dump = pack
dumps = packb

# Explicit public API of the stub package.
__all__ = [
    "BufferFull",
    "ExtType",
    "ExtraData",
    "FormatError",
    "OutOfData",
    "PackException",
    "PackOverflowError",
    "PackValueError",
    "Packer",
    "StackError",
    "UnpackException",
    "UnpackValueError",
    "Unpacker",
    "dump",
    "dumps",
    "exceptions",
    "ext",
    "load",
    "loads",
    "pack",
    "packb",
    "unpack",
    "unpackb",
]

View File

@@ -0,0 +1,3 @@
from typing import Tuple

# Version triple of the installed msgpack package, e.g. (1, 0, 5).
version: Tuple[int, int, int]

View File

@@ -0,0 +1,16 @@
from typing import Any

# Typing stubs for msgpack.exceptions.

class UnpackException(Exception): ...
class BufferFull(UnpackException): ...
class OutOfData(UnpackException): ...
class FormatError(ValueError, UnpackException): ...
class StackError(ValueError, UnpackException): ...

# msgpack raises plain ValueError for unpack value errors; alias for typing.
UnpackValueError = ValueError

class ExtraData(UnpackValueError):
    # Fixed stub parameter name: upstream msgpack spells it "extra", not "exta".
    def __init__(self, unpacked: Any, extra: Any) -> None: ...

PackException = Exception
PackValueError = ValueError
PackOverflowError = OverflowError

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
from typing import NamedTuple
import datetime
class _ExtType(NamedTuple):
    """Internal NamedTuple carrying a msgpack extension: (type code, payload)."""

    code: int
    data: bytes

class ExtType(_ExtType): ...  # public (code, data) extension-type tuple
class TimeStamp:
    """Stub for msgpack's Timestamp extension value.

    Fixed several declarations: upstream msgpack defines to_bytes, to_unix_nano
    (instance methods) and from_unix(unix_sec) (staticmethod); this stub had
    marked them @staticmethod while still taking a stray `self` parameter.
    """

    def __init__(self, seconds: int, nanoseconds: int = ...) -> None: ...
    def __eq__(self, o: object) -> bool: ...
    def __ne__(self, o: object) -> bool: ...
    @staticmethod
    def from_bytes(b: bytes) -> TimeStamp: ...
    def to_bytes(self) -> bytes: ...
    @staticmethod
    def from_unix(unix_sec: float) -> TimeStamp: ...
    def to_unix(self) -> float: ...
    @staticmethod
    def from_unix_nano(unix_ns: int) -> TimeStamp: ...
    def to_unix_nano(self) -> int: ...
    def to_datetime(self) -> datetime.datetime: ...
    @staticmethod
    def from_datetime(dt: datetime.datetime) -> TimeStamp: ...

View File

@@ -0,0 +1,78 @@
from __future__ import annotations
from typing import Any, Callable, Dict, List, Optional, Tuple
from typing_extensions import Protocol
class _FileLike(Protocol):
    # Minimal protocol: any object exposing read(n) -> bytes.
    def read(self, n: int) -> bytes: ...

# Deserialize one complete msgpack payload from `packed` bytes.
def unpackb(
    packed: bytes,
    file_like: Optional[_FileLike] = ...,
    read_size: int = ...,
    use_list: bool = ...,
    raw: bool = ...,
    timestamp: int = ...,
    strict_map_key: bool = ...,
    object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ...,
    object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
    unicode_errors: Optional[str] = ...,
    max_buffer_size: int = ...,
    ext_hook: Callable[[int, bytes], Any] = ...,
    max_str_len: int = ...,
    max_bin_len: int = ...,
    max_array_len: int = ...,
    max_map_len: int = ...,
    max_ext_len: int = ...,
) -> Any: ...
class Unpacker:
    """Stub for msgpack's streaming deserializer: feed() bytes, then iterate
    (or call unpack()) to obtain decoded objects."""

    def __init__(
        self,
        file_like: Optional[_FileLike] = ...,
        read_size: int = ...,
        use_list: bool = ...,
        raw: bool = ...,
        timestamp: int = ...,
        strict_map_key: bool = ...,
        object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ...,
        object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ...,
        unicode_errors: Optional[str] = ...,
        max_buffer_size: int = ...,
        ext_hook: Callable[[int, bytes], Any] = ...,
        max_str_len: int = ...,
        max_bin_len: int = ...,
        max_array_len: int = ...,
        max_map_len: int = ...,
        max_ext_len: int = ...,
    ): ...
    def feed(self, next_bytes: bytes) -> None: ...
    def read_bytes(self, n: int) -> bytearray: ...
    def __iter__(self) -> Unpacker: ...
    def __next__(self) -> Any: ...
    def next(self) -> Any: ...  # Python-2-era alias of __next__
    def skip(self) -> None: ...
    def unpack(self) -> Any: ...
    def read_array_header(self) -> Any: ...
    def read_map_header(self) -> Any: ...
    def tell(self) -> int: ...
class Packer:
    """Stub for msgpack's incremental serializer; pack() returns the packed
    bytes for one object."""

    def __init__(
        self,
        default: Optional[Callable[[Any], Any]] = ...,
        use_single_float: bool = ...,
        autoreset: bool = ...,
        use_bin_type: bool = ...,
        strict_types: bool = ...,
        datetime: bool = ...,
        unicode_errors: Optional[str] = ...,
    ): ...
    def pack(self, obj: Any) -> bytes: ...
    def pack_map_pairs(self, pairs: Any) -> bytes: ...
    def pack_array_header(self, n: int) -> bytes: ...
    def pack_map_header(self, n: int) -> bytes: ...
    def pack_ext_type(self, typecode: int, data: bytes) -> None: ...
    def bytes(self) -> bytes: ...
    def reset(self) -> None: ...
    def getbuffer(self) -> memoryview: ...