Compare commits

...

2 Commits

Author        SHA1        Message                                            Date
Gerda Shank   09529aa534  point to Mashumaro branch without Python 3.6 fix   2021-02-24 12:02:31 -05:00
Gerda Shank   14f06c0f7c  Use updated Mashumaro code                         2021-02-24 11:59:44 -05:00
42 changed files with 157 additions and 141 deletions

View File

@@ -45,7 +45,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return False
-        return self.to_dict() == other.to_dict()
+        return self.to_dict(omit_none=True) == other.to_dict(omit_none=True)
 
     @classmethod
     def get_default_quote_policy(cls) -> Policy:
@@ -185,10 +185,10 @@ class BaseRelation(FakeAPIObject, Hashable):
     def create_from_source(
         cls: Type[Self], source: ParsedSourceDefinition, **kwargs: Any
     ) -> Self:
-        source_quoting = source.quoting.to_dict()
+        source_quoting = source.quoting.to_dict(omit_none=True)
         source_quoting.pop('column', None)
         quote_policy = deep_merge(
-            cls.get_default_quote_policy().to_dict(),
+            cls.get_default_quote_policy().to_dict(omit_none=True),
             source_quoting,
             kwargs.get('quote_policy', {}),
         )
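Why omit_none=True matters here: deep_merge gives later dicts priority, so a serialized quote policy that kept its unset fields as explicit Nones would wipe out the defaults it is merged over. A minimal standalone sketch of the difference, using a toy QuotePolicy and a plain dict merge in place of dbt's Policy and deep_merge, and assuming a mashumaro version that supports TO_DICT_ADD_OMIT_NONE_FLAG (dbt pins a fork in dev-requirements.txt):

    from dataclasses import dataclass
    from typing import Optional

    from mashumaro import DataClassDictMixin
    from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig


    @dataclass
    class QuotePolicy(DataClassDictMixin):
        database: Optional[bool] = None
        schema: Optional[bool] = None
        identifier: Optional[bool] = None

        class Config(BaseConfig):
            code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]


    default = {'database': True, 'schema': True, 'identifier': True}
    source = QuotePolicy(schema=False)  # only 'schema' was set explicitly

    # omit_none=True drops the unset keys, so merging preserves the defaults:
    merged = {**default, **source.to_dict(omit_none=True)}
    assert merged == {'database': True, 'schema': False, 'identifier': True}

    # Keeping the Nones would clobber two of the defaults:
    clobbered = {**default, **source.to_dict(omit_none=False)}
    assert clobbered == {'database': None, 'schema': False, 'identifier': None}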

View File

@@ -138,7 +138,7 @@ class Linker:
         """
         out_graph = self.graph.copy()
         for node_id in self.graph.nodes():
-            data = manifest.expect(node_id).to_dict()
+            data = manifest.expect(node_id).to_dict(omit_none=True)
             out_graph.add_node(node_id, **data)
         nx.write_gpickle(out_graph, outfile)
@@ -339,7 +339,7 @@ class Compiler:
             model.compiled_sql = injected_sql
             model.extra_ctes_injected = True
             model.extra_ctes = prepended_ctes
-            model.validate(model.to_dict())
+            model.validate(model.to_dict(omit_none=True))
 
             manifest.update_node(model)
@@ -388,7 +388,7 @@ class Compiler:
         logger.debug("Compiling {}".format(node.unique_id))
 
-        data = node.to_dict()
+        data = node.to_dict(omit_none=True)
         data.update({
             'compiled': False,
             'compiled_sql': None,

View File

@@ -111,8 +111,8 @@ class Profile(HasCredentials):
             'credentials': self.credentials,
         }
         if serialize_credentials:
-            result['config'] = self.config.to_dict()
-            result['credentials'] = self.credentials.to_dict()
+            result['config'] = self.config.to_dict(omit_none=True)
+            result['credentials'] = self.credentials.to_dict(omit_none=True)
         return result
 
     def to_target_dict(self) -> Dict[str, Any]:
@@ -125,7 +125,7 @@ class Profile(HasCredentials):
             'name': self.target_name,
             'target_name': self.target_name,
             'profile_name': self.profile_name,
-            'config': self.config.to_dict(),
+            'config': self.config.to_dict(omit_none=True),
         })
         return target
@@ -138,7 +138,7 @@ class Profile(HasCredentials):
     def validate(self):
         try:
             if self.credentials:
-                dct = self.credentials.to_dict()
+                dct = self.credentials.to_dict(omit_none=True)
                 self.credentials.validate(dct)
             dct = self.to_profile_info(serialize_credentials=True)
             ProfileConfig.validate(dct)

View File

@@ -347,7 +347,7 @@ class PartialProject(RenderComponents):
         # break many things
         quoting: Dict[str, Any] = {}
         if cfg.quoting is not None:
-            quoting = cfg.quoting.to_dict()
+            quoting = cfg.quoting.to_dict(omit_none=True)
 
         models: Dict[str, Any]
         seeds: Dict[str, Any]
@@ -578,10 +578,11 @@ class Project:
             'config-version': self.config_version,
         })
         if self.query_comment:
-            result['query-comment'] = self.query_comment.to_dict()
+            result['query-comment'] = \
+                self.query_comment.to_dict(omit_none=True)
         if with_packages:
-            result.update(self.packages.to_dict())
+            result.update(self.packages.to_dict(omit_none=True))
         return result

View File

@@ -78,7 +78,7 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
         get_relation_class_by_name(profile.credentials.type)
         .get_default_quote_policy()
         .replace_dict(_project_quoting_dict(project, profile))
-    ).to_dict()
+    ).to_dict(omit_none=True)
 
     cli_vars: Dict[str, Any] = parse_cli_vars(getattr(args, 'vars', '{}'))
@@ -391,7 +391,7 @@ class UnsetConfig(UserConfig):
             f"'UnsetConfig' object has no attribute {name}"
         )
 
-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         return {}

View File

@@ -538,4 +538,5 @@ class BaseContext(metaclass=ContextMeta):
 def generate_base_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]:
     ctx = BaseContext(cli_vars)
+    # This is not a Mashumaro to_dict call
     return ctx.to_dict()

View File

@@ -196,7 +196,7 @@ class ContextConfigGenerator(BaseContextConfigGenerator[C]):
             base=base,
         )
         finalized = config.finalize_and_validate()
-        return finalized.to_dict()
+        return finalized.to_dict(omit_none=True)
 
 
 class UnrenderedConfigGenerator(BaseContextConfigGenerator[Dict[str, Any]]):

View File

@@ -77,4 +77,5 @@ def generate_runtime_docs(
     current_project: str,
 ) -> Dict[str, Any]:
     ctx = DocsRuntimeContext(config, target, manifest, current_project)
+    # This is not a Mashumaro to_dict call
     return ctx.to_dict()

View File

@@ -1115,7 +1115,7 @@ class ProviderContext(ManifestContext):
     @contextproperty('model')
     def ctx_model(self) -> Dict[str, Any]:
-        return self.model.to_dict()
+        return self.model.to_dict(omit_none=True)
 
     @contextproperty
     def pre_hooks(self) -> Optional[List[Dict[str, Any]]]:
@@ -1231,7 +1231,7 @@ class ModelContext(ProviderContext):
         if isinstance(self.model, ParsedSourceDefinition):
             return []
         return [
-            h.to_dict() for h in self.model.config.pre_hook
+            h.to_dict(omit_none=True) for h in self.model.config.pre_hook
         ]
 
     @contextproperty
@@ -1239,7 +1239,7 @@ class ModelContext(ProviderContext):
         if isinstance(self.model, ParsedSourceDefinition):
             return []
         return [
-            h.to_dict() for h in self.model.config.post_hook
+            h.to_dict(omit_none=True) for h in self.model.config.post_hook
         ]
 
     @contextproperty

View File

@@ -132,7 +132,7 @@ class Credentials(
     ) -> Iterable[Tuple[str, Any]]:
         """Return an ordered iterator of key/value pairs for pretty-printing.
         """
-        as_dict = self.to_dict(options={'keep_none': True})
+        as_dict = self.to_dict(omit_none=False)
         connection_keys = set(self._connection_keys())
         aliases: List[str] = []
         if with_aliases:
@@ -148,8 +148,8 @@ class Credentials(
         raise NotImplementedError
 
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         data = cls.translate_aliases(data)
         return data
@@ -159,7 +159,7 @@ class Credentials(
     ) -> Dict[str, Any]:
         return translate_aliases(kwargs, cls._ALIASES, recurse)
 
-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         # no super() -- do we need it?
         if self._ALIASES:
             dct.update({
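The __pre_deserialize__ and __post_serialize__ methods that lose their options parameter here are mashumaro's standard serialization hooks: the first runs just before from_dict builds the instance, the second just after to_dict builds the plain dict. A minimal sketch of the alias-translation pattern Credentials uses (toy class and _ALIASES table, not dbt's; assumes a mashumaro version that supports these hooks):

    from dataclasses import dataclass

    from mashumaro import DataClassDictMixin


    @dataclass
    class ToyCredentials(DataClassDictMixin):
        username: str
        password: str

        _ALIASES = {'user': 'username', 'pass': 'password'}

        @classmethod
        def __pre_deserialize__(cls, data):
            # Runs before from_dict: map aliased keys onto real field names.
            return {cls._ALIASES.get(k, k): v for k, v in data.items()}

        def __post_serialize__(self, dct):
            # Runs after to_dict: expose the alias alongside the real key.
            dct['user'] = dct['username']
            return dct


    creds = ToyCredentials.from_dict({'user': 'alice', 'pass': 's3cret'})
    assert creds.username == 'alice'
    assert creds.to_dict()['user'] == 'alice'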

View File

@@ -178,7 +178,7 @@ def parsed_instance_for(compiled: CompiledNode) -> ParsedResource:
         raise ValueError('invalid resource_type: {}'
                          .format(compiled.resource_type))
-    return cls.from_dict(compiled.to_dict())
+    return cls.from_dict(compiled.to_dict(omit_none=True))
 
 
 NonSourceCompiledNode = Union[

View File

@@ -240,7 +240,7 @@ def build_edges(nodes: List[ManifestNode]):
 def _deepcopy(value):
-    return value.from_dict(value.to_dict())
+    return value.from_dict(value.to_dict(omit_none=True))
 
 
 class Locality(enum.IntEnum):
@@ -564,11 +564,11 @@ class Manifest(MacroMethods):
         """
         self.flat_graph = {
             'nodes': {
-                k: v.to_dict(options={'keep_none': True})
+                k: v.to_dict(omit_none=False)
                 for k, v in self.nodes.items()
             },
             'sources': {
-                k: v.to_dict(options={'keep_none': True})
+                k: v.to_dict(omit_none=False)
                 for k, v in self.sources.items()
             }
         }
@@ -755,7 +755,7 @@ class Manifest(MacroMethods):
     # When 'to_dict' is called on the Manifest, it substitutes a
     # WritableManifest
-    def __pre_serialize__(self, options=None):
+    def __pre_serialize__(self):
         return self.writable_manifest()
 
     def write(self, path):

View File

@@ -307,7 +307,7 @@ class BaseConfig(
         """
         # sadly, this is a circular import
         from dbt.adapters.factory import get_config_class_by_name
-        dct = self.to_dict(options={'keep_none': True})
+        dct = self.to_dict(omit_none=False)
 
         adapter_config_cls = get_config_class_by_name(adapter_type)
@@ -326,12 +326,12 @@ class BaseConfig(
         return self.from_dict(dct)
 
     def finalize_and_validate(self: T) -> T:
-        dct = self.to_dict(options={'keep_none': True})
+        dct = self.to_dict(omit_none=False)
         self.validate(dct)
         return self.from_dict(dct)
 
     def replace(self, **kwargs):
-        dct = self.to_dict()
+        dct = self.to_dict(omit_none=True)
         mapping = self.field_mapping()
         for key, value in kwargs.items():
@@ -396,8 +396,8 @@ class NodeConfig(BaseConfig):
     full_refresh: Optional[bool] = None
 
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         field_map = {'post-hook': 'post_hook', 'pre-hook': 'pre_hook'}
         # create a new dict because otherwise it gets overwritten in
         # tests
@@ -414,8 +414,8 @@ class NodeConfig(BaseConfig):
                 data[new_name] = data.pop(field_name)
         return data
 
-    def __post_serialize__(self, dct, options=None):
-        dct = super().__post_serialize__(dct, options=options)
+    def __post_serialize__(self, dct):
+        dct = super().__post_serialize__(dct)
         field_map = {'post_hook': 'post-hook', 'pre_hook': 'pre-hook'}
         for field_name in field_map:
             if field_name in dct:
@@ -480,7 +480,7 @@ class SnapshotConfig(EmptySnapshotConfig):
     # formerly supported with GenericSnapshotConfig
 
     def finalize_and_validate(self):
-        data = self.to_dict()
+        data = self.to_dict(omit_none=True)
         self.validate(data)
         return self.from_dict(data)
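Note the deliberate split in this file: validation paths keep None keys with omit_none=False (the replacement for options={'keep_none': True}) so the schema sees every field, while replace (and SnapshotConfig's finalize_and_validate) serialize with omit_none=True so unset fields fall back to their defaults on the from_dict round trip. A hedged sketch of the two behaviors, with a toy config class standing in for dbt's BaseConfig:

    from dataclasses import dataclass
    from typing import Optional

    from mashumaro import DataClassDictMixin
    from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig


    @dataclass
    class ToyConfig(DataClassDictMixin):
        materialized: str = 'view'
        full_refresh: Optional[bool] = None

        class Config(BaseConfig):
            code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]


    cfg = ToyConfig(materialized='table')

    # Validation path: keep None keys so a schema validator sees every field.
    full = cfg.to_dict(omit_none=False)
    assert full == {'materialized': 'table', 'full_refresh': None}

    # replace() path: drop None keys so from_dict re-applies field defaults
    # instead of receiving explicit Nones.
    sparse = cfg.to_dict(omit_none=True)
    sparse.update({'full_refresh': True})
    assert ToyConfig.from_dict(sparse) == ToyConfig('table', True)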

View File

@@ -99,8 +99,8 @@ class HasRelationMetadata(dbtClassMixin, Replaceable):
     # because it messes up the subclasses and default parameters
     # so hack it here
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         if 'database' not in data:
             data['database'] = None
         return data
@@ -141,7 +141,7 @@ class ParsedNodeMixins(dbtClassMixin):
             # Maybe there should be validation or restrictions
             # elsewhere?
             assert isinstance(self, dbtClassMixin)
-            dct = self.to_dict(options={'keep_none': True})
+            dct = self.to_dict(omit_none=False)
             self.validate(dct)
 
     def get_materialization(self):
@@ -454,7 +454,7 @@ class ParsedMacro(UnparsedBaseNode, HasUniqueID):
         if flags.STRICT_MODE:
             # What does this actually validate?
             assert isinstance(self, dbtClassMixin)
-            dct = self.to_dict(options={'keep_none': True})
+            dct = self.to_dict(omit_none=False)
             self.validate(dct)
 
     def same_contents(self, other: Optional['ParsedMacro']) -> bool:

View File

@@ -231,12 +231,9 @@ class UnparsedSourceTableDefinition(HasColumnTests, HasTests):
     external: Optional[ExternalTable] = None
     tags: List[str] = field(default_factory=list)
 
-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         dct = super().__post_serialize__(dct)
-        keep_none = False
-        if options and 'keep_none' in options and options['keep_none']:
-            keep_none = True
-        if not keep_none and self.freshness is None:
+        if 'freshness' not in dct and self.freshness is None:
             dct['freshness'] = None
         return dct
@@ -261,12 +258,9 @@ class UnparsedSourceDefinition(dbtClassMixin, Replaceable):
     def yaml_key(self) -> 'str':
         return 'sources'
 
-    def __post_serialize__(self, dct, options=None):
+    def __post_serialize__(self, dct):
         dct = super().__post_serialize__(dct)
-        keep_none = False
-        if options and 'keep_none' in options and options['keep_none']:
-            keep_none = True
-        if not keep_none and self.freshness is None:
+        if 'freshness' not in dct and self.freshness is None:
             dct['freshness'] = None
         return dct
@@ -290,7 +284,7 @@ class SourceTablePatch(dbtClassMixin):
     columns: Optional[Sequence[UnparsedColumn]] = None
 
     def to_patch_dict(self) -> Dict[str, Any]:
-        dct = self.to_dict()
+        dct = self.to_dict(omit_none=True)
         remove_keys = ('name')
         for key in remove_keys:
             if key in dct:
@@ -327,7 +321,7 @@ class SourcePatch(dbtClassMixin, Replaceable):
     tags: Optional[List[str]] = None
 
     def to_patch_dict(self) -> Dict[str, Any]:
-        dct = self.to_dict()
+        dct = self.to_dict(omit_none=True)
         remove_keys = ('name', 'overrides', 'tables', 'path')
         for key in remove_keys:
             if key in dct:

View File

@@ -52,7 +52,7 @@ class FakeAPIObject(dbtClassMixin, Replaceable, Mapping):
         return len(fields(self.__class__))
 
     def incorporate(self, **kwargs):
-        value = self.to_dict()
+        value = self.to_dict(omit_none=True)
         value = deep_merge(value, kwargs)
         return self.from_dict(value)

View File

@@ -97,8 +97,8 @@ class BaseResult(dbtClassMixin):
     message: Optional[Union[str, int]]
 
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         if 'message' not in data:
             data['message'] = None
         return data
@@ -206,7 +206,7 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
         )
 
     def write(self, path: str):
-        write_json(path, self.to_dict(options={'keep_none': True}))
+        write_json(path, self.to_dict(omit_none=False))
 
 
 @dataclass
@@ -448,8 +448,8 @@ class CatalogResults(dbtClassMixin):
     errors: Optional[List[str]] = None
     _compile_results: Optional[Any] = None
 
-    def __post_serialize__(self, dct, options=None):
-        dct = super().__post_serialize__(dct, options=options)
+    def __post_serialize__(self, dct):
+        dct = super().__post_serialize__(dct)
         if '_compile_results' in dct:
             del dct['_compile_results']
         return dct

View File

@@ -38,8 +38,8 @@ class RPCParameters(dbtClassMixin):
     timeout: Optional[float]
 
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data, omit_none=True):
+        data = super().__pre_deserialize__(data)
         if 'timeout' not in data:
             data['timeout'] = None
         if 'task_tags' not in data:
@@ -428,8 +428,8 @@ class TaskTiming(dbtClassMixin):
     # These ought to be defaults but superclass order doesn't
     # allow that to work
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
         for field_name in ('start', 'end', 'elapsed'):
             if field_name not in data:
                 data[field_name] = None
@@ -496,8 +496,8 @@ class PollResult(RemoteResult, TaskTiming):
     # These ought to be defaults but superclass order doesn't
     # allow that to work
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
-        data = super().__pre_deserialize__(data, options=options)
+    def __pre_deserialize__(cls, data):
+        data = super().__pre_deserialize__(data)
     for field_name in ('start', 'end', 'elapsed'):
             if field_name not in data:
                 data[field_name] = None

View File

@@ -58,7 +58,7 @@ class Mergeable(Replaceable):
 class Writable:
     def write(self, path: str):
         write_json(
-            path, self.to_dict(options={'keep_none': True})  # type: ignore
+            path, self.to_dict(omit_none=False)  # type: ignore
         )
@@ -74,7 +74,7 @@ class AdditionalPropertiesMixin:
     # not in the class definitions and puts them in an
     # _extra dict in the class
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
+    def __pre_deserialize__(cls, data):
         # dir() did not work because fields with
         # metadata settings are not found
         # The original version of this would create the
@@ -93,18 +93,18 @@ class AdditionalPropertiesMixin:
             else:
                 new_dict[key] = value
         data = new_dict
-        data = super().__pre_deserialize__(data, options=options)
+        data = super().__pre_deserialize__(data)
         return data
 
-    def __post_serialize__(self, dct, options=None):
-        data = super().__post_serialize__(dct, options=options)
+    def __post_serialize__(self, dct):
+        data = super().__post_serialize__(dct)
         data.update(self.extra)
         if '_extra' in data:
             del data['_extra']
         return data
 
     def replace(self, **kwargs):
-        dct = self.to_dict(options={'keep_none': True})
+        dct = self.to_dict(omit_none=False)
         dct.update(kwargs)
         return self.from_dict(dct)

View File

@@ -1,5 +1,5 @@
 from typing import (
-    Type, ClassVar, Dict, cast, TypeVar
+    Type, ClassVar, cast,
 )
 import re
 from dataclasses import fields
@@ -9,29 +9,28 @@ from dateutil.parser import parse
 from hologram import JsonSchemaMixin, FieldEncoder, ValidationError
 
+# type: ignore
 from mashumaro import DataClassDictMixin
-from mashumaro.types import SerializableEncoder, SerializableType
+from mashumaro.config import (
+    TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig as MashBaseConfig
+)
+from mashumaro.types import SerializableType, SerializationStrategy
 
 
-class DateTimeSerializableEncoder(SerializableEncoder[datetime]):
-    @classmethod
-    def _serialize(cls, value: datetime) -> str:
+class DateTimeSerialization(SerializationStrategy):
+    def serialize(self, value):
         out = value.isoformat()
         # Assume UTC if timezone is missing
         if value.tzinfo is None:
             out = out + "Z"
         return out
 
-    @classmethod
-    def _deserialize(cls, value: str) -> datetime:
+    def deserialize(self, value):
         return (
             value if isinstance(value, datetime) else parse(cast(str, value))
         )
 
-
-TV = TypeVar("TV")
 
 # This class pulls in both JsonSchemaMixin from Hologram and
 # DataClassDictMixin from our fork of Mashumaro. The 'to_dict'
 # and 'from_dict' methods come from Mashumaro. Building
@@ -43,23 +42,21 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
     against the schema
     """
-    _serializable_encoders: ClassVar[Dict[str, SerializableEncoder]] = {
-        'datetime.datetime': DateTimeSerializableEncoder(),
-    }
+    class Config(MashBaseConfig):
+        code_generation_options = [
+            TO_DICT_ADD_OMIT_NONE_FLAG,
+        ]
+        serialization_strategy = {
+            datetime: DateTimeSerialization(),
+        }
 
     _hyphenated: ClassVar[bool] = False
     ADDITIONAL_PROPERTIES: ClassVar[bool] = False
 
     # This is called by the mashumaro to_dict in order to handle
     # nested classes.
     # Munges the dict that's returned.
-    def __post_serialize__(self, dct, options=None):
-        keep_none = False
-        if options and 'keep_none' in options and options['keep_none']:
-            keep_none = True
-        if not keep_none:  # remove attributes that are None
-            new_dict = {k: v for k, v in dct.items() if v is not None}
-            dct = new_dict
+    def __post_serialize__(self, dct):
         if self._hyphenated:
             new_dict = {}
             for key in dct:
@@ -75,7 +72,7 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
     # This is called by the mashumaro _from_dict method, before
     # performing the conversion to a dict
     @classmethod
-    def __pre_deserialize__(cls, data, options=None):
+    def __pre_deserialize__(cls, data):
         if cls._hyphenated:
             new_dict = {}
             for key in data:
@@ -93,7 +90,7 @@ class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin):
     def _local_to_dict(self, **kwargs):
         args = {}
         if 'omit_none' in kwargs and kwargs['omit_none'] is False:
-            args['options'] = {'keep_none': True}
+            args['omit_none'] = False
         return self.to_dict(**args)
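This file is the heart of the change: the hand-rolled keep_none option and the None-stripping loop in __post_serialize__ are replaced by mashumaro's own code generation. TO_DICT_ADD_OMIT_NONE_FLAG makes the generated to_dict accept an omit_none keyword defaulting to False, which is why every plain to_dict() call site elsewhere in this diff gains an explicit omit_none=True, and SerializationStrategy replaces the removed SerializableEncoder for datetime handling. A standalone sketch of the pattern without the hologram mixin (assumes upstream mashumaro and python-dateutil; Metadata is a toy class, not dbt's):

    from dataclasses import dataclass
    from datetime import datetime
    from typing import Optional

    from dateutil.parser import parse
    from mashumaro import DataClassDictMixin
    from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig
    from mashumaro.types import SerializationStrategy


    class DateTimeSerialization(SerializationStrategy):
        def serialize(self, value):
            out = value.isoformat()
            if value.tzinfo is None:  # assume UTC when no timezone is attached
                out += "Z"
            return out

        def deserialize(self, value):
            return value if isinstance(value, datetime) else parse(value)


    @dataclass
    class Metadata(DataClassDictMixin):
        generated_at: datetime
        invocation_id: Optional[str] = None

        class Config(BaseConfig):
            code_generation_options = [TO_DICT_ADD_OMIT_NONE_FLAG]
            serialization_strategy = {datetime: DateTimeSerialization()}


    m = Metadata(generated_at=datetime(2021, 2, 24, 12, 0, 0))
    assert m.to_dict(omit_none=True) == {'generated_at': '2021-02-24T12:00:00Z'}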

View File

@@ -95,7 +95,8 @@ class JsonFormatter(LogMessageFormatter):
         # utils imports exceptions which imports logger...
         import dbt.utils
         log_message = super().__call__(record, handler)
-        return json.dumps(log_message.to_dict(), cls=dbt.utils.JSONEncoder)
+        dct = log_message.to_dict(omit_none=True)
+        return json.dumps(dct, cls=dbt.utils.JSONEncoder)
 
 
 class FormatterMixin:
@@ -127,6 +128,7 @@ class OutputHandler(logbook.StreamHandler, FormatterMixin):
     The `format_string` parameter only changes the default text output, not
     debug mode or json.
     """
+
     def __init__(
         self,
         stream,
@@ -220,7 +222,8 @@ class TimingProcessor(logbook.Processor):
     def process(self, record):
         if self.timing_info is not None:
-            record.extra['timing_info'] = self.timing_info.to_dict()
+            record.extra['timing_info'] = self.timing_info.to_dict(
+                omit_none=True)
 
 
 class DbtProcessState(logbook.Processor):
@@ -349,6 +352,7 @@ def make_log_dir_if_missing(log_dir):
 class DebugWarnings(logbook.compat.redirected_warnings):
     """Log warnings, except send them to 'debug' instead of 'warning' level.
     """
+
     def make_record(self, message, exception, filename, lineno):
         rv = super().make_record(message, exception, filename, lineno)
         rv.level = logbook.DEBUG

View File

@@ -252,7 +252,7 @@ class ConfiguredParser(
             'raw_sql': block.contents,
             'unique_id': self.generate_unique_id(name),
             'config': self.config_dict(config),
-            'checksum': block.file.checksum.to_dict(),
+            'checksum': block.file.checksum.to_dict(omit_none=True),
         }
         dct.update(kwargs)
         try:
@@ -301,7 +301,7 @@ class ConfiguredParser(
         self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
     ) -> None:
         # Overwrite node config
-        final_config_dict = parsed_node.config.to_dict()
+        final_config_dict = parsed_node.config.to_dict(omit_none=True)
         final_config_dict.update(config_dict)
         # re-mangle hooks, in case we got new ones
         self._mangle_hooks(final_config_dict)

View File

@@ -385,7 +385,7 @@ class SchemaParser(SimpleParser[SchemaTestBlock, ParsedSchemaTestNode]):
             'config': self.config_dict(config),
             'test_metadata': test_metadata,
             'column_name': column_name,
-            'checksum': FileHash.empty().to_dict(),
+            'checksum': FileHash.empty().to_dict(omit_none=True),
         }
         try:
             ParsedSchemaTestNode.validate(dct)

View File

@@ -68,7 +68,8 @@ class SnapshotParser(
     def transform(self, node: IntermediateSnapshotNode) -> ParsedSnapshotNode:
         try:
-            parsed_node = ParsedSnapshotNode.from_dict(node.to_dict())
+            dct = node.to_dict(omit_none=True)
+            parsed_node = ParsedSnapshotNode.from_dict(dct)
             self.set_snapshot_attributes(parsed_node)
             return parsed_node
         except ValidationError as exc:

View File

@@ -49,8 +49,8 @@ class SourcePatcher:
         if patch is None:
             return unpatched
-        source_dct = unpatched.source.to_dict()
-        table_dct = unpatched.table.to_dict()
+        source_dct = unpatched.source.to_dict(omit_none=True)
+        table_dct = unpatched.table.to_dict(omit_none=True)
         patch_path: Optional[Path] = None
 
         source_table_patch: Optional[SourceTablePatch] = None

View File

@@ -177,7 +177,7 @@ def poll_complete(
 def _dict_logs(logs: List[LogMessage]) -> List[Dict[str, Any]]:
-    return [log.to_dict() for log in logs]
+    return [log.to_dict(omit_none=True) for log in logs]
 
 
 class Poll(RemoteBuiltinMethod[PollParameters, PollResult]):

View File

@@ -97,7 +97,7 @@ class ResponseManager(JSONRPCResponseManager):
                 # Note: errors in to_dict do not show up anywhere in
                 # the output and all you get is a generic 500 error
                 output.result = \
-                    output.result.to_dict(options={'keep_none': True})
+                    output.result.to_dict(omit_none=False)
             yield output
 
     @classmethod

View File

@@ -391,7 +391,7 @@ class RequestTaskHandler(threading.Thread, TaskHandlerProtocol):
         except RPCException as exc:
             # RPC Exceptions come already preserialized for the jsonrpc
             # framework
-            exc.logs = [log.to_dict() for log in self.logs]
+            exc.logs = [log.to_dict(omit_none=True) for log in self.logs]
             exc.tags = self.tags
             raise

View File

@@ -114,8 +114,8 @@ class Catalog(Dict[CatalogKey, CatalogTable]):
             if unique_id in sources:
                 dbt.exceptions.raise_ambiguous_catalog_match(
                     unique_id,
-                    sources[unique_id].to_dict(),
-                    table.to_dict(),
+                    sources[unique_id].to_dict(omit_none=True),
+                    table.to_dict(omit_none=True),
                 )
             else:
                 sources[unique_id] = table.replace(unique_id=unique_id)

View File

@@ -110,7 +110,7 @@ class ListTask(GraphRunnableTask):
         for node in self._iterate_selected_nodes():
             yield json.dumps({
                 k: v
-                for k, v in node.to_dict(options={'keep_none': True}).items()
+                for k, v in node.to_dict(omit_none=False).items()
                 if k in self.ALLOWED_KEYS
             })

View File

@@ -169,7 +169,7 @@ def print_snapshot_result_line(
     info, status, logger_fn = get_printable_result(
         result, 'snapshotted', 'snapshotting')
-    cfg = model.config.to_dict()
+    cfg = model.config.to_dict(omit_none=True)
 
     msg = "{info} {description}".format(
         info=info, description=description, **cfg)

View File

@@ -117,7 +117,7 @@ def track_model_run(index, num_nodes, run_model_result):
         "hashed_contents": utils.get_hashed_contents(
             run_model_result.node
         ),
-        "timing": [t.to_dict() for t in run_model_result.timing],
+        "timing": [t.to_dict(omit_none=True) for t in run_model_result.timing],
     })
@@ -193,7 +193,7 @@ class ModelRunner(CompileRunner):
         result = context['load_result']('main')
         adapter_response = {}
         if isinstance(result.response, dbtClassMixin):
-            adapter_response = result.response.to_dict()
+            adapter_response = result.response.to_dict(omit_none=True)
         return RunResult(
             node=model,
             status=RunStatus.Success,

View File

@@ -320,7 +320,7 @@ class JSONEncoder(json.JSONEncoder):
         if hasattr(obj, 'to_dict'):
             # if we have a to_dict we should try to serialize the result of
             # that!
-            return obj.to_dict()
+            return obj.to_dict(omit_none=True)
         return super().default(obj)

View File

@@ -13,5 +13,5 @@ mypy==0.782
 wheel
 twine
 pytest-logbook>=1.2.0,<1.3
-git+https://github.com/fishtown-analytics/dbt-mashumaro.git@dbt-customizations
+git+https://github.com/fishtown-analytics/dbt-mashumaro.git@no-python3.6-fix
 jsonschema

View File

@@ -659,7 +659,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
         self.assertEqual(
             adapter.parse_partition_by({
                 "field": "ts",
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "date",
                 "granularity": "day"
@@ -670,7 +670,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
             adapter.parse_partition_by({
                 "field": "ts",
                 "data_type": "date",
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "date",
                 "granularity": "day"
@@ -683,7 +683,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "date",
                 "granularity": "MONTH"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "date",
                 "granularity": "MONTH"
@@ -696,7 +696,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "date",
                 "granularity": "YEAR"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "date",
                 "granularity": "YEAR"
@@ -709,7 +709,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "timestamp",
                 "granularity": "HOUR"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "timestamp",
                 "granularity": "HOUR"
@@ -722,7 +722,8 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "timestamp",
                 "granularity": "MONTH"
-            }).to_dict(), {
+            }).to_dict(omit_none=True
+                       ), {
                 "field": "ts",
                 "data_type": "timestamp",
                 "granularity": "MONTH"
@@ -735,7 +736,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "timestamp",
                 "granularity": "YEAR"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "timestamp",
                 "granularity": "YEAR"
@@ -748,7 +749,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "datetime",
                 "granularity": "HOUR"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "datetime",
                 "granularity": "HOUR"
@@ -761,7 +762,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "datetime",
                 "granularity": "MONTH"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "datetime",
                 "granularity": "MONTH"
@@ -774,7 +775,7 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                 "data_type": "datetime",
                 "granularity": "YEAR"
-            }).to_dict(), {
+            }).to_dict(omit_none=True), {
                 "field": "ts",
                 "data_type": "datetime",
                 "granularity": "YEAR"
@@ -795,7 +796,8 @@ class TestBigQueryAdapter(BaseTestBigQueryAdapter):
                     "end": 100,
                     "interval": 20
                 }
-            }).to_dict(), {
+            }).to_dict(omit_none=True
+                       ), {
                 "field": "id",
                 "data_type": "int64",
                 "granularity": "day",

View File

@@ -776,7 +776,7 @@ class TestProject(BaseConfigTest):
             LocalPackage(local='foo'),
             GitPackage(git='git@example.com:fishtown-analytics/dbt-utils.git', revision='test-rev')
         ]))
-        str(project)
+        str(project)  # this does the equivalent of project.to_project_config(with_packages=True)
         json.dumps(project.to_project_config())
 
     def test_string_run_hooks(self):

View File

@@ -1618,7 +1618,7 @@ def test_timestamp_snapshot_ok(basic_timestamp_snapshot_dict, basic_timestamp_sn
     assert_symmetric(node, node_dict, ParsedSnapshotNode)
     assert_symmetric(inter, node_dict, IntermediateSnapshotNode)
-    assert ParsedSnapshotNode.from_dict(inter.to_dict()) == node
+    assert ParsedSnapshotNode.from_dict(inter.to_dict(omit_none=True)) == node
     assert node.is_refable is True
     assert node.is_ephemeral is False
     pickle.loads(pickle.dumps(node))
@@ -1631,7 +1631,7 @@ def test_check_snapshot_ok(basic_check_snapshot_dict, basic_check_snapshot_objec
     assert_symmetric(node, node_dict, ParsedSnapshotNode)
     assert_symmetric(inter, node_dict, IntermediateSnapshotNode)
-    assert ParsedSnapshotNode.from_dict(inter.to_dict()) == node
+    assert ParsedSnapshotNode.from_dict(inter.to_dict(omit_none=True)) == node
     assert node.is_refable is True
     assert node.is_ephemeral is False
     pickle.loads(pickle.dumps(node))

View File

@@ -32,7 +32,7 @@ class GenerateTest(unittest.TestCase):
             sources=sources,
             errors=None,
         )
-        return result.to_dict(options={'keep_none': True})['nodes']
+        return result.to_dict(omit_none=False)['nodes']
 
     def test__unflatten_empty(self):
         columns = {}

View File

@@ -212,9 +212,9 @@ class ManifestTest(unittest.TestCase):
             ),
         }
         for node in self.nested_nodes.values():
-            node.validate(node.to_dict())
+            node.validate(node.to_dict(omit_none=True))
         for source in self.sources.values():
-            source.validate(source.to_dict())
+            source.validate(source.to_dict(omit_none=True))
 
         os.environ['DBT_ENV_CUSTOM_ENV_key'] = 'value'
@@ -229,7 +229,7 @@ class ManifestTest(unittest.TestCase):
             metadata=ManifestMetadata(generated_at=datetime.utcnow()),
         )
         self.assertEqual(
-            manifest.writable_manifest().to_dict(),
+            manifest.writable_manifest().to_dict(omit_none=True),
            {
                 'nodes': {},
                 'sources': {},
@@ -258,7 +258,7 @@ class ManifestTest(unittest.TestCase):
             exposures={}, selectors={},
             metadata=ManifestMetadata(generated_at=datetime.utcnow()),
         )
-        serialized = manifest.writable_manifest().to_dict()
+        serialized = manifest.writable_manifest().to_dict(omit_none=True)
         self.assertEqual(serialized['metadata']['generated_at'], '2018-02-14T09:15:13Z')
         self.assertEqual(serialized['docs'], {})
         self.assertEqual(serialized['disabled'], [])
@@ -371,7 +371,7 @@ class ManifestTest(unittest.TestCase):
             metadata=metadata, files={}, exposures={})
         self.assertEqual(
-            manifest.writable_manifest().to_dict(),
+            manifest.writable_manifest().to_dict(omit_none=True),
             {
                 'nodes': {},
                 'sources': {},
@@ -612,7 +612,7 @@ class MixedManifestTest(unittest.TestCase):
         manifest = Manifest(nodes={}, sources={}, macros={}, docs={}, selectors={},
                             disabled=[], metadata=metadata, files={}, exposures={})
         self.assertEqual(
-            manifest.writable_manifest().to_dict(),
+            manifest.writable_manifest().to_dict(omit_none=True),
             {
                 'nodes': {},
                 'macros': {},
@@ -640,7 +640,7 @@ class MixedManifestTest(unittest.TestCase):
                            disabled=[], selectors={},
                            metadata=ManifestMetadata(generated_at=datetime.utcnow()),
                            files={}, exposures={})
-        serialized = manifest.writable_manifest().to_dict()
+        serialized = manifest.writable_manifest().to_dict(omit_none=True)
         self.assertEqual(serialized['metadata']['generated_at'], '2018-02-14T09:15:13Z')
         self.assertEqual(serialized['disabled'], [])
         parent_map = serialized['parent_map']

View File

@@ -145,7 +145,7 @@ class ContractTestCase(TestCase):
         super().setUp()
 
     def assert_to_dict(self, obj, dct):
-        self.assertEqual(obj.to_dict(), dct)
+        self.assertEqual(obj.to_dict(omit_none=True), dct)
 
     def assert_from_dict(self, obj, dct, cls=None):
         if cls is None:
@@ -185,7 +185,7 @@ def compare_dicts(dict1, dict2):
 def assert_to_dict(obj, dct):
-    assert obj.to_dict() == dct
+    assert obj.to_dict(omit_none=True) == dct
 
 
 def assert_from_dict(obj, dct, cls=None):

View File

@@ -0,0 +1,10 @@
+from mashumaro.types import SerializationStrategy as SerializationStrategy
+from typing import Any, Callable, Dict, List, Union
+
+TO_DICT_ADD_OMIT_NONE_FLAG: str
+SerializationStrategyValueType = Union[SerializationStrategy, Dict[str, Union[str, Callable]]]
+
+class BaseConfig:
+    debug: bool = ...
+    code_generation_options: List[str] = ...
+    serialization_strategy: Dict[Any, SerializationStrategyValueType] = ...

View File

@@ -2,10 +2,15 @@ from typing import Any, Mapping, Dict, Optional
 class DataClassDictMixin:
     def __init_subclass__(cls, **kwargs: Any) -> None: ...
 
-    def __pre_serialize__(self, options: Optional[Dict[str, Any]]) -> Any: ...
-    def __post_serialize__(self, dct: Mapping, options: Optional[Dict[str, Any]]) -> Any: ...
+    def __pre_serialize__(self) -> Any: ...
+    def __post_serialize__(self, dct: Mapping) -> Any: ...
 
     @classmethod
-    def __pre_deserialize__(cls: Any, dct: Mapping, options: Optional[Dict[str, Any]]) -> Any: ...
+    def __pre_deserialize__(cls: Any, dct: Mapping) -> Any: ...
 
-    def to_dict( self, use_bytes: bool = False, use_enum: bool = False, use_datetime: bool = False, options: Optional[Dict[str, Any]] = None) -> dict: ...
+    # This is absolutely totally wrong. This is *not* the signature of the Mashumaro to_dict.
+    # But mypy insists that the DataClassDictMixin to_dict and the JsonSchemaMixin to_dict
+    # must have the same signatures now that we have an 'omit_none' flag on the Mashumaro to_dict.
+    # There is no 'validate = False' in Mashumaro.
+    # Could not find a way to tell mypy to ignore it.
+    def to_dict( self, omit_none = False, validate = False) -> dict: ...
 
     @classmethod
-    def from_dict( cls, d: Mapping, use_bytes: bool = False, use_enum: bool = False, use_datetime: bool = False, options: Optional[Dict[str, Any]] = None) -> Any: ...
+    def from_dict( cls, d: Mapping, use_bytes: bool = False, use_enum: bool = False, use_datetime: bool = False) -> Any: ...