Mirror of https://github.com/dbt-labs/dbt-core, synced 2025-12-17 19:31:34 +00:00
bump black in dev-requirements and pre-commit-config (#10407)
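Apart from the changelog entry and the two version pins, every hunk in this commit is mechanical reformatting from rerunning the newer black. Short notes after the first hunk of each kind below sketch the black 24.x style rule that appears to be responsible; names in those sketches are invented stand-ins, not dbt-core code.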
.changes/unreleased/Under the Hood-20240716-184859.yaml (new file, +6)

@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: bump black to 24.3.0
+time: 2024-07-16T18:48:59.651834-04:00
+custom:
+  Author: michelleark
+  Issue: "10454"
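The fragment above matches the changie schema dbt-core uses for changelog entries (kind, body, time, custom); a file like this is typically generated with `changie new` rather than written by hand (an assumption based on the standard changie workflow, not stated in this commit).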
.flake8 (+1)

@@ -7,6 +7,7 @@ ignore =
     W503 # makes Flake8 work like black
     W504
     E203 # makes Flake8 work like black
+    E704 # makes Flake8 work like black
     E741
     E501 # long line checking is done in black
 exclude = test/
@@ -26,7 +26,7 @@ repos:
   - id: isort
 - repo: https://github.com/psf/black
   # rev must match what's in dev-requirements.txt
-  rev: 22.3.0
+  rev: 24.3.0
   hooks:
   - id: black
   - id: black
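As the comments on both pins note, the `rev` here must match the `black==` line in dev-requirements.txt (bumped in a later hunk): pre-commit installs black into its own isolated environment, so mismatched pins would let a locally installed black and the CI hook format the same files differently. Re-running the hooks repository-wide, e.g. `pre-commit run --all-files`, is the kind of sweep that produces the reformatting hunks below.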
@@ -158,7 +158,8 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
     @classmethod
     def upgrade_schema_version(cls, data):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
-        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
+        """
         run_results_schema_version = get_artifact_schema_version(data)
         # If less than the current version (v5), preprocess contents to match latest schema version
         if run_results_schema_version <= 5:
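The only change here is where the closing quotes sit: black 24.x moves a docstring's closing `"""` to its own line when keeping it attached would overflow the configured line length. A minimal sketch with a hypothetical function:

def upgrade_schema_version_sketch(data):
    """Sketch: a final docstring line long enough that attaching the closing quotes would push it past the limit.
    """
    return data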
@@ -179,9 +179,11 @@ def postflight(func):
                     process_in_blocks=rusage.ru_inblock,
                     process_out_blocks=rusage.ru_oublock,
                 ),
-                EventLevel.INFO
-                if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
-                else None,
+                (
+                    EventLevel.INFO
+                    if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
+                    else None
+                ),
             )
 
             fire_event(
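black 24.x wraps a conditional expression in its own parentheses when it has to span multiple lines, instead of leaving bare `if`/`else` clauses at argument depth. A sketch with invented stand-ins for `fire_event` and the flag lookup:

def emit(payload, level):
    # Hypothetical stand-in for fire_event(...).
    return payload, level

show_resource_report = True

emit(
    {"process_in_blocks": 42},
    (
        "info"
        if show_resource_report and len("a condition long enough to force a multi-line split") > 10
        else None
    ),
)

The same rule produces the `send_anonymous_usage_stats=(...)` hunks further down.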
@@ -290,9 +290,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             project_name=self.project_name,
             project_id=self.hashed_name(),
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
             adapter_type=self.credentials.type,
         )
 
@@ -27,8 +27,7 @@ class ConfigSource:
     def __init__(self, project):
         self.project = project
 
-    def get_config_dict(self, resource_type: NodeType):
-        ...
+    def get_config_dict(self, resource_type: NodeType): ...
 
 
 class UnrenderedConfig(ConfigSource):
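Several hunks in this commit are this same change: black 24.x formats "dummy implementations" (bodies containing only `...`) on the signature line, the compact shape typical of Protocols and abstract methods. A self-contained sketch:

from abc import ABC, abstractmethod
from typing import Optional, Protocol


class ConfigSourceLike(Protocol):
    # black 22 kept the ellipsis on its own indented line; black 24 keeps
    # `...` on the signature line when the body contains nothing else.
    def get_config_dict(self, resource_type: str) -> dict: ...


class ResolverLike(ABC):
    @abstractmethod
    def resolve(self, name: str, package: Optional[str] = None) -> str: ...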
@@ -130,12 +129,12 @@ class BaseContextConfigGenerator(Generic[T]):
         return self._project_configs(self._active_project, fqn, resource_type)
 
     @abstractmethod
-    def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
-        ...
+    def _update_from_config(
+        self, result: T, partial: Dict[str, Any], validate: bool = False
+    ) -> T: ...
 
     @abstractmethod
-    def initial_result(self, resource_type: NodeType, base: bool) -> T:
-        ...
+    def initial_result(self, resource_type: NodeType, base: bool) -> T: ...
 
     def calculate_node_config(
         self,
@@ -181,8 +180,7 @@ class BaseContextConfigGenerator(Generic[T]):
         project_name: str,
         base: bool,
         patch_config_dict: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Any]:
-        ...
+    ) -> Dict[str, Any]: ...
 
 
 class ContextConfigGenerator(BaseContextConfigGenerator[C]):
@@ -239,8 +239,7 @@ class BaseRefResolver(BaseResolver):
     @abc.abstractmethod
     def resolve(
         self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
-    ) -> RelationProxy:
-        ...
+    ) -> RelationProxy: ...
 
     def _repack_args(
         self, name: str, package: Optional[str], version: Optional[NodeVersion]
@@ -306,8 +305,7 @@ class BaseSourceResolver(BaseResolver):
 
 class BaseMetricResolver(BaseResolver):
     @abc.abstractmethod
-    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
-        ...
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...
 
     def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
         if package is None:
@@ -341,8 +339,7 @@ class BaseMetricResolver(BaseResolver):
 
 
 class Config(Protocol):
-    def __init__(self, model, context_config: Optional[ContextConfig]):
-        ...
+    def __init__(self, model, context_config: Optional[ContextConfig]): ...
 
 
 # Implementation of "config(..)" calls in models
@@ -1676,9 +1676,9 @@ class MacroManifest(MacroMethods):
         self.macros = macros
         self.metadata = ManifestMetadata(
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
         )
         # This is returned by the 'graph' context property
         # in the ProviderContext class.
@@ -636,9 +636,9 @@ class ModelNode(ModelResource, CompiledNode):
     contract_enforced_disabled: bool = False
     columns_removed: List[str] = []
     column_type_changes: List[Dict[str, str]] = []
-    enforced_column_constraint_removed: List[
-        Dict[str, str]
-    ] = []  # column_name, constraint_type
+    enforced_column_constraint_removed: List[Dict[str, str]] = (
+        []
+    )  # column_name, constraint_type
     enforced_model_constraint_removed: List[Dict[str, Any]] = []  # constraint_type, columns
     materialization_changed: List[str] = []
 
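black 24.x prefers splitting on the right-hand side of an over-long assignment: the target, including any annotation, subscript, or attribute chain, stays on one line and the value is wrapped in parentheses. The `os.environ[...]`, `mocker.patch(...).return_value`, and `saved_files[...]` hunks below are the same rule hitting subscript and attribute targets. A sketch:

import os
from typing import Dict, List

# black 22 split inside the annotation's subscript; black 24 keeps the
# target intact and parenthesizes the right-hand side instead.
enforced_constraints_sketch: List[Dict[str, str]] = (
    []
)  # column_name, constraint_type

# The same preference applied to a subscript target:
os.environ["A_SUFFICIENTLY_LONG_CONFIGURATION_VARIABLE_NAME"] = (
    "a value long enough that the whole statement cannot fit on one line"
)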
@@ -1554,7 +1554,7 @@ class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource):
             return False
 
         # exports should be in the same order, so we zip them for easy iteration
-        for (old_export, new_export) in zip(old.exports, self.exports):
+        for old_export, new_export in zip(old.exports, self.exports):
             if not (
                 old_export.name == new_export.name
                 and old_export.config.export_as == new_export.config.export_as
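The most frequent change in the commit: black 24.x removes redundant parentheses around tuple targets in `for` statements, as in the many test hunks below. A sketch:

actual_deps = [("A", "B"), ("A", "C"), ("B", "C")]

# black 22 left `for (l, r) in actual_deps:` alone; black 24 strips the
# redundant parentheses around the unpacking target.
for l, r in actual_deps:
    print(l, r)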
@@ -74,9 +74,7 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
     log_level = (
         EventLevel.ERROR
         if flags.QUIET
-        else EventLevel.DEBUG
-        if flags.DEBUG
-        else EventLevel(flags.LOG_LEVEL)
+        else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL)
     )
     console_config = get_stdout_config(
         line_format,
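For chained conditionals, black 24.x keeps the nested arm together: `else X if C else Y` stays on one line when it fits, instead of one clause per line. A sketch (the expanded parentheses are kept for illustration; a short expression like this would otherwise collapse to a single line):

quiet, debug, log_level_flag = False, True, "warn"

log_level = (
    "error"
    if quiet
    else "debug" if debug else log_level_flag
)
print(log_level)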
@@ -204,7 +204,7 @@ class ModelParser(SimpleSQLParser[ModelNode]):
         dbt_parser = PythonParseVisitor(node)
         dbt_parser.visit(tree)
 
-        for (func, args, kwargs) in dbt_parser.dbt_function_calls:
+        for func, args, kwargs in dbt_parser.dbt_function_calls:
             if func == "get":
                 num_args = len(args)
                 if num_args == 0:
@@ -274,9 +274,11 @@ class BaseRunner(metaclass=ABCMeta):
 
     def compile_and_execute(self, manifest, ctx):
         result = None
-        with self.adapter.connection_named(
-            self.node.unique_id, self.node
-        ) if get_flags().INTROSPECT else nullcontext():
+        with (
+            self.adapter.connection_named(self.node.unique_id, self.node)
+            if get_flags().INTROSPECT
+            else nullcontext()
+        ):
             ctx.node.update_event_status(node_status=RunningStatus.Compiling)
             fire_event(
                 NodeCompiling(
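black 24.x parenthesizes a long expression after `with` rather than splitting inside the call, which keeps the conditional choice of context manager readable; a single parenthesized expression after `with` is valid on every supported Python. A runnable sketch with a stand-in for the adapter connection:

from contextlib import nullcontext

INTROSPECT = False


def connection_named(name):
    # Hypothetical stand-in for adapter.connection_named(...).
    return nullcontext(name)


# black 22 broke the line inside the call, leaving `) if ... else ...():`
# dangling; black 24 wraps the whole conditional expression in parentheses.
with (
    connection_named("model.my_model")
    if INTROSPECT
    else nullcontext()
):
    pass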
core/dbt/tests/fixtures/project.py (vendored, +1)
@@ -341,6 +341,7 @@ def write_project_files_recursively(path, file_dict):
 # Provide a dictionary of file names to contents. Nested directories
 # are handle by nested dictionaries.
 
+
 # models directory
 @pytest.fixture(scope="class")
 def models():
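The added line here (and in the next hunk) is blank: black 24.x's revised blank-line handling appears to treat a comment directly above a module-level definition as part of that definition, so the usual two blank lines are enforced above the comment. A sketch with hypothetical names:

def write_project_files_sketch(path, file_dict):
    return path, file_dict


# models directory: under black 24 this comment needs two blank lines
# above it, because it attaches to the definition that follows.
def models_sketch():
    return {}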
@@ -291,6 +291,7 @@ class TestProcessingException(Exception):
 
 # Testing utilities that use adapter code
 
+
 # Uses:
 # adapter.config.credentials
 # adapter.quote
@@ -3,7 +3,7 @@ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-ada
 git+https://github.com/dbt-labs/dbt-common.git@main
 git+https://github.com/dbt-labs/dbt-postgres.git@main
 # black must match what's in .pre-commit-config.yaml to be sure local env matches CI
-black==22.3.0
+black==24.3.0
 bumpversion
 ddtrace==2.3.0
 docutils
@@ -104,7 +104,7 @@ class BaseConstraintsColumnsEqual:
     def test__constraints_wrong_column_data_types(
         self, project, string_type, int_type, schema_string_type, schema_int_type, data_types
     ):
-        for (sql_column_value, schema_data_type, error_data_type) in data_types:
+        for sql_column_value, schema_data_type, error_data_type in data_types:
             # Write parametrized data_type to sql file
             write_file(
                 my_model_data_type_sql.format(sql_value=sql_column_value),
@@ -146,7 +146,7 @@ class BaseConstraintsColumnsEqual:
         assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])
 
     def test__constraints_correct_column_data_types(self, project, data_types):
-        for (sql_column_value, schema_data_type, _) in data_types:
+        for sql_column_value, schema_data_type, _ in data_types:
             # Write parametrized data_type to sql file
             write_file(
                 my_model_data_type_sql.format(sql_value=sql_column_value),
@@ -697,9 +697,9 @@ class TestList:
             "test.unique_outer_id",
         }
         del os.environ["DBT_RESOURCE_TYPES"]
-        os.environ[
-            "DBT_EXCLUDE_RESOURCE_TYPES"
-        ] = "test saved_query metric source semantic_model snapshot seed"
+        os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = (
+            "test saved_query metric source semantic_model snapshot seed"
+        )
         results = self.run_dbt_ls()
         assert set(results) == {
             "test.ephemeral",
@@ -124,9 +124,9 @@ class TestGetFullManifest:
         mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter
         mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest
         mocker.patch("dbt.parser.manifest._check_manifest").return_value = None
-        mocker.patch(
-            "dbt.parser.manifest.ManifestLoader.save_macros_to_adapter"
-        ).return_value = None
+        mocker.patch("dbt.parser.manifest.ManifestLoader.save_macros_to_adapter").return_value = (
+            None
+        )
         mocker.patch("dbt.tracking.active_user").return_value = User(None)
 
     def test_write_perf_info(
@@ -193,9 +193,9 @@ class TestFileDiff:
     def partial_parsing(self, manifest, files):
         safe_set_invocation_context()
         saved_files = deepcopy(files)
-        saved_files[
-            "my_test://models/python_model_untouched.py"
-        ].checksum = FileHash.from_contents("something new")
+        saved_files["my_test://models/python_model_untouched.py"].checksum = (
+            FileHash.from_contents("something new")
+        )
         return PartialParsing(manifest, saved_files)
 
     def test_build_file_diff_basic(self, partial_parsing):
@@ -85,7 +85,7 @@ class TestLinker:
     def test_linker_add_dependency(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("A", "C"), ("B", "C")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         queue = self._get_graph_queue(_mock_manifest("ABC"), linker)
@@ -119,7 +119,7 @@ class TestLinker:
         actual_deps = [("A", "B")]
         additional_node = "Z"
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
         linker.add_node(additional_node)
 
@@ -150,7 +150,7 @@ class TestLinker:
     def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"])
@@ -181,7 +181,7 @@ class TestLinker:
     def test__find_cycles__cycles(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "A")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         assert linker.find_cycles() is not None
@@ -189,7 +189,7 @@ class TestLinker:
     def test__find_cycles__no_cycles(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         assert linker.find_cycles() is None
@@ -3,6 +3,7 @@
 Note that all imports should be inside the functions to avoid import/mocking
 issues.
 """
+
 import os
 import string
 from unittest import TestCase, mock
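black 24.x enforces a blank line between a module docstring and the first statement after it; that single blank line is the whole change in this hunk. In sketch form:

"""Module docstring: black 24 inserts exactly one blank line after this."""

import os

print(os.name)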
@@ -42,9 +42,9 @@ def postgres_adapter(
     adapter = get_adapter(runtime_config)
     assert isinstance(adapter, PostgresAdapter)
 
-    mocker.patch(
-        "dbt.parser.manifest.ManifestLoader.build_manifest_state_check"
-    ).return_value = ManifestStateCheck()
+    mocker.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check").return_value = (
+        ManifestStateCheck()
+    )
     manifest = ManifestLoader.load_macros(
         runtime_config,
         adapter.connections.set_query_header,
@@ -106,6 +106,7 @@ class JSONSchema(DataClassJSONMixin):
     serialize_by_alias: bool
     aliases: Incomplete
     serialization_strategy: Incomplete
+
     def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ...
     def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ...
     def __init__(