From bef2d20c2113642aa054b383396cede62c19f9a2 Mon Sep 17 00:00:00 2001
From: Michelle Ark
Date: Wed, 17 Jul 2024 12:01:14 -0400
Subject: [PATCH] bump black in dev-requirements and pre-commit-config (#10407)

---
 .../unreleased/Under the Hood-20240716-184859.yaml |  6 ++++++
 .flake8                                            |  1 +
 .pre-commit-config.yaml                            |  2 +-
 core/dbt/artifacts/schemas/run/v5/run.py           |  3 ++-
 core/dbt/cli/requires.py                           |  8 +++++---
 core/dbt/config/runtime.py                         |  6 +++---
 core/dbt/context/context_config.py                 | 14 ++++++--------
 core/dbt/context/providers.py                      |  9 +++------
 core/dbt/contracts/graph/manifest.py               |  6 +++---
 core/dbt/contracts/graph/nodes.py                  |  8 ++++----
 core/dbt/events/logging.py                         |  4 +---
 core/dbt/parser/models.py                          |  2 +-
 core/dbt/task/base.py                              |  8 +++++---
 core/dbt/tests/fixtures/project.py                 |  1 +
 core/dbt/tests/util.py                             |  1 +
 dev-requirements.txt                               |  2 +-
 .../adapter/constraints/test_constraints.py        |  4 ++--
 tests/functional/list/test_list.py                 |  6 +++---
 tests/unit/parser/test_manifest.py                 |  6 +++---
 tests/unit/parser/test_partial.py                  |  6 +++---
 tests/unit/test_compilation.py                     | 10 +++++-----
 tests/unit/utils/__init__.py                       |  1 +
 tests/unit/utils/adapter.py                        |  6 +++---
 third-party-stubs/mashumaro/jsonschema/models.pyi  |  1 +
 24 files changed, 65 insertions(+), 56 deletions(-)
 create mode 100644 .changes/unreleased/Under the Hood-20240716-184859.yaml

diff --git a/.changes/unreleased/Under the Hood-20240716-184859.yaml b/.changes/unreleased/Under the Hood-20240716-184859.yaml
new file mode 100644
index 000000000..7ee33b08d
--- /dev/null
+++ b/.changes/unreleased/Under the Hood-20240716-184859.yaml
@@ -0,0 +1,6 @@
+kind: Under the Hood
+body: bump black to 24.3.0
+time: 2024-07-16T18:48:59.651834-04:00
+custom:
+  Author: michelleark
+  Issue: "10454"
diff --git a/.flake8 b/.flake8
index 26e20a5d2..084d3c016 100644
--- a/.flake8
+++ b/.flake8
@@ -7,6 +7,7 @@ ignore =
     W503 # makes Flake8 work like black
     W504
     E203 # makes Flake8 work like black
+    E704 # makes Flake8 work like black
     E741
     E501 # long line checking is done in black
 exclude = test/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c0d813719..b66a90fc3 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
   - id: isort
 - repo: https://github.com/psf/black
   # rev must match what's in dev-requirements.txt
-  rev: 22.3.0
+  rev: 24.3.0
   hooks:
   - id: black
   - id: black
diff --git a/core/dbt/artifacts/schemas/run/v5/run.py b/core/dbt/artifacts/schemas/run/v5/run.py
index 272455434..33a5859cc 100644
--- a/core/dbt/artifacts/schemas/run/v5/run.py
+++ b/core/dbt/artifacts/schemas/run/v5/run.py
@@ -158,7 +158,8 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
     @classmethod
     def upgrade_schema_version(cls, data):
         """This overrides the "upgrade_schema_version" call in VersionedSchema (via
-        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
+        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
+        """
         run_results_schema_version = get_artifact_schema_version(data)
         # If less than the current version (v5), preprocess contents to match latest schema version
         if run_results_schema_version <= 5:
diff --git a/core/dbt/cli/requires.py b/core/dbt/cli/requires.py
index 892705d9b..0c0b19008 100644
--- a/core/dbt/cli/requires.py
+++ b/core/dbt/cli/requires.py
@@ -179,9 +179,11 @@ def postflight(func):
                     process_in_blocks=rusage.ru_inblock,
                     process_out_blocks=rusage.ru_oublock,
                 ),
-                EventLevel.INFO
-                if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
-                else None,
+                (
+                    EventLevel.INFO
+                    if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
+                    else None
+                ),
             )
 
             fire_event(
diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py
index f64fd2277..cea8f559a 100644
--- a/core/dbt/config/runtime.py
+++ b/core/dbt/config/runtime.py
@@ -290,9 +290,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
             project_name=self.project_name,
             project_id=self.hashed_name(),
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
             adapter_type=self.credentials.type,
         )
 
diff --git a/core/dbt/context/context_config.py b/core/dbt/context/context_config.py
index b0664f33a..51222ceba 100644
--- a/core/dbt/context/context_config.py
+++ b/core/dbt/context/context_config.py
@@ -27,8 +27,7 @@ class ConfigSource:
     def __init__(self, project):
         self.project = project
 
-    def get_config_dict(self, resource_type: NodeType):
-        ...
+    def get_config_dict(self, resource_type: NodeType): ...
 
 
 class UnrenderedConfig(ConfigSource):
@@ -130,12 +129,12 @@ class BaseContextConfigGenerator(Generic[T]):
         return self._project_configs(self._active_project, fqn, resource_type)
 
     @abstractmethod
-    def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
-        ...
+    def _update_from_config(
+        self, result: T, partial: Dict[str, Any], validate: bool = False
+    ) -> T: ...
 
     @abstractmethod
-    def initial_result(self, resource_type: NodeType, base: bool) -> T:
-        ...
+    def initial_result(self, resource_type: NodeType, base: bool) -> T: ...
 
     def calculate_node_config(
         self,
@@ -181,8 +180,7 @@ class BaseContextConfigGenerator(Generic[T]):
         project_name: str,
         base: bool,
         patch_config_dict: Optional[Dict[str, Any]] = None,
-    ) -> Dict[str, Any]:
-        ...
+    ) -> Dict[str, Any]: ...
 
 
 class ContextConfigGenerator(BaseContextConfigGenerator[C]):
diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py
index 15be73b53..bbb5f269c 100644
--- a/core/dbt/context/providers.py
+++ b/core/dbt/context/providers.py
@@ -239,8 +239,7 @@ class BaseRefResolver(BaseResolver):
     @abc.abstractmethod
     def resolve(
         self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
-    ) -> RelationProxy:
-        ...
+    ) -> RelationProxy: ...
 
     def _repack_args(
         self, name: str, package: Optional[str], version: Optional[NodeVersion]
@@ -306,8 +305,7 @@ class BaseSourceResolver(BaseResolver):
 
 class BaseMetricResolver(BaseResolver):
     @abc.abstractmethod
-    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
-        ...
+    def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...
 
     def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
         if package is None:
@@ -341,8 +339,7 @@ class BaseMetricResolver(BaseResolver):
 
 
 class Config(Protocol):
-    def __init__(self, model, context_config: Optional[ContextConfig]):
-        ...
+    def __init__(self, model, context_config: Optional[ContextConfig]): ...
 
 
 # Implementation of "config(..)" calls in models
diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py
index 267abc6a2..6f4fc01bb 100644
--- a/core/dbt/contracts/graph/manifest.py
+++ b/core/dbt/contracts/graph/manifest.py
@@ -1676,9 +1676,9 @@ class MacroManifest(MacroMethods):
         self.macros = macros
         self.metadata = ManifestMetadata(
             user_id=tracking.active_user.id if tracking.active_user else None,
-            send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
-            if tracking.active_user
-            else None,
+            send_anonymous_usage_stats=(
+                get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+            ),
         )
         # This is returned by the 'graph' context property
         # in the ProviderContext class.
diff --git a/core/dbt/contracts/graph/nodes.py b/core/dbt/contracts/graph/nodes.py
index 338cb39b9..da42fb7d7 100644
--- a/core/dbt/contracts/graph/nodes.py
+++ b/core/dbt/contracts/graph/nodes.py
@@ -636,9 +636,9 @@ class ModelNode(ModelResource, CompiledNode):
     contract_enforced_disabled: bool = False
     columns_removed: List[str] = []
    column_type_changes: List[Dict[str, str]] = []
-    enforced_column_constraint_removed: List[
-        Dict[str, str]
-    ] = []  # column_name, constraint_type
+    enforced_column_constraint_removed: List[Dict[str, str]] = (
+        []
+    )  # column_name, constraint_type
     enforced_model_constraint_removed: List[Dict[str, Any]] = []  # constraint_type, columns
     materialization_changed: List[str] = []
 
@@ -1554,7 +1554,7 @@ class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource):
             return False
 
         # exports should be in the same order, so we zip them for easy iteration
-        for (old_export, new_export) in zip(old.exports, self.exports):
+        for old_export, new_export in zip(old.exports, self.exports):
             if not (
                 old_export.name == new_export.name
                 and old_export.config.export_as == new_export.config.export_as
diff --git a/core/dbt/events/logging.py b/core/dbt/events/logging.py
index 68f2b2a09..f0bef3ae4 100644
--- a/core/dbt/events/logging.py
+++ b/core/dbt/events/logging.py
@@ -74,9 +74,7 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
     log_level = (
         EventLevel.ERROR
         if flags.QUIET
-        else EventLevel.DEBUG
-        if flags.DEBUG
-        else EventLevel(flags.LOG_LEVEL)
+        else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL)
     )
     console_config = get_stdout_config(
         line_format,
diff --git a/core/dbt/parser/models.py b/core/dbt/parser/models.py
index dc3ff334b..dd56d0686 100644
--- a/core/dbt/parser/models.py
+++ b/core/dbt/parser/models.py
@@ -204,7 +204,7 @@ class ModelParser(SimpleSQLParser[ModelNode]):
         dbt_parser = PythonParseVisitor(node)
         dbt_parser.visit(tree)
 
-        for (func, args, kwargs) in dbt_parser.dbt_function_calls:
+        for func, args, kwargs in dbt_parser.dbt_function_calls:
             if func == "get":
                 num_args = len(args)
                 if num_args == 0:
diff --git a/core/dbt/task/base.py b/core/dbt/task/base.py
index 62bb96314..dcf592032 100644
--- a/core/dbt/task/base.py
+++ b/core/dbt/task/base.py
@@ -274,9 +274,11 @@ class BaseRunner(metaclass=ABCMeta):
     def compile_and_execute(self, manifest, ctx):
         result = None
 
-        with self.adapter.connection_named(
-            self.node.unique_id, self.node
-        ) if get_flags().INTROSPECT else nullcontext():
+        with (
+            self.adapter.connection_named(self.node.unique_id, self.node)
+            if get_flags().INTROSPECT
+            else nullcontext()
+        ):
             ctx.node.update_event_status(node_status=RunningStatus.Compiling)
             fire_event(
                 NodeCompiling(
diff --git a/core/dbt/tests/fixtures/project.py b/core/dbt/tests/fixtures/project.py
index daacef0f5..a12638b16 100644
--- a/core/dbt/tests/fixtures/project.py
+++ b/core/dbt/tests/fixtures/project.py
@@ -341,6 +341,7 @@ def write_project_files_recursively(path, file_dict):
 # Provide a dictionary of file names to contents. Nested directories
 # are handle by nested dictionaries.
 
+
 # models directory
 @pytest.fixture(scope="class")
 def models():
diff --git a/core/dbt/tests/util.py b/core/dbt/tests/util.py
index cb2fbeccc..a01ee9b67 100644
--- a/core/dbt/tests/util.py
+++ b/core/dbt/tests/util.py
@@ -291,6 +291,7 @@ class TestProcessingException(Exception):
 
 # Testing utilities that use adapter code
 
+
 # Uses:
 #   adapter.config.credentials
 #   adapter.quote
diff --git a/dev-requirements.txt b/dev-requirements.txt
index 8541133ff..20605e632 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -3,7 +3,7 @@ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-ada
 git+https://github.com/dbt-labs/dbt-common.git@main
 git+https://github.com/dbt-labs/dbt-postgres.git@main
 # black must match what's in .pre-commit-config.yaml to be sure local env matches CI
-black==22.3.0
+black==24.3.0
 bumpversion
 ddtrace==2.3.0
 docutils
diff --git a/tests/functional/adapter/constraints/test_constraints.py b/tests/functional/adapter/constraints/test_constraints.py
index 7fea5a742..601c88f05 100644
--- a/tests/functional/adapter/constraints/test_constraints.py
+++ b/tests/functional/adapter/constraints/test_constraints.py
@@ -104,7 +104,7 @@ class BaseConstraintsColumnsEqual:
     def test__constraints_wrong_column_data_types(
         self, project, string_type, int_type, schema_string_type, schema_int_type, data_types
     ):
-        for (sql_column_value, schema_data_type, error_data_type) in data_types:
+        for sql_column_value, schema_data_type, error_data_type in data_types:
             # Write parametrized data_type to sql file
             write_file(
                 my_model_data_type_sql.format(sql_value=sql_column_value),
@@ -146,7 +146,7 @@ class BaseConstraintsColumnsEqual:
         assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])
 
     def test__constraints_correct_column_data_types(self, project, data_types):
-        for (sql_column_value, schema_data_type, _) in data_types:
+        for sql_column_value, schema_data_type, _ in data_types:
             # Write parametrized data_type to sql file
             write_file(
                 my_model_data_type_sql.format(sql_value=sql_column_value),
diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py
index 8d462e258..653021c60 100644
--- a/tests/functional/list/test_list.py
+++ b/tests/functional/list/test_list.py
@@ -697,9 +697,9 @@ class TestList:
             "test.unique_outer_id",
         }
         del os.environ["DBT_RESOURCE_TYPES"]
-        os.environ[
-            "DBT_EXCLUDE_RESOURCE_TYPES"
-        ] = "test saved_query metric source semantic_model snapshot seed"
+        os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = (
+            "test saved_query metric source semantic_model snapshot seed"
+        )
         results = self.run_dbt_ls()
         assert set(results) == {
             "test.ephemeral",
diff --git a/tests/unit/parser/test_manifest.py b/tests/unit/parser/test_manifest.py
index 705b8256f..e01b41ce5 100644
--- a/tests/unit/parser/test_manifest.py
+++ b/tests/unit/parser/test_manifest.py
@@ -124,9 +124,9 @@ class TestGetFullManifest:
         mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter
         mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest
         mocker.patch("dbt.parser.manifest._check_manifest").return_value = None
-        mocker.patch(
-            "dbt.parser.manifest.ManifestLoader.save_macros_to_adapter"
-        ).return_value = None
+        mocker.patch("dbt.parser.manifest.ManifestLoader.save_macros_to_adapter").return_value = (
+            None
+        )
         mocker.patch("dbt.tracking.active_user").return_value = User(None)
 
     def test_write_perf_info(
diff --git a/tests/unit/parser/test_partial.py b/tests/unit/parser/test_partial.py
index 40f2e6e80..b3ad25498 100644
--- a/tests/unit/parser/test_partial.py
+++ b/tests/unit/parser/test_partial.py
@@ -193,9 +193,9 @@ class TestFileDiff:
     def partial_parsing(self, manifest, files):
         safe_set_invocation_context()
         saved_files = deepcopy(files)
-        saved_files[
-            "my_test://models/python_model_untouched.py"
-        ].checksum = FileHash.from_contents("something new")
+        saved_files["my_test://models/python_model_untouched.py"].checksum = (
+            FileHash.from_contents("something new")
+        )
         return PartialParsing(manifest, saved_files)
 
     def test_build_file_diff_basic(self, partial_parsing):
diff --git a/tests/unit/test_compilation.py b/tests/unit/test_compilation.py
index c18e7fb15..0d5d4b2ea 100644
--- a/tests/unit/test_compilation.py
+++ b/tests/unit/test_compilation.py
@@ -85,7 +85,7 @@ class TestLinker:
    def test_linker_add_dependency(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("A", "C"), ("B", "C")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         queue = self._get_graph_queue(_mock_manifest("ABC"), linker)
@@ -119,7 +119,7 @@ class TestLinker:
         actual_deps = [("A", "B")]
         additional_node = "Z"
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
         linker.add_node(additional_node)
 
@@ -150,7 +150,7 @@ class TestLinker:
     def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"])
@@ -181,7 +181,7 @@ class TestLinker:
     def test__find_cycles__cycles(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "A")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         assert linker.find_cycles() is not None
@@ -189,7 +189,7 @@ class TestLinker:
     def test__find_cycles__no_cycles(self, linker: Linker) -> None:
         actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
 
-        for (l, r) in actual_deps:
+        for l, r in actual_deps:
             linker.dependency(l, r)
 
         assert linker.find_cycles() is None
diff --git a/tests/unit/utils/__init__.py b/tests/unit/utils/__init__.py
index 411ad6ae7..ec9cb5759 100644
--- a/tests/unit/utils/__init__.py
+++ b/tests/unit/utils/__init__.py
@@ -3,6 +3,7 @@
 Note that all imports should be inside the functions to avoid
 import/mocking issues.
 """
+
 import os
 import string
 from unittest import TestCase, mock
diff --git a/tests/unit/utils/adapter.py b/tests/unit/utils/adapter.py
index c760a27ba..66710e645 100644
--- a/tests/unit/utils/adapter.py
+++ b/tests/unit/utils/adapter.py
@@ -42,9 +42,9 @@ def postgres_adapter(
     adapter = get_adapter(runtime_config)
     assert isinstance(adapter, PostgresAdapter)
 
-    mocker.patch(
-        "dbt.parser.manifest.ManifestLoader.build_manifest_state_check"
-    ).return_value = ManifestStateCheck()
+    mocker.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check").return_value = (
+        ManifestStateCheck()
+    )
     manifest = ManifestLoader.load_macros(
         runtime_config,
         adapter.connections.set_query_header,
diff --git a/third-party-stubs/mashumaro/jsonschema/models.pyi b/third-party-stubs/mashumaro/jsonschema/models.pyi
index 6022d3d12..4b18bc9b3 100644
--- a/third-party-stubs/mashumaro/jsonschema/models.pyi
+++ b/third-party-stubs/mashumaro/jsonschema/models.pyi
@@ -106,6 +106,7 @@ class JSONSchema(DataClassJSONMixin):
         serialize_by_alias: bool
         aliases: Incomplete
         serialization_strategy: Incomplete
+
     def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ...
     def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ...
     def __init__(