bump black in dev-requirements and pre-commit-config (#10407)

Michelle Ark
2024-07-17 12:01:14 -04:00
committed by GitHub
parent 2a26fabfdf
commit bef2d20c21
24 changed files with 65 additions and 56 deletions

View File

@@ -0,0 +1,6 @@
+ kind: Under the Hood
+ body: bump black to 24.3.0
+ time: 2024-07-16T18:48:59.651834-04:00
+ custom:
+   Author: michelleark
+   Issue: "10454"

View File

@@ -7,6 +7,7 @@ ignore =
W503 # makes Flake8 work like black
W504
E203 # makes Flake8 work like black
+ E704 # makes Flake8 work like black
E741
E501 # long line checking is done in black
exclude = test/
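
The new E704 ignore tracks a stable-style change in black 24: function bodies consisting only of "..." are collapsed onto the def line, which flake8 reports as E704 ("statement on same line as def"). A minimal sketch of the formatting that triggers it:

# Before black 24, a stub-style body kept the ellipsis on its own line:
class ConfigSource:
    def get_config_dict(self, resource_type):
        ...


# black 24 collapses it onto the def line, which flake8 flags as E704
# unless E704 is added to the ignore list above:
class ConfigSourceReformatted:
    def get_config_dict(self, resource_type): ...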

View File

@@ -26,7 +26,7 @@ repos:
- id: isort
- repo: https://github.com/psf/black
# rev must match what's in dev-requirements.txt
- rev: 22.3.0
+ rev: 24.3.0
hooks:
- id: black
- id: black
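
Keeping the two pins in lockstep matters because pre-commit installs each hook into its own isolated environment from the rev above, independently of dev-requirements.txt; bumping only one of them would leave the hook and the locally installed black running different versions.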

View File

@@ -158,7 +158,8 @@ class RunResultsArtifact(ExecutionResult, ArtifactMixin):
@classmethod
def upgrade_schema_version(cls, data):
"""This overrides the "upgrade_schema_version" call in VersionedSchema (via
- ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results."""
+ ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
+ """
run_results_schema_version = get_artifact_schema_version(data)
# If less than the current version (v5), preprocess contents to match latest schema version
if run_results_schema_version <= 5:
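
This hunk reflects black 24's docstring handling: when keeping the closing quotes on the final text line of a multiline docstring would overrun the line length, they move to a line of their own. A small illustrative sketch:

def upgrade(data):
    """When the last line of a multiline docstring runs long enough, black 24 now
    places the closing quotes on their own line instead of appending them here.
    """
    return data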

View File

@@ -179,9 +179,11 @@ def postflight(func):
process_in_blocks=rusage.ru_inblock,
process_out_blocks=rusage.ru_oublock,
),
- EventLevel.INFO
- if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
- else None,
+ (
+     EventLevel.INFO
+     if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT
+     else None
+ ),
)
fire_event(

View File

@@ -290,9 +290,9 @@ class RuntimeConfig(Project, Profile, AdapterRequiredConfig):
project_name=self.project_name,
project_id=self.hashed_name(),
user_id=tracking.active_user.id if tracking.active_user else None,
- send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
- if tracking.active_user
- else None,
+ send_anonymous_usage_stats=(
+     get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+ ),
adapter_type=self.credentials.type,
)
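
This hunk, like the ones in requires.py above and manifest.py below, comes from the black 24 rule that parenthesizes conditional expressions when they have to be split: older black broke the ternary bare across lines inside the enclosing call's parentheses, while black 24 wraps the split expression in its own parentheses. A runnable sketch with hypothetical names:

def report(level):
    print(level)

flags = {"show": True}

# old black, relying on the call's own parentheses:
#     report(
#         "info"
#         if flags.get("show")
#         else None,
#     )
# black 24 groups the split ternary explicitly:
report(
    (
        "info"
        if flags.get("show")
        else None
    ),
)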

View File

@@ -27,8 +27,7 @@ class ConfigSource:
def __init__(self, project):
self.project = project
- def get_config_dict(self, resource_type: NodeType):
-     ...
+ def get_config_dict(self, resource_type: NodeType): ...
class UnrenderedConfig(ConfigSource):
@@ -130,12 +129,12 @@ class BaseContextConfigGenerator(Generic[T]):
return self._project_configs(self._active_project, fqn, resource_type)
@abstractmethod
- def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T:
-     ...
+ def _update_from_config(
+     self, result: T, partial: Dict[str, Any], validate: bool = False
+ ) -> T: ...
@abstractmethod
- def initial_result(self, resource_type: NodeType, base: bool) -> T:
-     ...
+ def initial_result(self, resource_type: NodeType, base: bool) -> T: ...
def calculate_node_config(
self,
@@ -181,8 +180,7 @@ class BaseContextConfigGenerator(Generic[T]):
project_name: str,
base: bool,
patch_config_dict: Optional[Dict[str, Any]] = None,
- ) -> Dict[str, Any]:
-     ...
+ ) -> Dict[str, Any]: ...
class ContextConfigGenerator(BaseContextConfigGenerator[C]):

View File

@@ -239,8 +239,7 @@ class BaseRefResolver(BaseResolver):
@abc.abstractmethod
def resolve(
self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None
- ) -> RelationProxy:
-     ...
+ ) -> RelationProxy: ...
def _repack_args(
self, name: str, package: Optional[str], version: Optional[NodeVersion]
@@ -306,8 +305,7 @@ class BaseSourceResolver(BaseResolver):
class BaseMetricResolver(BaseResolver):
@abc.abstractmethod
- def resolve(self, name: str, package: Optional[str] = None) -> MetricReference:
-     ...
+ def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ...
def _repack_args(self, name: str, package: Optional[str]) -> List[str]:
if package is None:
@@ -341,8 +339,7 @@ class BaseMetricResolver(BaseResolver):
class Config(Protocol):
- def __init__(self, model, context_config: Optional[ContextConfig]):
-     ...
+ def __init__(self, model, context_config: Optional[ContextConfig]): ...
# Implementation of "config(..)" calls in models

View File

@@ -1676,9 +1676,9 @@ class MacroManifest(MacroMethods):
self.macros = macros
self.metadata = ManifestMetadata(
user_id=tracking.active_user.id if tracking.active_user else None,
- send_anonymous_usage_stats=get_flags().SEND_ANONYMOUS_USAGE_STATS
- if tracking.active_user
- else None,
+ send_anonymous_usage_stats=(
+     get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None
+ ),
)
# This is returned by the 'graph' context property
# in the ProviderContext class.

View File

@@ -636,9 +636,9 @@ class ModelNode(ModelResource, CompiledNode):
contract_enforced_disabled: bool = False
columns_removed: List[str] = []
column_type_changes: List[Dict[str, str]] = []
- enforced_column_constraint_removed: List[
-     Dict[str, str]
- ] = []  # column_name, constraint_type
+ enforced_column_constraint_removed: List[Dict[str, str]] = (
+     []
+ )  # column_name, constraint_type
enforced_model_constraint_removed: List[Dict[str, Any]] = [] # constraint_type, columns
materialization_changed: List[str] = []
@@ -1554,7 +1554,7 @@ class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource):
return False
# exports should be in the same order, so we zip them for easy iteration
- for (old_export, new_export) in zip(old.exports, self.exports):
+ for old_export, new_export in zip(old.exports, self.exports):
if not (
old_export.name == new_export.name
and old_export.config.export_as == new_export.config.export_as

View File

@@ -74,9 +74,7 @@ def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = [])
log_level = (
EventLevel.ERROR
if flags.QUIET
- else EventLevel.DEBUG
- if flags.DEBUG
- else EventLevel(flags.LOG_LEVEL)
+ else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL)
)
console_config = get_stdout_config(
line_format,
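
Conversely, black 24 keeps a nested conditional expression on one line once it fits inside the already-parenthesized group, where older versions split every branch.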

View File

@@ -204,7 +204,7 @@ class ModelParser(SimpleSQLParser[ModelNode]):
dbt_parser = PythonParseVisitor(node)
dbt_parser.visit(tree)
- for (func, args, kwargs) in dbt_parser.dbt_function_calls:
+ for func, args, kwargs in dbt_parser.dbt_function_calls:
if func == "get":
num_args = len(args)
if num_args == 0:
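
The for-loop rewrites here, and in the nodes.py, constraint-test, and linker-test hunks elsewhere in this commit, are another black 24 change: redundant parentheses around tuple targets in for statements are removed. A trivial sketch:

pairs = [("A", "B"), ("B", "C")]

# old formatting kept parentheses around the target tuple:
#     for (left, right) in pairs:
# black 24 strips them:
for left, right in pairs:
    print(left, right)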

View File

@@ -274,9 +274,11 @@ class BaseRunner(metaclass=ABCMeta):
def compile_and_execute(self, manifest, ctx):
result = None
- with self.adapter.connection_named(
-     self.node.unique_id, self.node
- ) if get_flags().INTROSPECT else nullcontext():
+ with (
+     self.adapter.connection_named(self.node.unique_id, self.node)
+     if get_flags().INTROSPECT
+     else nullcontext()
+ ):
ctx.node.update_event_status(node_status=RunningStatus.Compiling)
fire_event(
NodeCompiling(
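
Parenthesizing the conditional context manager is also black 24 behavior: a with item that must be split is wrapped in parentheses rather than broken after the with keyword (for multiple context managers this needs a Python 3.9+ target; here the parentheses merely group a single conditional expression, which is valid on any Python 3). A small sketch using nullcontext as a stand-in for the adapter connection:

from contextlib import nullcontext

introspect = False

def named_connection():
    # stand-in for self.adapter.connection_named(...)
    return nullcontext()

with (
    named_connection()
    if introspect
    else nullcontext()
):
    pass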

View File

@@ -341,6 +341,7 @@ def write_project_files_recursively(path, file_dict):
# Provide a dictionary of file names to contents. Nested directories
# are handled by nested dictionaries.
# models directory
@pytest.fixture(scope="class")
def models():

View File

@@ -291,6 +291,7 @@ class TestProcessingException(Exception):
# Testing utilities that use adapter code
# Uses:
# adapter.config.credentials
# adapter.quote

View File

@@ -3,7 +3,7 @@ git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-ada
git+https://github.com/dbt-labs/dbt-common.git@main
git+https://github.com/dbt-labs/dbt-postgres.git@main
# black must match what's in .pre-commit-config.yaml to be sure local env matches CI
- black==22.3.0
+ black==24.3.0
bumpversion
ddtrace==2.3.0
docutils

View File

@@ -104,7 +104,7 @@ class BaseConstraintsColumnsEqual:
def test__constraints_wrong_column_data_types(
self, project, string_type, int_type, schema_string_type, schema_int_type, data_types
):
- for (sql_column_value, schema_data_type, error_data_type) in data_types:
+ for sql_column_value, schema_data_type, error_data_type in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),
@@ -146,7 +146,7 @@ class BaseConstraintsColumnsEqual:
assert all([(exp in log_output or exp.upper() in log_output) for exp in expected])
def test__constraints_correct_column_data_types(self, project, data_types):
- for (sql_column_value, schema_data_type, _) in data_types:
+ for sql_column_value, schema_data_type, _ in data_types:
# Write parametrized data_type to sql file
write_file(
my_model_data_type_sql.format(sql_value=sql_column_value),

View File

@@ -697,9 +697,9 @@ class TestList:
"test.unique_outer_id",
}
del os.environ["DBT_RESOURCE_TYPES"]
- os.environ[
-     "DBT_EXCLUDE_RESOURCE_TYPES"
- ] = "test saved_query metric source semantic_model snapshot seed"
+ os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = (
+     "test saved_query metric source semantic_model snapshot seed"
+ )
results = self.run_dbt_ls()
assert set(results) == {
"test.ephemeral",

View File

@@ -124,9 +124,9 @@ class TestGetFullManifest:
mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter
mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest
mocker.patch("dbt.parser.manifest._check_manifest").return_value = None
- mocker.patch(
-     "dbt.parser.manifest.ManifestLoader.save_macros_to_adapter"
- ).return_value = None
+ mocker.patch("dbt.parser.manifest.ManifestLoader.save_macros_to_adapter").return_value = (
+     None
+ )
mocker.patch("dbt.tracking.active_user").return_value = User(None)
def test_write_perf_info(

View File

@@ -193,9 +193,9 @@ class TestFileDiff:
def partial_parsing(self, manifest, files):
safe_set_invocation_context()
saved_files = deepcopy(files)
- saved_files[
-     "my_test://models/python_model_untouched.py"
- ].checksum = FileHash.from_contents("something new")
+ saved_files["my_test://models/python_model_untouched.py"].checksum = (
+     FileHash.from_contents("something new")
+ )
return PartialParsing(manifest, saved_files)
def test_build_file_diff_basic(self, partial_parsing):

View File

@@ -85,7 +85,7 @@ class TestLinker:
def test_linker_add_dependency(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("A", "C"), ("B", "C")]
- for (l, r) in actual_deps:
+ for l, r in actual_deps:
linker.dependency(l, r)
queue = self._get_graph_queue(_mock_manifest("ABC"), linker)
@@ -119,7 +119,7 @@ class TestLinker:
actual_deps = [("A", "B")]
additional_node = "Z"
- for (l, r) in actual_deps:
+ for l, r in actual_deps:
linker.dependency(l, r)
linker.add_node(additional_node)
@@ -150,7 +150,7 @@ class TestLinker:
def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
- for (l, r) in actual_deps:
+ for l, r in actual_deps:
linker.dependency(l, r)
queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"])
@@ -181,7 +181,7 @@ class TestLinker:
def test__find_cycles__cycles(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "A")]
- for (l, r) in actual_deps:
+ for l, r in actual_deps:
linker.dependency(l, r)
assert linker.find_cycles() is not None
@@ -189,7 +189,7 @@ class TestLinker:
def test__find_cycles__no_cycles(self, linker: Linker) -> None:
actual_deps = [("A", "B"), ("B", "C"), ("C", "D")]
- for (l, r) in actual_deps:
+ for l, r in actual_deps:
linker.dependency(l, r)
assert linker.find_cycles() is None

View File

@@ -3,6 +3,7 @@
Note that all imports should be inside the functions to avoid import/mocking
issues.
"""
import os
import string
from unittest import TestCase, mock
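
The one line this hunk adds is not visible in this rendering; it is most likely the blank line black 24 now enforces between a module docstring and the first import.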

View File

@@ -42,9 +42,9 @@ def postgres_adapter(
adapter = get_adapter(runtime_config)
assert isinstance(adapter, PostgresAdapter)
- mocker.patch(
-     "dbt.parser.manifest.ManifestLoader.build_manifest_state_check"
- ).return_value = ManifestStateCheck()
+ mocker.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check").return_value = (
+     ManifestStateCheck()
+ )
manifest = ManifestLoader.load_macros(
runtime_config,
adapter.connections.set_query_header,

View File

@@ -106,6 +106,7 @@ class JSONSchema(DataClassJSONMixin):
serialize_by_alias: bool
aliases: Incomplete
serialization_strategy: Incomplete
def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ...
def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ...
def __init__(