(🎁) update black to 23.3.0 (#15059)
Co-authored-by: KotlinIsland <kotlinisland@users.noreply.github.com>
KotlinIsland and KotlinIsland committed Apr 15, 2023
1 parent 1449366 commit 4276308
Showing 36 changed files with 55 additions and 67 deletions.
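Apart from the version bump in .pre-commit-config.yaml, the visible hunks are mechanical reformatting from the black 23.x style: blank lines directly after a class, def, or if block opener are removed; redundant parentheses around tuple targets in for loops are dropped; trailing commas in a few split subscripts go away (ArgChecker, _CONFIG_VALUE_TYPES, ReporterClasses); a blank line is added between a docstring and the code or comment that follows it (mypy/semanal.py, mypy/test/teststubgen.py); and long union return annotations are wrapped in parentheses (mypy/typevartuples.py). A minimal sketch of the first two changes, using hypothetical names that are not part of this commit, with the pre-23.x shape noted in the comments:

import os
from typing import Iterator


# Hypothetical example, not taken from mypy; it only illustrates the output
# shape black 23.3.0 produces for the kinds of edits in this commit.
class JsonWalker:
    # black 22.x tolerated a blank line directly after the "class" line;
    # black 23.x removes it (compare FixAnnotate, Server, and FakeInfo below).
    suffix = ".data.json"

    def walk(self, root: str) -> Iterator[str]:
        # black 22.x left "for (dirpath, dirnames, filenames) in ..." alone;
        # black 23.x strips the parentheses around the tuple target
        # (compare get_files in misc/analyze_cache.py).
        for dirpath, dirnames, filenames in os.walk(root):
            for filename in filenames:
                if filename.endswith(self.suffix):
                    yield os.path.join(dirpath, filename)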
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/psf/black
- rev: 22.12.0 # must match test-requirements.txt
+ rev: 23.3.0 # must match test-requirements.txt
hooks:
- id: black
- repo: https://github.com/pycqa/isort
2 changes: 1 addition & 1 deletion misc/analyze_cache.py
@@ -62,7 +62,7 @@ def load_json(data_path: str, meta_path: str) -> CacheData:


def get_files(root: str) -> Iterable[CacheData]:
- for (dirpath, dirnames, filenames) in os.walk(root):
+ for dirpath, dirnames, filenames in os.walk(root):
for filename in filenames:
if filename.endswith(".data.json"):
meta_filename = filename.replace(".data.json", ".meta.json")
1 change: 0 additions & 1 deletion misc/fix_annotate.py
@@ -38,7 +38,6 @@ def foo(self, bar, baz=12):


class FixAnnotate(BaseFix):
-
# This fixer is compatible with the bottom matcher.
BM_compatible = True

1 change: 0 additions & 1 deletion mypy/api.py
@@ -51,7 +51,6 @@


def _run(main_wrapper: Callable[[TextIO, TextIO], None]) -> tuple[str, str, int]:
-
stdout = StringIO()
stderr = StringIO()

2 changes: 0 additions & 2 deletions mypy/checker.py
@@ -2068,7 +2068,6 @@ def erase_override(t: Type) -> Type:
if not is_subtype(
original.arg_types[i], erase_override(override.arg_types[i])
):
-
arg_type_in_super = original.arg_types[i]

if isinstance(node, FuncDef):
@@ -2954,7 +2953,6 @@ def check_compatibility_all_supers(
and lvalue.kind in (MDEF, None)
and len(lvalue_node.info.bases) > 0 # None for Vars defined via self
):
-
for base in lvalue_node.info.mro[1:]:
tnode = base.names.get(lvalue_node.name)
if tnode is not None:
4 changes: 2 additions & 2 deletions mypy/checkexpr.py
@@ -170,7 +170,7 @@
# Type of callback user for checking individual function arguments. See
# check_args() below for details.
ArgChecker: _TypeAlias = Callable[
- [Type, Type, ArgKind, Type, int, int, CallableType, Optional[Type], Context, Context], None,
+ [Type, Type, ArgKind, Type, int, int, CallableType, Optional[Type], Context, Context], None
]

# Maximum nesting level for math union in overloads, setting this to large values
@@ -845,7 +845,7 @@ def check_typeddict_call_with_kwargs(
# this may give a better error message.
ret_type = callee

- for (item_name, item_expected_type) in ret_type.items.items():
+ for item_name, item_expected_type in ret_type.items.items():
if item_name in kwargs:
item_value = kwargs[item_name]
self.chk.check_simple_assignment(
1 change: 0 additions & 1 deletion mypy/checkstrformat.py
@@ -139,7 +139,6 @@ class ConversionSpecifier:
def __init__(
self, match: Match[str], start_pos: int = -1, non_standard_format_spec: bool = False
) -> None:
-
self.whole_seq = match.group()
self.start_pos = start_pos

2 changes: 1 addition & 1 deletion mypy/config_parser.py
@@ -34,7 +34,7 @@
from mypy.options import PER_MODULE_OPTIONS, Options

_CONFIG_VALUE_TYPES: _TypeAlias = Union[
- str, bool, int, float, Dict[str, str], List[str], Tuple[int, int],
+ str, bool, int, float, Dict[str, str], List[str], Tuple[int, int]
]
_INI_PARSER_CALLABLE: _TypeAlias = Callable[[Any], _CONFIG_VALUE_TYPES]

3 changes: 1 addition & 2 deletions mypy/constraints.py
@@ -1006,7 +1006,6 @@ def infer_against_overloaded(
return infer_constraints(template, item, self.direction)

def visit_tuple_type(self, template: TupleType) -> list[Constraint]:
-
actual = self.actual
unpack_index = find_unpack_in_list(template.items)
is_varlength_tuple = (
@@ -1065,7 +1064,7 @@ def visit_typeddict_type(self, template: TypedDictType) -> list[Constraint]:
res: list[Constraint] = []
# NOTE: Non-matching keys are ignored. Compatibility is checked
# elsewhere so this shouldn't be unsafe.
- for (item_name, template_item_type, actual_item_type) in template.zip(actual):
+ for item_name, template_item_type, actual_item_type in template.zip(actual):
res.extend(infer_constraints(template_item_type, actual_item_type, self.direction))
return res
elif isinstance(actual, AnyType):
2 changes: 0 additions & 2 deletions mypy/dmypy_server.py
@@ -163,7 +163,6 @@ def ignore_suppressed_imports(module: str) -> bool:


class Server:
-
# NOTE: the instance is constructed in the parent process but
# serve() is called in the grandchild (by daemonize()).

@@ -828,7 +827,6 @@ def update_sources(self, sources: list[BuildSource]) -> None:
def update_changed(
self, sources: list[BuildSource], remove: list[str], update: list[str]
) -> ChangesAndRemovals:
-
changed_paths = self.fswatcher.update_changed(remove, update)
return self._find_changed(sources, changed_paths)

1 change: 0 additions & 1 deletion mypy/fastparse.py
@@ -255,7 +255,6 @@ def parse(
errors: Errors | None = None,
options: Options | None = None,
) -> MypyFile:
-
"""Parse a source file, without doing any semantic analysis.
Return the parse tree. If errors is not provided, raise ParseError
1 change: 0 additions & 1 deletion mypy/ipc.py
@@ -169,7 +169,6 @@ def __exit__(


class IPCServer(IPCBase):
-
BUFFER_SIZE: Final = 2**16

def __init__(self, name: str, timeout: float | None = None) -> None:
4 changes: 2 additions & 2 deletions mypy/meet.py
@@ -828,13 +828,13 @@ def visit_tuple_type(self, t: TupleType) -> ProperType:

def visit_typeddict_type(self, t: TypedDictType) -> ProperType:
if isinstance(self.s, TypedDictType):
- for (name, l, r) in self.s.zip(t):
+ for name, l, r in self.s.zip(t):
if not is_equivalent(l, r) or (name in t.required_keys) != (
name in self.s.required_keys
):
return self.default(self.s)
item_list: list[tuple[str, Type]] = []
- for (item_name, s_item_type, t_item_type) in self.s.zipall(t):
+ for item_name, s_item_type, t_item_type in self.s.zipall(t):
if s_item_type is not None:
item_list.append((item_name, s_item_type))
else:
3 changes: 1 addition & 2 deletions mypy/messages.py
@@ -2276,7 +2276,6 @@ def format_callable_args(
arg_strings = []
for arg_name, arg_type, arg_kind in zip(arg_names, arg_types, arg_kinds):
if arg_kind == ARG_POS and arg_name is None or verbosity == 0 and arg_kind.is_positional():
-
arg_strings.append(format(arg_type))
else:
constructor = ARG_CONSTRUCTOR_NAMES[arg_kind]
@@ -2383,7 +2382,7 @@ def format_literal_value(typ: LiteralType) -> str:
if not typ.is_anonymous():
return format(typ.fallback)
items = []
- for (item_name, item_type) in typ.items.items():
+ for item_name, item_type in typ.items.items():
modifier = "" if item_name in typ.required_keys else "?"
items.append(f"{item_name!r}{modifier}: {format(item_type)}")
s = f"TypedDict({{{', '.join(items)}}})"
1 change: 0 additions & 1 deletion mypy/nodes.py
@@ -3326,7 +3326,6 @@ def deserialize(cls, data: JsonDict) -> TypeInfo:


class FakeInfo(TypeInfo):
-
__slots__ = ("msg",)

# types.py defines a single instance of this class, called types.NOT_READY.
1 change: 0 additions & 1 deletion mypy/plugins/attrs.py
@@ -529,7 +529,6 @@ def _cleanup_decorator(stmt: Decorator, attr_map: dict[str, Attribute]) -> None:
and isinstance(func_decorator.expr, NameExpr)
and func_decorator.expr.name in attr_map
):
-
if func_decorator.name == "default":
attr_map[func_decorator.expr.name].has_default = True

1 change: 0 additions & 1 deletion mypy/plugins/dataclasses.py
@@ -220,7 +220,6 @@ def transform(self) -> bool:
and ("__init__" not in info.names or info.names["__init__"].plugin_generated)
and attributes
):
-
with state.strict_optional_set(self._api.options.strict_optional):
args = [
attr.to_argument(info)
2 changes: 0 additions & 2 deletions mypy/plugins/singledispatch.py
@@ -99,7 +99,6 @@ def create_singledispatch_function_callback(ctx: FunctionContext) -> Type:
"""Called for functools.singledispatch"""
func_type = get_proper_type(get_first_arg(ctx.arg_types))
if isinstance(func_type, CallableType):
-
if len(func_type.arg_kinds) < 1:
fail(
ctx, "Singledispatch function requires at least one argument", func_type.definition
@@ -176,7 +175,6 @@

fallback_dispatch_type = fallback.arg_types[0]
if not is_subtype(dispatch_type, fallback_dispatch_type):
-
fail(
ctx,
"Dispatch type {} must be subtype of fallback function first argument {}".format(
3 changes: 1 addition & 2 deletions mypy/report.py
@@ -44,7 +44,7 @@
)

ReporterClasses: _TypeAlias = Dict[
- str, Tuple[Callable[["Reports", str], "AbstractReporter"], bool],
+ str, Tuple[Callable[["Reports", str], "AbstractReporter"], bool]
]

reporter_classes: Final[ReporterClasses] = {}
@@ -860,7 +860,6 @@ def on_file(
type_map: dict[Expression, Type],
options: Options,
) -> None:
-
try:
path = os.path.relpath(tree.path)
except ValueError:
2 changes: 1 addition & 1 deletion mypy/semanal.py
@@ -510,6 +510,7 @@ def prepare_typing_namespace(self, file_node: MypyFile, aliases: dict[str, str])
They will be replaced with real aliases when corresponding targets are ready.
"""
+
# This is all pretty unfortunate. typeshed now has a
# sys.version_info check for OrderedDict, and we shouldn't
# take it out, because it is correct and a typechecker should
@@ -4422,7 +4423,6 @@ def process__slots__(self, s: AssignmentStmt) -> None:
and s.lvalues[0].name == "__slots__"
and s.lvalues[0].kind == MDEF
):
-
# We understand `__slots__` defined as string, tuple, list, set, and dict:
if not isinstance(s.rvalue, (StrExpr, ListExpr, TupleExpr, SetExpr, DictExpr)):
# For example, `__slots__` can be defined as a variable,
2 changes: 1 addition & 1 deletion mypy/semanal_typeddict.py
@@ -469,7 +469,7 @@ def parse_typeddict_fields_with_types(
seen_keys = set()
items: list[str] = []
types: list[Type] = []
- for (field_name_expr, field_type_expr) in dict_items:
+ for field_name_expr, field_type_expr in dict_items:
if isinstance(field_name_expr, StrExpr):
key = field_name_expr.value
items.append(key)
2 changes: 1 addition & 1 deletion mypy/subtypes.py
@@ -1034,7 +1034,7 @@ def f(self) -> A: ...
if not members_right.issubset(members_left):
return False
assuming = right.type.assuming_proper if proper_subtype else right.type.assuming
- for (l, r) in reversed(assuming):
+ for l, r in reversed(assuming):
if l == left and r == right:
return True
with pop_on_exit(assuming, left, right):
4 changes: 4 additions & 0 deletions mypy/test/teststubgen.py
@@ -1061,6 +1061,7 @@ def test(self, arg0: str = "") -> None:

def test_generate_c_function_other_module_arg(self) -> None:
"""Test that if argument references type from other module, module will be imported."""
+
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
@@ -1087,6 +1088,7 @@ def test_generate_c_function_same_module(self) -> None:
"""Test that if annotation references type from same module but using full path, no module
will be imported, and type specification will be striped to local reference.
"""
+
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
@@ -1136,6 +1138,7 @@ def test_generate_c_function_same_module_nested(self) -> None:
"""Test that if annotation references type from same module but using full path, no module
will be imported, and type specification will be stripped to local reference.
"""
+
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
@@ -1162,6 +1165,7 @@ def test_generate_c_function_same_module_compound(self) -> None:
"""Test that if annotation references type from same module but using full path, no module
will be imported, and type specification will be stripped to local reference.
"""
+
# Provide different type in python spec than in docstring to make sure, that docstring
# information is used.
def test(arg0: str) -> None:
7 changes: 3 additions & 4 deletions mypy/types.py
@@ -505,7 +505,6 @@ def is_meta_var(self) -> bool:


class TypeVarLikeType(ProperType):
-
__slots__ = ("name", "fullname", "id", "upper_bound")

name: str # Name (may be qualified)
@@ -2406,17 +2405,17 @@ def names_are_wider_than(self, other: TypedDictType) -> bool:

def zip(self, right: TypedDictType) -> Iterable[tuple[str, Type, Type]]:
left = self
- for (item_name, left_item_type) in left.items.items():
+ for item_name, left_item_type in left.items.items():
right_item_type = right.items.get(item_name)
if right_item_type is not None:
yield (item_name, left_item_type, right_item_type)

def zipall(self, right: TypedDictType) -> Iterable[tuple[str, Type | None, Type | None]]:
left = self
- for (item_name, left_item_type) in left.items.items():
+ for item_name, left_item_type in left.items.items():
right_item_type = right.items.get(item_name)
yield (item_name, left_item_type, right_item_type)
- for (item_name, right_item_type) in right.items.items():
+ for item_name, right_item_type in right.items.items():
if item_name in left.items:
continue
yield (item_name, None, right_item_type)
4 changes: 2 additions & 2 deletions mypy/typestate.py
@@ -112,15 +112,15 @@ def __init__(self) -> None:
self.infer_unions = False

def is_assumed_subtype(self, left: Type, right: Type) -> bool:
- for (l, r) in reversed(self._assuming):
+ for l, r in reversed(self._assuming):
if get_proper_type(l) == get_proper_type(left) and get_proper_type(
r
) == get_proper_type(right):
return True
return False

def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool:
- for (l, r) in reversed(self._assuming_proper):
+ for l, r in reversed(self._assuming_proper):
if get_proper_type(l) == get_proper_type(left) and get_proper_type(
r
) == get_proper_type(right):
46 changes: 26 additions & 20 deletions mypy/typevartuples.py
@@ -52,14 +52,17 @@ def split_with_mapped_and_template(
template: tuple[Type, ...],
template_prefix_len: int,
template_suffix_len: int,
- ) -> tuple[
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- ] | None:
+ ) -> (
+ tuple[
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ ]
+ | None
+ ):
split_result = fully_split_with_mapped_and_template(
mapped,
mapped_prefix_len,
@@ -101,18 +104,21 @@ def fully_split_with_mapped_and_template(
template: tuple[Type, ...],
template_prefix_len: int,
template_suffix_len: int,
- ) -> tuple[
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- tuple[Type, ...],
- ] | None:
+ ) -> (
+ tuple[
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ tuple[Type, ...],
+ ]
+ | None
+ ):
if mapped_prefix_len is not None:
assert mapped_suffix_len is not None
mapped_prefix, mapped_middle, mapped_suffix = split_with_prefix_and_suffix(
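The two hunks above are the largest reshaping in the commit: black 23.x wraps a multi-line union return annotation in parentheses instead of hanging "| None" off the closing bracket. A sketch of that layout with hypothetical names not taken from mypy (black's exact output also depends on line length, so this is illustrative only):

from __future__ import annotations


# Hypothetical helper, not part of this commit: shows the parenthesized
# return-annotation layout black 23.x uses for a long "tuple[...] | None".
def split_halves(
    values: tuple[int, ...],
) -> (
    tuple[
        tuple[int, ...],
        tuple[int, ...],
    ]
    | None
):
    if not values:
        return None
    mid = len(values) // 2
    return values[:mid], values[mid:]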