Autofix ruf010 (#56914)
* STY: Autofix RUF010

* STY: Apply ruff format after autofix

* STY: Final manual fix

* STY: Remove old repl rule in pre-commit
tqa236 authored Jan 16, 2024
1 parent 37d7db4 commit 0dade0b
Showing 52 changed files with 114 additions and 128 deletions.
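For context, RUF010 is ruff's "explicit f-string type conversion" rule: it rewrites str(...), repr(...), and ascii(...) calls inside f-string replacement fields into the equivalent !s, !r, and !a conversion flags. A minimal sketch of the rewrite applied throughout this commit, based on one of the strings touched below (illustrative only, not part of the diff):

    engine = "numexpr"

    # Before the autofix (flagged by RUF010):
    before = f"df.eval(s, engine={repr(engine)})"

    # After the autofix (e.g. `ruff check --fix` with RUF010 enabled) and `ruff format`:
    after = f"df.eval(s, engine={engine!r})"

    # The two spellings render identically.
    assert before == after
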
3 changes: 0 additions & 3 deletions .pre-commit-config.yaml
@@ -190,9 +190,6 @@ repos:
# Check for deprecated messages without sphinx directive
|(DEPRECATED|DEPRECATE|Deprecated)(:|,|\.)
- # {foo!r} instead of {repr(foo)}
- |!r}
# builtin filter function
|(?<!def)[\(\s]filter\(
types_or: [python, cython, rst]
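
The pattern deleted above appears to have flagged the !r} spelling and pointed contributors to repr() instead; with RUF010 now enforcing the conversion-flag form, that check is redundant, since both spellings render the same text. A small illustrative check (assumed example, not part of the commit):

    value = [1, "a", None]

    # Conversion flags and the corresponding builtins produce identical output,
    # so flipping the convention does not change any error or warning messages.
    assert f"{value!r}" == f"{repr(value)}"
    assert f"{value!s}" == f"{str(value)}"
    assert f"{value!a}" == f"{ascii(value)}"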

6 changes: 3 additions & 3 deletions doc/scripts/eval_performance.py
@@ -17,7 +17,7 @@ def bench_with(n, times=10, repeat=3, engine="numexpr"):
return (
np.array(
timeit(
- f"df.eval(s, engine={repr(engine)})",
+ f"df.eval(s, engine={engine!r})",
setup=setup_common % (n, setup_with),
repeat=repeat,
number=times,
@@ -34,7 +34,7 @@ def bench_subset(n, times=20, repeat=3, engine="numexpr"):
return (
np.array(
timeit(
- f"df.query(s, engine={repr(engine)})",
+ f"df.query(s, engine={engine!r})",
setup=setup_common % (n, setup_subset),
repeat=repeat,
number=times,
@@ -55,7 +55,7 @@ def bench(mn=3, mx=7, num=100, engines=("python", "numexpr"), verbose=False):
for engine in engines:
for i, n in enumerate(r):
if verbose & (i % 10 == 0):
- print(f"engine: {repr(engine)}, i == {i:d}")
+ print(f"engine: {engine!r}, i == {i:d}")
ev_times = bench_with(n, times=1, repeat=1, engine=engine)
ev.loc[i, engine] = np.mean(ev_times)
qu_times = bench_subset(n, times=1, repeat=1, engine=engine)

2 changes: 1 addition & 1 deletion pandas/_config/config.py
@@ -130,7 +130,7 @@ def _get_single_key(pat: str, silent: bool) -> str:
if len(keys) == 0:
if not silent:
_warn_if_deprecated(pat)
- raise OptionError(f"No such keys(s): {repr(pat)}")
+ raise OptionError(f"No such keys(s): {pat!r}")
if len(keys) > 1:
raise OptionError("Pattern matched multiple keys")
key = keys[0]

7 changes: 3 additions & 4 deletions pandas/_testing/_warnings.py
@@ -152,13 +152,12 @@ def _assert_caught_expected_warning(

if not saw_warning:
raise AssertionError(
- f"Did not see expected warning of class "
- f"{repr(expected_warning.__name__)}"
+ f"Did not see expected warning of class {expected_warning.__name__!r}"
)

if match and not matched_message:
raise AssertionError(
- f"Did not see warning {repr(expected_warning.__name__)} "
+ f"Did not see warning {expected_warning.__name__!r} "
f"matching '{match}'. The emitted warning messages are "
f"{unmatched_messages}"
)
@@ -200,7 +199,7 @@ def _assert_caught_no_extra_warnings(
)

if extra_warnings:
- raise AssertionError(f"Caused unexpected warning(s): {repr(extra_warnings)}")
+ raise AssertionError(f"Caused unexpected warning(s): {extra_warnings!r}")


def _is_unexpected_warning(

18 changes: 9 additions & 9 deletions pandas/_testing/asserters.py
@@ -432,14 +432,14 @@ def assert_is_valid_plot_return_object(objs) -> None:
for el in objs.ravel():
msg = (
"one of 'objs' is not a matplotlib Axes instance, "
- f"type encountered {repr(type(el).__name__)}"
+ f"type encountered {type(el).__name__!r}"
)
assert isinstance(el, (Axes, dict)), msg
else:
msg = (
"objs is neither an ndarray of Artist instances nor a single "
"ArtistArtist instance, tuple, or dict, 'objs' is a "
- f"{repr(type(objs).__name__)}"
+ f"{type(objs).__name__!r}"
)
assert isinstance(objs, (Artist, tuple, dict)), msg

@@ -661,10 +661,10 @@ def _get_base(obj):

if check_same == "same":
if left_base is not right_base:
- raise AssertionError(f"{repr(left_base)} is not {repr(right_base)}")
+ raise AssertionError(f"{left_base!r} is not {right_base!r}")
elif check_same == "copy":
if left_base is right_base:
- raise AssertionError(f"{repr(left_base)} is {repr(right_base)}")
+ raise AssertionError(f"{left_base!r} is {right_base!r}")

def _raise(left, right, err_msg) -> NoReturn:
if err_msg is None:
@@ -935,7 +935,7 @@ def assert_series_equal(
raise_assert_detail(obj, "Series length are different", msg1, msg2)

if check_flags:
- assert left.flags == right.flags, f"{repr(left.flags)} != {repr(right.flags)}"
+ assert left.flags == right.flags, f"{left.flags!r} != {right.flags!r}"

if check_index:
# GH #38183
@@ -1215,11 +1215,11 @@ def assert_frame_equal(
# shape comparison
if left.shape != right.shape:
raise_assert_detail(
- obj, f"{obj} shape mismatch", f"{repr(left.shape)}", f"{repr(right.shape)}"
+ obj, f"{obj} shape mismatch", f"{left.shape!r}", f"{right.shape!r}"
)

if check_flags:
- assert left.flags == right.flags, f"{repr(left.flags)} != {repr(right.flags)}"
+ assert left.flags == right.flags, f"{left.flags!r} != {right.flags!r}"

# index comparison
assert_index_equal(
@@ -1369,7 +1369,7 @@ def assert_sp_array_equal(left, right) -> None:

def assert_contains_all(iterable, dic) -> None:
for k in iterable:
- assert k in dic, f"Did not contain item: {repr(k)}"
+ assert k in dic, f"Did not contain item: {k!r}"


def assert_copy(iter1, iter2, **eql_kwargs) -> None:
@@ -1384,7 +1384,7 @@ def assert_copy(iter1, iter2, **eql_kwargs) -> None:
for elem1, elem2 in zip(iter1, iter2):
assert_almost_equal(elem1, elem2, **eql_kwargs)
msg = (
- f"Expected object {repr(type(elem1))} and object {repr(type(elem2))} to be "
+ f"Expected object {type(elem1)!r} and object {type(elem2)!r} to be "
"different objects, but they were the same object."
)
assert elem1 is not elem2, msg

2 changes: 1 addition & 1 deletion pandas/_version.py
@@ -141,7 +141,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):

if verbose:
print(
- f"Tried directories {str(rootdirs)} \
+ f"Tried directories {rootdirs!s} \
but none started with prefix {parentdir_prefix}"
)
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")

2 changes: 1 addition & 1 deletion pandas/conftest.py
@@ -273,7 +273,7 @@ def configure_tests() -> None:
# ----------------------------------------------------------------
# Common arguments
# ----------------------------------------------------------------
- @pytest.fixture(params=[0, 1, "index", "columns"], ids=lambda x: f"axis={repr(x)}")
+ @pytest.fixture(params=[0, 1, "index", "columns"], ids=lambda x: f"axis={x!r}")
def axis(request):
"""
Fixture for returning the axis numbers of a DataFrame.

4 changes: 2 additions & 2 deletions pandas/core/accessor.py
@@ -306,8 +306,8 @@ def plot(self):
def decorator(accessor):
if hasattr(cls, name):
warnings.warn(
- f"registration of accessor {repr(accessor)} under name "
- f"{repr(name)} for type {repr(cls)} is overriding a preexisting "
+ f"registration of accessor {accessor!r} under name "
+ f"{name!r} for type {cls!r} is overriding a preexisting "
f"attribute with the same name.",
UserWarning,
stacklevel=find_stack_level(),

2 changes: 1 addition & 1 deletion pandas/core/array_algos/replace.py
@@ -78,7 +78,7 @@ def _check_comparison_types(
type_names[0] = f"ndarray(dtype={a.dtype})"

raise TypeError(
- f"Cannot compare types {repr(type_names[0])} and {repr(type_names[1])}"
+ f"Cannot compare types {type_names[0]!r} and {type_names[1]!r}"
)

if not regex or not should_use_regex(regex, b):

4 changes: 2 additions & 2 deletions pandas/core/arrays/arrow/array.py
@@ -1109,7 +1109,7 @@ def fillna(
try:
fill_value = self._box_pa(value, pa_type=self._pa_array.type)
except pa.ArrowTypeError as err:
- msg = f"Invalid value '{str(value)}' for dtype {self.dtype}"
+ msg = f"Invalid value '{value!s}' for dtype {self.dtype}"
raise TypeError(msg) from err

try:
@@ -2065,7 +2065,7 @@ def _maybe_convert_setitem_value(self, value):
try:
value = self._box_pa(value, self._pa_array.type)
except pa.ArrowTypeError as err:
- msg = f"Invalid value '{str(value)}' for dtype {self.dtype}"
+ msg = f"Invalid value '{value!s}' for dtype {self.dtype}"
raise TypeError(msg) from err
return value


2 changes: 1 addition & 1 deletion pandas/core/arrays/categorical.py
@@ -2024,7 +2024,7 @@ def sort_values(
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if na_position not in ["last", "first"]:
- raise ValueError(f"invalid na_position: {repr(na_position)}")
+ raise ValueError(f"invalid na_position: {na_position!r}")

sorted_idx = nargsort(self, ascending=ascending, na_position=na_position)


2 changes: 1 addition & 1 deletion pandas/core/arrays/masked.py
@@ -303,7 +303,7 @@ def _validate_setitem_value(self, value):

# Note: without the "str" here, the f-string rendering raises in
# py38 builds.
- raise TypeError(f"Invalid value '{str(value)}' for dtype {self.dtype}")
+ raise TypeError(f"Invalid value '{value!s}' for dtype {self.dtype}")

def __setitem__(self, key, value) -> None:
key = check_array_indexer(self, key)

2 changes: 1 addition & 1 deletion pandas/core/computation/align.py
@@ -127,7 +127,7 @@ def _align_core(terms):
if ordm >= 1 and reindexer_size >= 10000:
w = (
f"Alignment difference on axis {axis} is larger "
- f"than an order of magnitude on term {repr(terms[i].name)}, "
+ f"than an order of magnitude on term {terms[i].name!r}, "
f"by more than {ordm:.4g}; performance may suffer."
)
warnings.warn(

4 changes: 2 additions & 2 deletions pandas/core/computation/expressions.py
@@ -214,9 +214,9 @@ def _bool_arith_fallback(op_str, a, b) -> bool:
if _has_bool_dtype(a) and _has_bool_dtype(b):
if op_str in _BOOL_OP_UNSUPPORTED:
warnings.warn(
- f"evaluating in Python space because the {repr(op_str)} "
+ f"evaluating in Python space because the {op_str!r} "
"operator is not supported by numexpr for the bool dtype, "
- f"use {repr(_BOOL_OP_UNSUPPORTED[op_str])} instead.",
+ f"use {_BOOL_OP_UNSUPPORTED[op_str]!r} instead.",
stacklevel=find_stack_level(),
)
return True

6 changes: 3 additions & 3 deletions pandas/core/computation/ops.py
@@ -160,7 +160,7 @@ def type(self):

@property
def raw(self) -> str:
- return f"{type(self).__name__}(name={repr(self.name)}, type={self.type})"
+ return f"{type(self).__name__}(name={self.name!r}, type={self.type})"

@property
def is_datetime(self) -> bool:
@@ -387,7 +387,7 @@ def __init__(self, op: str, lhs, rhs) -> None:
# has to be made a list for python3
keys = list(_binary_ops_dict.keys())
raise ValueError(
- f"Invalid binary operator {repr(op)}, valid operators are {keys}"
+ f"Invalid binary operator {op!r}, valid operators are {keys}"
) from err

def __call__(self, env):
@@ -571,7 +571,7 @@ def __init__(self, op: Literal["+", "-", "~", "not"], operand) -> None:
self.func = _unary_ops_dict[op]
except KeyError as err:
raise ValueError(
- f"Invalid unary operator {repr(op)}, "
+ f"Invalid unary operator {op!r}, "
f"valid operators are {UNARY_OPS_SYMS}"
) from err


6 changes: 2 additions & 4 deletions pandas/core/computation/pytables.py
@@ -81,7 +81,7 @@ def _resolve_name(self):
if self.side == "left":
# Note: The behavior of __new__ ensures that self.name is a str here
if self.name not in self.env.queryables:
- raise NameError(f"name {repr(self.name)} is not defined")
+ raise NameError(f"name {self.name!r} is not defined")
return self.name

# resolve the rhs (and allow it to be None)
@@ -467,9 +467,7 @@ def visit_Subscript(self, node, **kwargs) -> ops.Term:
try:
return self.const_type(value[slobj], self.env)
except TypeError as err:
- raise ValueError(
- f"cannot subscript {repr(value)} with {repr(slobj)}"
- ) from err
+ raise ValueError(f"cannot subscript {value!r} with {slobj!r}") from err

def visit_Attribute(self, node, **kwargs):
attr = node.attr

2 changes: 1 addition & 1 deletion pandas/core/dtypes/cast.py
@@ -243,7 +243,7 @@ def _disallow_mismatched_datetimelike(value, dtype: DtypeObj) -> None:
elif (vdtype.kind == "m" and dtype.kind == "M") or (
vdtype.kind == "M" and dtype.kind == "m"
):
- raise TypeError(f"Cannot cast {repr(value)} to {dtype}")
+ raise TypeError(f"Cannot cast {value!r} to {dtype}")


@overload

4 changes: 2 additions & 2 deletions pandas/core/dtypes/common.py
@@ -1548,8 +1548,8 @@ def _validate_date_like_dtype(dtype) -> None:
raise TypeError(e) from e
if typ not in ["generic", "ns"]:
raise ValueError(
- f"{repr(dtype.name)} is too specific of a frequency, "
- f"try passing {repr(dtype.type.__name__)}"
+ f"{dtype.name!r} is too specific of a frequency, "
+ f"try passing {dtype.type.__name__!r}"
)



12 changes: 6 additions & 6 deletions pandas/core/dtypes/dtypes.py
@@ -318,7 +318,7 @@ def _from_values_or_dtype(

dtype = CategoricalDtype(categories, ordered)
else:
- raise ValueError(f"Unknown dtype {repr(dtype)}")
+ raise ValueError(f"Unknown dtype {dtype!r}")
elif categories is not None or ordered is not None:
raise ValueError(
"Cannot specify `categories` or `ordered` together with `dtype`."
@@ -566,7 +566,7 @@ def validate_categories(categories, fastpath: bool = False) -> Index:

if not fastpath and not is_list_like(categories):
raise TypeError(
- f"Parameter 'categories' must be list-like, was {repr(categories)}"
+ f"Parameter 'categories' must be list-like, was {categories!r}"
)
if not isinstance(categories, ABCIndex):
categories = Index._with_infer(categories, tupleize_cols=False)
@@ -602,7 +602,7 @@ def update_dtype(self, dtype: str_type | CategoricalDtype) -> CategoricalDtype:
elif not self.is_dtype(dtype):
raise ValueError(
f"a CategoricalDtype must be passed to perform an update, "
- f"got {repr(dtype)}"
+ f"got {dtype!r}"
)
else:
# from here on, dtype is a CategoricalDtype
@@ -1458,7 +1458,7 @@ def __init__(self, dtype: npt.DTypeLike | NumpyEADtype | None) -> None:
self._dtype = np.dtype(dtype)

def __repr__(self) -> str:
- return f"NumpyEADtype({repr(self.name)})"
+ return f"NumpyEADtype({self.name!r})"

@property
def numpy_dtype(self) -> np.dtype:
@@ -1814,7 +1814,7 @@ def subtype(self):

@property
def name(self) -> str:
- return f"Sparse[{self.subtype.name}, {repr(self.fill_value)}]"
+ return f"Sparse[{self.subtype.name}, {self.fill_value!r}]"

def __repr__(self) -> str:
return self.name
@@ -2173,7 +2173,7 @@ def name(self) -> str:  # type: ignore[override]
"""
A string identifying the data type.
"""
- return f"{str(self.pyarrow_dtype)}[{self.storage}]"
+ return f"{self.pyarrow_dtype!s}[{self.storage}]"

@cache_readonly
def numpy_dtype(self) -> np.dtype:

6 changes: 2 additions & 4 deletions pandas/core/frame.py
@@ -10395,9 +10395,7 @@ def map(
1 11.262736 20.857489
"""
if na_action not in {"ignore", None}:
- raise ValueError(
- f"na_action must be 'ignore' or None. Got {repr(na_action)}"
- )
+ raise ValueError(f"na_action must be 'ignore' or None. Got {na_action!r}")

if self.empty:
return self.copy()
@@ -11860,7 +11858,7 @@ def _get_agg_axis(self, axis_num: int) -> Index:
elif axis_num == 1:
return self.index
else:
- raise ValueError(f"Axis must be 0 or 1 (got {repr(axis_num)})")
+ raise ValueError(f"Axis must be 0 or 1 (got {axis_num!r})")

def mode(
self, axis: Axis = 0, numeric_only: bool = False, dropna: bool = True

Diffs for the remaining 32 changed files are not shown here.