Apply new ruff rules #10428

Merged · 9 commits · Jun 20, 2025
8 changes: 4 additions & 4 deletions xarray/backends/api.py
@@ -11,6 +11,7 @@
 )
 from functools import partial
 from io import BytesIO
+from itertools import starmap
 from numbers import Number
 from typing import (
     TYPE_CHECKING,
@@ -2109,10 +2110,9 @@ def save_mfdataset(
         import dask

         return dask.delayed(
-            [
-                dask.delayed(_finalize_store)(w, s)
-                for w, s in zip(writes, stores, strict=True)
-            ]
+            list(
+                starmap(dask.delayed(_finalize_store), zip(writes, stores, strict=True))
+            )
         )

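The hunk above replaces an argument-unpacking comprehension with itertools.starmap. A minimal standalone sketch of the equivalence; the finalize function and the writes/stores lists here are invented stand-ins for illustration, not xarray's actual objects:

from itertools import starmap

def finalize(write, store):  # stand-in for a two-argument callable like _finalize_store
    return f"closed {store} after {write}"

writes = ["w1", "w2"]
stores = ["s1", "s2"]

# Comprehension form: unpack each (write, store) pair by hand.
via_comprehension = [finalize(w, s) for w, s in zip(writes, stores, strict=True)]

# starmap form: each tuple produced by zip() is unpacked into finalize's arguments.
via_starmap = list(starmap(finalize, zip(writes, stores, strict=True)))

assert via_comprehension == via_starmap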
4 changes: 2 additions & 2 deletions xarray/coding/cftimeindex.py
@@ -43,7 +43,7 @@

 import math
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any

 import numpy as np
 import pandas as pd
@@ -549,7 +549,7 @@ def __rsub__(self, other):
             ) from err

     def to_datetimeindex(
-        self, unsafe: bool = False, time_unit: Optional[PDDatetimeUnitOptions] = None
+        self, unsafe: bool = False, time_unit: PDDatetimeUnitOptions | None = None
     ) -> pd.DatetimeIndex:
         """If possible, convert this index to a pandas.DatetimeIndex.

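This change drops typing.Optional in favour of the PEP 604 union spelling. A small sketch, independent of xarray, showing that the two spellings describe the same union at runtime:

from typing import Optional, get_args

OldStyle = Optional[str]   # spelling removed by this PR
NewStyle = str | None      # PEP 604 spelling the lint rule prefers

# Both unions contain exactly the same members, in the same order.
assert get_args(OldStyle) == get_args(NewStyle) == (str, type(None))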
10 changes: 5 additions & 5 deletions xarray/computation/apply_ufunc.py
@@ -529,8 +529,10 @@ def apply_dataset_vfunc(
     out: Dataset | tuple[Dataset, ...]
     if signature.num_outputs > 1:
         out = tuple(
-            _fast_dataset(*args)
-            for args in zip(result_vars, list_of_coords, list_of_indexes, strict=True)
+            itertools.starmap(
+                _fast_dataset,
+                zip(result_vars, list_of_coords, list_of_indexes, strict=True),
+            )
         )
     else:
         (coord_vars,) = list_of_coords
@@ -600,9 +602,7 @@ def apply_groupby_func(func, *args):
             iterator = itertools.repeat(arg)
         iterators.append(iterator)

-    applied: Iterator = (
-        func(*zipped_args) for zipped_args in zip(*iterators, strict=False)
-    )
+    applied: Iterator = itertools.starmap(func, zip(*iterators, strict=False))
     applied_example, applied = peek_at(applied)
     combine = first_groupby._combine  # type: ignore[attr-defined]
     if isinstance(applied_example, tuple):
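Both hunks swap a generator expression for itertools.starmap. One point worth noting: starmap is equally lazy, which matters in the second hunk because the result is later peeked at and consumed incrementally. A minimal sketch with an invented func and toy inputs:

import itertools

calls = []

def func(a, b):
    calls.append((a, b))
    return a + b

# starmap returns a lazy iterator, just like the generator expression it replaces.
applied = itertools.starmap(func, zip([1, 2, 3], [10, 20, 30]))
assert calls == []                 # nothing has been evaluated yet

first = next(applied)              # pull a single result, similar to what peek_at() does
assert first == 11 and calls == [(1, 10)]

assert list(applied) == [22, 33]   # the remainder is computed on demand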
2 changes: 1 addition & 1 deletion xarray/computation/computation.py
@@ -258,7 +258,7 @@ def _cov_corr(
     weights: T_DataArray | None = None,
     dim: Dims = None,
     ddof: int = 0,
-    method: Literal["cov", "corr", None] = None,
+    method: Literal["cov", "corr"] | None = None,
 ) -> T_DataArray:
     """
     Internal method for xr.cov() and xr.corr() so only have to
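This is a pure spelling change: None inside a Literal is treated by type checkers as Literal[None] (i.e. NoneType), so moving it outside as | None accepts the same set of values. A small standalone sketch of how the two forms look at runtime:

from typing import Literal, get_args

MethodOld = Literal["cov", "corr", None]     # None folded into the Literal
MethodNew = Literal["cov", "corr"] | None    # None as an explicit union member

# The old spelling flattens None into the literal's values...
assert get_args(MethodOld) == ("cov", "corr", None)
# ...while the new spelling is a two-member union of the Literal and NoneType.
literal_part, none_part = get_args(MethodNew)
assert get_args(literal_part) == ("cov", "corr") and none_part is type(None)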
9 changes: 5 additions & 4 deletions xarray/core/dataset.py
@@ -2334,9 +2334,10 @@ def info(self, buf: IO | None = None) -> None:
         if buf is None:  # pragma: no cover
             buf = sys.stdout

-        lines = []
-        lines.append("xarray.Dataset {")
-        lines.append("dimensions:")
+        lines = [
+            "xarray.Dataset {",
+            "dimensions:",
+        ]
         for name, size in self.sizes.items():
             lines.append(f"\t{name} = {size} ;")
         lines.append("\nvariables:")
@@ -9708,7 +9709,7 @@ def convert_calendar(
         self,
         calendar: CFCalendar,
         dim: Hashable = "time",
-        align_on: Literal["date", "year", None] = None,
+        align_on: Literal["date", "year"] | None = None,
         missing: Any | None = None,
         use_cftime: bool | None = None,
     ) -> Self:
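The info() hunk seeds the list with a literal instead of two append calls; the resulting list is identical and later appends still extend it. A minimal sketch with made-up sizes:

# Building the header with repeated append calls...
lines_old = []
lines_old.append("xarray.Dataset {")
lines_old.append("dimensions:")

# ...is equivalent to seeding the list with a literal.
lines_new = [
    "xarray.Dataset {",
    "dimensions:",
]

assert lines_old == lines_new

# Subsequent code keeps appending to the same list either way.
for name, size in {"time": 10, "x": 4}.items():
    lines_new.append(f"\t{name} = {size} ;")
assert lines_new[2] == "\ttime = 10 ;"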
2 changes: 1 addition & 1 deletion xarray/core/indexes.py
@@ -1955,7 +1955,7 @@ def _wrap_index_equals(
             f"the signature ``{index_cls_name}.equals(self, other)`` is deprecated. "
             f"Please update it to "
             f"``{index_cls_name}.equals(self, other, *, exclude=None)`` "
-            "or kindly ask the maintainers of ``{index_cls_name}`` to do it. "
+            f"or kindly ask the maintainers of ``{index_cls_name}`` to do it. "
             "See documentation of xarray.Index.equals() for more info.",
             FutureWarning,
         )
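This hunk is a genuine bug fix surfaced by the new rules: the string interpolates {index_cls_name} but was missing the f prefix, so the placeholder was emitted literally in the warning. A quick standalone demonstration:

index_cls_name = "MyIndex"

without_f = "ask the maintainers of ``{index_cls_name}`` to do it."
with_f = f"ask the maintainers of ``{index_cls_name}`` to do it."

# Without the f prefix the braces are kept verbatim instead of being substituted.
assert without_f == "ask the maintainers of ``{index_cls_name}`` to do it."
assert with_f == "ask the maintainers of ``MyIndex`` to do it."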
16 changes: 8 additions & 8 deletions xarray/core/options.py
@@ -46,16 +46,16 @@ class T_Options(TypedDict):
     display_values_threshold: int
     display_style: Literal["text", "html"]
     display_width: int
-    display_expand_attrs: Literal["default", True, False]
-    display_expand_coords: Literal["default", True, False]
-    display_expand_data_vars: Literal["default", True, False]
-    display_expand_data: Literal["default", True, False]
-    display_expand_groups: Literal["default", True, False]
-    display_expand_indexes: Literal["default", True, False]
-    display_default_indexes: Literal["default", True, False]
+    display_expand_attrs: Literal["default"] | bool
+    display_expand_coords: Literal["default"] | bool
+    display_expand_data_vars: Literal["default"] | bool
+    display_expand_data: Literal["default"] | bool
+    display_expand_groups: Literal["default"] | bool
+    display_expand_indexes: Literal["default"] | bool
+    display_default_indexes: Literal["default"] | bool
     enable_cftimeindex: bool
     file_cache_maxsize: int
-    keep_attrs: Literal["default", True, False]
+    keep_attrs: Literal["default"] | bool
     warn_for_unclosed_files: bool
     use_bottleneck: bool
     use_flox: bool
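Since bool has exactly two instances, Literal[True, False] is the same type as bool, so Literal["default"] | bool admits the same values as Literal["default", True, False]. A small sketch of a runtime check that matches either annotation:

from typing import Literal

ExpandOld = Literal["default", True, False]
ExpandNew = Literal["default"] | bool

def is_valid(value: object) -> bool:
    # Accept exactly the values both annotations describe: "default" or a real bool.
    return value == "default" or isinstance(value, bool)

assert all(is_valid(v) for v in ("default", True, False))
assert not is_valid("html")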
18 changes: 9 additions & 9 deletions xarray/core/types.py
@@ -253,16 +253,16 @@ def copy(
 InterpnOptions = Literal["linear", "nearest", "slinear", "cubic", "quintic", "pchip"]
 InterpOptions = Union[Interp1dOptions, InterpolantOptions, InterpnOptions]

-DatetimeUnitOptions = Literal[
-    "W", "D", "h", "m", "s", "ms", "us", "μs", "ns", "ps", "fs", "as", None
-]
+DatetimeUnitOptions = (
+    Literal["W", "D", "h", "m", "s", "ms", "us", "μs", "ns", "ps", "fs", "as"] | None
+)
 NPDatetimeUnitOptions = Literal["D", "h", "m", "s", "ms", "us", "ns"]
 PDDatetimeUnitOptions = Literal["s", "ms", "us", "ns"]

-QueryEngineOptions = Literal["python", "numexpr", None]
+QueryEngineOptions = Literal["python", "numexpr"] | None
 QueryParserOptions = Literal["pandas", "python"]

-ReindexMethodOptions = Literal["nearest", "pad", "ffill", "backfill", "bfill", None]
+ReindexMethodOptions = Literal["nearest", "pad", "ffill", "backfill", "bfill"] | None

 PadModeOptions = Literal[
     "constant",
@@ -281,7 +281,7 @@ def copy(
 T_DatasetPadConstantValues = (
     T_VarPadConstantValues | Mapping[Any, T_VarPadConstantValues]
 )
-PadReflectOptions = Literal["even", "odd", None]
+PadReflectOptions = Literal["even", "odd"] | None

 CFCalendar = Literal[
     "standard",
@@ -299,10 +299,10 @@ def copy(
 SideOptions = Literal["left", "right"]
 InclusiveOptions = Literal["both", "neither", "left", "right"]

-ScaleOptions = Literal["linear", "symlog", "log", "logit", None]
-HueStyleOptions = Literal["continuous", "discrete", None]
+ScaleOptions = Literal["linear", "symlog", "log", "logit"] | None
+HueStyleOptions = Literal["continuous", "discrete"] | None
 AspectOptions = Union[Literal["auto", "equal"], float, None]
-ExtendOptions = Literal["neither", "both", "min", "max", None]
+ExtendOptions = Literal["neither", "both", "min", "max"] | None


 _T_co = TypeVar("_T_co", covariant=True)
2 changes: 1 addition & 1 deletion xarray/core/utils.py
@@ -241,7 +241,7 @@ def equivalent(first: T, second: T) -> bool:
 def list_equiv(first: Sequence[T], second: Sequence[T]) -> bool:
     if len(first) != len(second):
         return False
-    return all(equivalent(f, s) for f, s in zip(first, second, strict=True))
+    return all(itertools.starmap(equivalent, zip(first, second, strict=True)))


 def peek_at(iterable: Iterable[T]) -> tuple[T, Iterator[T]]:
3 changes: 1 addition & 2 deletions xarray/core/variable.py
@@ -2232,8 +2232,7 @@ def coarsen_reshape(self, windows, boundary, side):
         for i, d in enumerate(variable.dims):
             if d in windows:
                 size = variable.shape[i]
-                shape.append(int(size / windows[d]))
-                shape.append(windows[d])
+                shape.extend((int(size / windows[d]), windows[d]))
                 axis_count += 1
                 axes.append(i + axis_count)
             else:
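One extend() with a tuple replaces two consecutive append() calls; the resulting list is identical. A sketch with made-up sizes:

size, window = 12, 3

shape_old: list[int] = []
shape_old.append(int(size / window))
shape_old.append(window)

shape_new: list[int] = []
# extend() with a 2-tuple pushes both items in one call, same end result.
shape_new.extend((int(size / window), window))

assert shape_old == shape_new == [4, 3]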
2 changes: 1 addition & 1 deletion xarray/groupers.py
@@ -553,7 +553,7 @@ def factorize(self, group: T_Group) -> EncodedGroups:
         full_index, first_items, codes_ = self._get_index_and_items()
         sbins = first_items.values.astype(np.int64)
         group_indices: GroupIndices = tuple(
-            [slice(i, j) for i, j in pairwise(sbins)] + [slice(sbins[-1], None)]
+            list(itertools.starmap(slice, pairwise(sbins))) + [slice(sbins[-1], None)]
         )

         unique_coord = Variable(
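starmap(slice, pairwise(bins)) builds one slice per adjacent pair of bin offsets, just like the comprehension it replaces, and the final open-ended slice for the last bin is still appended. A standalone sketch with toy offsets:

from itertools import pairwise, starmap

sbins = [0, 3, 5, 9]   # toy first-item offsets, one per bin

old = [slice(i, j) for i, j in pairwise(sbins)] + [slice(sbins[-1], None)]
new = list(starmap(slice, pairwise(sbins))) + [slice(sbins[-1], None)]

# pairwise() yields (0, 3), (3, 5), (5, 9); starmap feeds each pair to slice().
assert old == new == [slice(0, 3), slice(3, 5), slice(5, 9), slice(9, None)]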
3 changes: 2 additions & 1 deletion xarray/namedarray/core.py
@@ -5,6 +5,7 @@
 import sys
 import warnings
 from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence
+from itertools import starmap
 from types import EllipsisType
 from typing import (
     TYPE_CHECKING,
@@ -849,7 +850,7 @@ def chunk(
                 ndata = ImplicitToExplicitIndexingAdapter(data_old, OuterIndexer)  # type: ignore[assignment]

             if is_dict_like(chunks):
-                chunks = tuple(chunks.get(n, s) for n, s in enumerate(ndata.shape))
+                chunks = tuple(starmap(chunks.get, enumerate(ndata.shape)))

             data_chunked = chunkmanager.from_array(ndata, chunks, **from_array_kwargs)  # type: ignore[arg-type]

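Here starmap feeds each (axis, size) pair from enumerate() to dict.get, so the axis size acts as the default whenever that axis has no explicit chunk, exactly as chunks.get(n, s) did in the comprehension. A sketch with a made-up shape:

from itertools import starmap

shape = (10, 20, 30)
chunks = {0: 5}   # only axis 0 has an explicit chunk size

# Each (axis_index, axis_size) pair becomes chunks.get(axis_index, axis_size),
# so axes without an entry fall back to their full size.
old = tuple(chunks.get(n, s) for n, s in enumerate(shape))
new = tuple(starmap(chunks.get, enumerate(shape)))

assert old == new == (5, 20, 30)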
4 changes: 2 additions & 2 deletions xarray/plot/dataarray_plot.py
@@ -1479,7 +1479,7 @@ def newplotfunc(
         if subplot_kws is None:
             subplot_kws = dict()

-        if plotfunc.__name__ == "surface" and not kwargs.get("_is_facetgrid", False):
+        if plotfunc.__name__ == "surface" and not kwargs.get("_is_facetgrid"):
             if ax is None:
                 # TODO: Importing Axes3D is no longer necessary in matplotlib >= 3.2.
                 # Remove when minimum requirement of matplotlib is 3.2:
@@ -1511,7 +1511,7 @@ def newplotfunc(

         if (
             plotfunc.__name__ == "surface"
-            and not kwargs.get("_is_facetgrid", False)
+            and not kwargs.get("_is_facetgrid")
             and ax is not None
         ):
             import mpl_toolkits
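dict.get(key) already returns None when the key is missing, and None is just as falsy as the explicit False default, so the boolean tests above behave identically. A quick sketch:

kwargs: dict[str, bool] = {}

# With the key absent, .get() defaults to None, which is just as falsy as False.
assert kwargs.get("_is_facetgrid", False) is False
assert kwargs.get("_is_facetgrid") is None
assert not kwargs.get("_is_facetgrid", False)
assert not kwargs.get("_is_facetgrid")

# With the key present, the default never applies, so behaviour is unchanged.
kwargs["_is_facetgrid"] = True
assert kwargs.get("_is_facetgrid") and kwargs.get("_is_facetgrid", False)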
15 changes: 9 additions & 6 deletions xarray/structure/alignment.py
@@ -5,6 +5,7 @@
 from collections import defaultdict
 from collections.abc import Callable, Hashable, Iterable, Mapping
 from contextlib import suppress
+from itertools import starmap
 from typing import TYPE_CHECKING, Any, Final, Generic, TypeVar, cast, overload

 import numpy as np
@@ -610,12 +611,14 @@ def _reindex_one(

     def reindex_all(self) -> None:
         self.results = tuple(
-            self._reindex_one(obj, matching_indexes, matching_index_vars)
-            for obj, matching_indexes, matching_index_vars in zip(
-                self.objects,
-                self.objects_matching_indexes,
-                self.objects_matching_index_vars,
-                strict=True,
+            starmap(
+                self._reindex_one,
+                zip(
+                    self.objects,
+                    self.objects_matching_indexes,
+                    self.objects_matching_index_vars,
+                    strict=True,
+                ),
             )
         )

2 changes: 1 addition & 1 deletion xarray/structure/chunks.py
@@ -74,7 +74,7 @@ def _get_chunk(var: Variable, chunks, chunkmanager: ChunkManagerEntrypoint):
     # Determine the explicit requested chunks.
     preferred_chunks = var.encoding.get("preferred_chunks", {})
     preferred_chunk_shape = tuple(
-        preferred_chunks.get(dim, size) for dim, size in zip(dims, shape, strict=True)
+        itertools.starmap(preferred_chunks.get, zip(dims, shape, strict=True))
     )
     if isinstance(chunks, Number) or (chunks == "auto"):
         chunks = dict.fromkeys(dims, chunks)
6 changes: 3 additions & 3 deletions xarray/tests/test_cftime_offsets.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import warnings
-from itertools import product
+from itertools import product, starmap
 from typing import TYPE_CHECKING, Literal

 import numpy as np
@@ -1220,7 +1220,7 @@ def test_cftime_range(
     start, end, periods, freq, inclusive, normalize, calendar, expected_date_args
 ):
     date_type = get_date_type(calendar)
-    expected_dates = [date_type(*args) for args in expected_date_args]
+    expected_dates = list(starmap(date_type, expected_date_args))

     if isinstance(start, tuple):
         start = date_type(*start)
@@ -1277,7 +1277,7 @@ def test_invalid_date_range_cftime_inputs(
     end: str | None,
     periods: int | None,
     freq: str | None,
-    inclusive: Literal["up", None],
+    inclusive: Literal["up"] | None,
 ) -> None:
     with pytest.raises(ValueError):
         date_range(start, end, periods, freq, inclusive=inclusive, use_cftime=True)  # type: ignore[arg-type]
4 changes: 2 additions & 2 deletions xarray/tests/test_coding_times.py
@@ -2,7 +2,7 @@

 import warnings
 from datetime import datetime, timedelta
-from itertools import product
+from itertools import product, starmap
 from typing import Literal

 import numpy as np
@@ -576,7 +576,7 @@ def test_infer_datetime_units_with_NaT(dates, expected) -> None:
 @pytest.mark.parametrize(("date_args", "expected"), _CFTIME_DATETIME_UNITS_TESTS)
 def test_infer_cftime_datetime_units(calendar, date_args, expected) -> None:
     date_type = _all_cftime_date_types()[calendar]
-    dates = [date_type(*args) for args in date_args]
+    dates = list(starmap(date_type, date_args))
     assert expected == infer_datetime_units(dates)


2 changes: 1 addition & 1 deletion xarray/tests/test_plot.py
@@ -2789,7 +2789,7 @@ def test_accessor(self) -> None:
     def test_add_guide(
         self,
         add_guide: bool | None,
-        hue_style: Literal["continuous", "discrete", None],
+        hue_style: Literal["continuous", "discrete"] | None,
         legend: bool,
         colorbar: bool,
     ) -> None:
5 changes: 3 additions & 2 deletions xarray/tests/test_plugins.py
@@ -2,6 +2,7 @@

 import sys
 from importlib.metadata import EntryPoint, EntryPoints
+from itertools import starmap
 from unittest import mock

 import pytest
@@ -48,7 +49,7 @@ def dummy_duplicated_entrypoints():
         ["engine2", "xarray.tests.test_plugins:backend_1", "xarray.backends"],
         ["engine2", "xarray.tests.test_plugins:backend_2", "xarray.backends"],
     ]
-    eps = [EntryPoint(name, value, group) for name, value, group in specs]
+    eps = list(starmap(EntryPoint, specs))
     return eps


@@ -91,7 +92,7 @@ def test_backends_dict_from_pkg() -> None:
         ["engine1", "xarray.tests.test_plugins:backend_1", "xarray.backends"],
         ["engine2", "xarray.tests.test_plugins:backend_2", "xarray.backends"],
     ]
-    entrypoints = [EntryPoint(name, value, group) for name, value, group in specs]
+    entrypoints = list(starmap(EntryPoint, specs))
     engines = plugins.backends_dict_from_pkg(entrypoints)
     assert len(engines) == 2
     assert engines.keys() == {"engine1", "engine2"}
3 changes: 1 addition & 2 deletions xarray/util/generate_aggregations.py
@@ -692,8 +692,7 @@ def write_methods(filepath, generators, preamble):
         f.write(preamble)
         for gen in generators:
             for lines in gen.generate_methods():
-                for line in lines:
-                    f.write(line + "\n")
+                f.writelines(line + "\n" for line in lines)


 if __name__ == "__main__":
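writelines() accepts any iterable of strings and adds no separators of its own, so passing a generator that appends "\n" to each line writes exactly what the explicit loop wrote. A sketch using io.StringIO in place of a real file:

import io

lines = ["def mean(self):", "    ..."]

buf_loop = io.StringIO()
for line in lines:
    buf_loop.write(line + "\n")

buf_writelines = io.StringIO()
# The generator supplies the trailing newlines; writelines just writes each item.
buf_writelines.writelines(line + "\n" for line in lines)

assert buf_loop.getvalue() == buf_writelines.getvalue()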