ruff compliance for D401. (#5687)
* ruff compliance for D205 (wip)

* wip

* wip

* wip

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* various minor fixes.

* fix doctest.

* gallery noqa and minor fixes.

* removed comments

* ruff compliance for D401.

* removed temp noqa's

* wip

* wip

* fix typo.

* Update lib/iris/fileformats/_ff.py

Co-authored-by: Bill Little <[email protected]>

* review actions.

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Bill Little <[email protected]>
3 people authored Jan 18, 2024
1 parent 54f9e98 commit 67bb5ae
Showing 52 changed files with 154 additions and 155 deletions.
1 change: 0 additions & 1 deletion .ruff.toml
@@ -29,7 +29,6 @@ lint.ignore = [
"D102", # Missing docstring in public method
# (D-3) Temporary, before an initial review, either fix ocurrences or move to (2).
"D103", # Missing docstring in public function
"D401", # First line of docstring should be in imperative mood: ...

# pyupgrade (UP)
# https://docs.astral.sh/ruff/rules/#pyupgrade-up
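
For context, D401 is the pydocstyle rule (enforced here via ruff) requiring the first line of a docstring to be phrased in the imperative mood. A minimal sketch of the kind of rewording the rule drives, using a hypothetical helper rather than code from this commit:

def snapshot_before(cube):
    """Returns a copy of the cube."""  # D401 flags this: "Returns" is indicative, not imperative
    return cube.copy()


def snapshot_after(cube):
    """Return a copy of the cube."""  # D401-compliant: the summary line starts with an imperative verb
    return cube.copy()

With the "D401" entry removed from lint.ignore above, ruff now reports any remaining docstrings of the first form.
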
8 changes: 5 additions & 3 deletions benchmarks/benchmarks/__init__.py
@@ -10,7 +10,7 @@


def disable_repeat_between_setup(benchmark_object):
"""Decorator for benchmarks where object persistence would be inappropriate.
"""Benchmarks where object persistence would be inappropriate (decorator).
E.g:
* Benchmarking data realisation
@@ -86,7 +86,7 @@ def addedmem_mb(self):

@staticmethod
def decorator(decorated_func):
"""Decorates this benchmark to track growth in resident memory during execution.
"""Benchmark to track growth in resident memory during execution.
Intended for use on ASV ``track_`` benchmarks. Applies the
:class:`TrackAddedMemoryAllocation` context manager to the benchmark
@@ -107,7 +107,9 @@ def _wrapper(*args, **kwargs):


def on_demand_benchmark(benchmark_object):
"""Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set.
"""Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set.
This is a decorator.
For benchmarks that, for whatever reason, should not be run by default.
E.g:
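
The decorator docstrings above also follow the related D205 layout adopted in this branch: an imperative one-line summary, a blank line, then the longer description. A hedged sketch of that layout with a hypothetical decorator (not the real on_demand_benchmark implementation):

import os
from functools import wraps


def run_only_when_opted_in(benchmark_func):
    """Skip the decorated benchmark unless an opt-in environment variable is set.

    Illustrative only: imperative summary line (D401), blank line, then the
    detail (D205). The environment variable name below is assumed.
    """

    @wraps(benchmark_func)
    def _wrapper(*args, **kwargs):
        # Gate the benchmark behind an explicit opt-in flag.
        if not os.environ.get("EXAMPLE_ON_DEMAND"):
            raise NotImplementedError("benchmark only runs on demand")
        return benchmark_func(*args, **kwargs)

    return _wrapper
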
4 changes: 2 additions & 2 deletions benchmarks/benchmarks/cperf/load.py
@@ -10,7 +10,7 @@
@on_demand_benchmark
class SingleDiagnosticLoad(SingleDiagnosticMixin):
def time_load(self, _, __, ___):
"""The 'real world comparison'.
"""Perform a 'real world comparison'.
* UM coords are always realised (DimCoords).
* LFRic coords are not realised by default (MeshCoords).
@@ -26,7 +26,7 @@ def time_load(self, _, __, ___):
assert coord.has_lazy_bounds() == expecting_lazy_coords

def time_load_w_realised_coords(self, _, __, ___):
"""A valuable extra comparison where both UM and LFRic coords are realised."""
"""Valuable extra comparison where both UM and LFRic coords are realised."""
cube = self.load()
for coord_name in "longitude", "latitude":
coord = cube.coord(coord_name)
2 changes: 1 addition & 1 deletion benchmarks/benchmarks/cube.py
@@ -45,7 +45,7 @@ def setup(self):
raise NotImplementedError

def create(self):
"""Generic cube creation.
"""Create a cube (generic).
cube_kwargs allow dynamic inclusion of different components;
specified in subclasses.
@@ -92,7 +92,7 @@ def setup_cache(self):
)

def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True):
"""The combine-tests "standard" setup operation.
"""Combine-tests "standard" setup operation.
Load the source cubes (full-mesh + region) from disk.
These are specific to the cubesize parameter.
8 changes: 4 additions & 4 deletions benchmarks/benchmarks/generate_data/stock.py
@@ -49,7 +49,7 @@ def _external(func_name_, temp_file_dir, **kwargs_):
def create_file__xios_2d_face_half_levels(
temp_file_dir, dataset_name, n_faces=866, n_times=1
):
"""Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`.
"""Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`.
Have taken control of temp_file_dir
@@ -67,7 +67,7 @@ def create_file__xios_2d_face_half_levels(
def create_file__xios_3d_face_half_levels(
temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38
):
"""Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`.
"""Create file wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`.
Have taken control of temp_file_dir
@@ -84,7 +84,7 @@ def create_file__xios_3d_face_half_levels(


def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False):
"""Wrapper for :meth:iris.tests.stock.mesh.sample_mesh`."""
"""Sample mesh wrapper for :meth:iris.tests.stock.mesh.sample_mesh`."""

def _external(*args, **kwargs):
from iris.experimental.ugrid import save_mesh
@@ -112,7 +112,7 @@ def _external(*args, **kwargs):


def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"):
"""Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`.
"""Sample meshcoord wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`.
Parameters deviate from the original as cannot pass a
:class:`iris.experimental.ugrid.Mesh to the separate Python instance - must
2 changes: 1 addition & 1 deletion benchmarks/benchmarks/sperf/combine_regions.py
@@ -83,7 +83,7 @@ def setup_cache(self):
)

def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True):
"""The combine-tests "standard" setup operation.
"""Combine-tests "standard" setup operation.
Load the source cubes (full-mesh + region) from disk.
These are specific to the cubesize parameter.
2 changes: 1 addition & 1 deletion benchmarks/bm_runner.py
@@ -332,7 +332,7 @@ def add_asv_arguments(self) -> None:
@staticmethod
@abstractmethod
def func(args: argparse.Namespace):
"""The function to return when the subparser is parsed.
"""Return when the subparser is parsed.
`func` is then called, performing the user's selected sub-command.
4 changes: 2 additions & 2 deletions docs/gallery_code/general/plot_custom_file_loading.py
@@ -90,7 +90,7 @@


def load_NAME_III(filename):
"""Loads the Met Office's NAME III grid output files.
"""Load the Met Office's NAME III grid output files.
Loads the Met Office's NAME III grid output files returning headers, column
definitions and data arrays as 3 separate lists.
@@ -180,7 +180,7 @@ def load_NAME_III(filename):


def NAME_to_cube(filenames, callback):
"""Returns a generator of cubes given a list of filenames and a callback."""
"""Return a generator of cubes given a list of filenames and a callback."""
for filename in filenames:
header, column_headings, data_arrays = load_NAME_III(filename)

2 changes: 1 addition & 1 deletion docs/gallery_code/meteorology/plot_COP_maps.py
@@ -32,7 +32,7 @@


def cop_metadata_callback(cube, field, filename):
"""Function which adds an "Experiment" coordinate which comes from the filename."""
"""Add an "Experiment" coordinate which comes from the filename."""
# Extract the experiment name (such as A1B or E1) from the filename (in
# this case it is just the start of the file name, before the first ".").
fname = os.path.basename(filename) # filename without path.
2 changes: 1 addition & 1 deletion docs/gallery_tests/conftest.py
@@ -18,7 +18,7 @@

@pytest.fixture
def image_setup_teardown():
"""Setup and teardown fixture.
"""Perform setup and teardown fixture.
Ensures all figures are closed before and after test to prevent one test
polluting another if it fails with a figure unclosed.
2 changes: 1 addition & 1 deletion docs/gallery_tests/test_gallery_examples.py
@@ -17,7 +17,7 @@


def gallery_examples():
"""Generator to yield all current gallery examples."""
"""Entry point for generator to yield all current gallery examples."""
for example_file in GALLERY_DIR.glob("*/plot*.py"):
yield example_file.stem

12 changes: 3 additions & 9 deletions lib/iris/_concatenate.py
@@ -151,7 +151,7 @@ def name(self):
class _DerivedCoordAndDims(
namedtuple("DerivedCoordAndDims", ["coord", "dims", "aux_factory"])
):
"""Container for a derived coordinate and dimnesions(s).
"""Container for a derived coordinate and dimensions(s).
Container for a derived coordinate, the associated AuxCoordFactory, and the
associated data dimension(s) spanned over a :class:`iris.cube.Cube`.
@@ -385,10 +385,7 @@ class _CubeSignature:
"""

def __init__(self, cube):
"""Represents the cube metadata and associated coordinate metadata.
Represents the cube metadata and associated coordinate metadata that
allows suitable cubes for concatenation to be identified.
"""Represent the cube metadata and associated coordinate metadata.
Parameters
----------
@@ -733,10 +730,7 @@ class _ProtoCube:
"""Framework for concatenating multiple source-cubes over one common dimension."""

def __init__(self, cube):
"""Create a new _ProtoCube and record the cube as a source-cube.
Create a new _ProtoCube from the given cube and record the cube
as a source-cube.
"""Create a new _ProtoCube from the given cube and record the cube as a source-cube.
Parameters
----------
4 changes: 2 additions & 2 deletions lib/iris/_data_manager.py
@@ -117,7 +117,7 @@ def __ne__(self, other):
return result

def __repr__(self):
"""Returns an string representation of the instance."""
"""Return an string representation of the instance."""
fmt = "{cls}({data!r})"
result = fmt.format(data=self.core_data(), cls=type(self).__name__)

@@ -269,7 +269,7 @@ def shape(self):
return self.core_data().shape

def copy(self, data=None):
"""Returns a deep copy of this :class:`~iris._data_manager.DataManager` instance.
"""Return a deep copy of this :class:`~iris._data_manager.DataManager` instance.
Parameters
----------
6 changes: 3 additions & 3 deletions lib/iris/_merge.py
@@ -1182,7 +1182,7 @@ def _report_duplicate(self, nd_indexes, group_by_nd_index):
raise iris.exceptions.DuplicateDataError(msg)

def merge(self, unique=True):
"""Returns the list of cubes resulting from merging the registered source-cubes.
"""Return the list of cubes resulting from merging the registered source-cubes.
Parameters
----------
@@ -1319,7 +1319,7 @@ def register(self, cube, error_on_mismatch=False):
return match

def _guess_axis(self, name):
"""Returns a "best guess" axis name of the candidate dimension.
"""Return a "best guess" axis name of the candidate dimension.
Heuristic categoration of the candidate dimension
(i.e. scalar_defn index) into either label 'T', 'Z', 'Y', 'X'
@@ -1564,7 +1564,7 @@ def _get_cube(self, data):
return cube

def _nd_index(self, position):
"""Returns the n-dimensional index of thr source-cube, within the merged cube."""
"""Return the n-dimensional index of thr source-cube, within the merged cube."""
index = []

# Determine the index of the source-cube cell for each dimension.
2 changes: 1 addition & 1 deletion lib/iris/_representation/cube_printout.py
@@ -134,7 +134,7 @@ class CubePrinter:
N_INDENT_EXTRA = 4

def __init__(self, cube_or_summary):
"""An object that provides a printout of a cube.
"""Object that provides a printout of a cube.
Parameters
----------
2 changes: 1 addition & 1 deletion lib/iris/analysis/_interpolation.py
@@ -158,7 +158,7 @@ def get_xy_coords(cube, dim_coords=False):


def snapshot_grid(cube):
"""Helper function that returns deep copies of lateral (dimension) coordinates from a cube."""
"""Return deep copies of lateral (dimension) coordinates from a cube."""
x, y = get_xy_dim_coords(cube)
return x.copy(), y.copy()

2 changes: 1 addition & 1 deletion lib/iris/analysis/_regrid.py
@@ -407,7 +407,7 @@ def __init__(self, src_grid_cube, target_grid_cube, weights=None):

@staticmethod
def _get_horizontal_coord(cube, axis):
"""Gets the horizontal coordinate on the supplied cube along the specified axis.
"""Get the horizontal coordinate on the supplied cube along the specified axis.
Parameters
----------
2 changes: 1 addition & 1 deletion lib/iris/analysis/cartography.py
@@ -376,7 +376,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth):


def area_weights(cube, normalize=False):
r"""Returns an array of area weights, with the same dimensions as the cube.
r"""Return an array of area weights, with the same dimensions as the cube.
This is a 2D lat/lon area weights array, repeated over the non lat/lon
dimensions.
8 changes: 4 additions & 4 deletions lib/iris/analysis/maths.py
@@ -323,7 +323,7 @@ def _add_subtract_common(
dim=None,
in_place=False,
):
"""Function which shares common code between addition and subtraction of cubes.
"""Share common code between addition and subtraction of cubes.
Parameters
----------
@@ -532,7 +532,7 @@ def divide(cube, other, dim=None, in_place=False):


def exponentiate(cube, exponent, in_place=False):
"""Returns the result of the given cube to the power of a scalar.
"""Return the result of the given cube to the power of a scalar.
Parameters
----------
@@ -821,7 +821,7 @@ def _binary_op_common(
in_place=False,
sanitise_metadata=True,
):
"""Function which shares common code between binary operations.
"""Share common code between binary operations.
Parameters
----------
@@ -1172,7 +1172,7 @@ def __call__(
new_name=None,
**kwargs_data_func,
):
"""Applies the ifunc to the cube(s).
"""Apply the ifunc to the cube(s).
Parameters
----------
6 changes: 3 additions & 3 deletions lib/iris/analysis/trajectory.py
@@ -37,7 +37,7 @@ class Trajectory:
"""A series of given waypoints with pre-calculated sample points."""

def __init__(self, waypoints, sample_count=10):
"""Defines a trajectory using a sequence of waypoints.
"""Define a trajectory using a sequence of waypoints.
Parameters
----------
@@ -128,7 +128,7 @@ def _get_interp_points(self):
return [(k, v) for k, v in points.items()]

def _src_cube_anon_dims(self, cube):
"""A helper method to locate the index of anonymous dimensions.
"""Locate the index of anonymous dimensions.
A helper method to locate the index of anonymous dimensions on the
interpolation target, ``cube``.
@@ -712,7 +712,7 @@ class UnstructuredNearestNeigbourRegridder:
# TODO: cache the necessary bits of the operation so reuse can actually
# be more efficient.
def __init__(self, src_cube, target_grid_cube):
"""A nearest-neighbour regridder.
"""Nearest-neighbour regridder.
A nearest-neighbour regridder to perform regridding from the source
grid to the target grid.
5 changes: 4 additions & 1 deletion lib/iris/common/_split_attribute_dicts.py
@@ -66,7 +66,9 @@ def _convert_pairedkeys_dict_to_splitattrs(dic):


def adjust_for_split_attribute_dictionaries(operation):
"""Decorator to make a function of attribute-dictionaries work with split attributes.
"""Generate attribute-dictionaries to work with split attributes.
Decorator to make a function of attribute-dictionaries work with split attributes.
The wrapped function of attribute-dictionaries is currently always one of "equals",
"combine" or "difference", with signatures like :
@@ -91,6 +93,7 @@ def adjust_for_split_attribute_dictionaries(operation):
"Split" dictionaries are all of class :class:`~iris.cube.CubeAttrsDict`, since
the only usage of 'split' attribute dictionaries is in Cubes (i.e. they are not
used for cube components).
"""

@wraps(operation)
