ruff compliance for rule D202. (#5674)
tkknight authored Jan 3, 2024
1 parent adb97f0 commit cd41438
Showing 38 changed files with 11 additions and 89 deletions.
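For orientation, a minimal illustration of what rule D202 ("No blank lines allowed after function docstring") flags, and the compliant form the changes below converge on. This snippet is invented for illustration and is not part of the commit:

def flagged():
    """Summary docstring."""

    return 1  # D202: blank line between docstring and body


def compliant():
    """Summary docstring."""
    return 1  # compliant: the body follows the docstring directly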
1 change: 0 additions & 1 deletion .ruff.toml
@@ -34,7 +34,6 @@ lint.ignore = [
"D100", # Missing docstring in public module
"D103", # Missing docstring in public function
"D200", # One-line docstring should fit on one line
"D202", # No blank lines allowed after function docstring
"D205", # 1 blank line required between summary line and description
"D401", # First line of docstring should be in imperative mood: ...

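With "D202" removed from the lint.ignore list above, ruff now enforces the rule repository-wide. Assuming a standard ruff setup, the same check can be reproduced locally with ruff check --select D202 . from the repository root.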
3 changes: 0 additions & 3 deletions benchmarks/benchmarks/experimental/ugrid/regions_combine.py
@@ -68,7 +68,6 @@ def _make_region_cubes(self, full_mesh_cube):

def setup_cache(self):
"""Cache all the necessary source data on disk."""

# Control dask, to minimise memory usage + allow largest data.
self.fix_dask_settings()

@@ -106,7 +105,6 @@ def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True):
NOTE: various test classes override + extend this.
"""

# Load source cubes (full-mesh and regions)
with PARSE_UGRID_ON_LOAD.context():
self.full_mesh_cube = load_cube(
@@ -143,7 +141,6 @@ def fix_dask_settings(self):
which is optimised for space saving so we can test largest data.
"""

import dask.config as dcfg

# Use single-threaded, to avoid process-switching costs and minimise memory usage.
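The body of fix_dask_settings is collapsed out of the hunk above. As a hedged sketch only (these exact calls are assumptions, not the benchmark's real code), a space-saving dask configuration along the lines its docstring and comments describe could look like:

import dask.config as dcfg

def fix_dask_settings():
    # Assumed sketch: a single-threaded scheduler avoids process-switching
    # costs and keeps memory usage to a minimum.
    dcfg.set(scheduler="synchronous")
    # Assumed sketch: cap automatic chunk sizes, optimised for space saving.
    dcfg.set({"array.chunk-size": "128MiB"})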
2 changes: 0 additions & 2 deletions benchmarks/benchmarks/generate_data/stock.py
@@ -56,7 +56,6 @@ def create_file__xios_2d_face_half_levels(
todo: is create_file__xios_2d_face_half_levels still appropriate now we can
properly save Mesh Cubes?
"""

return _create_file__xios_common(
func_name="create_file__xios_2d_face_half_levels",
dataset_name=dataset_name,
@@ -75,7 +74,6 @@ def create_file__xios_3d_face_half_levels(
todo: is create_file__xios_3d_face_half_levels still appropriate now we can
properly save Mesh Cubes?
"""

return _create_file__xios_common(
func_name="create_file__xios_3d_face_half_levels",
dataset_name=dataset_name,
3 changes: 0 additions & 3 deletions benchmarks/benchmarks/sperf/combine_regions.py
@@ -64,7 +64,6 @@ def _make_region_cubes(self, full_mesh_cube):

def setup_cache(self):
"""Cache all the necessary source data on disk."""

# Control dask, to minimise memory usage + allow largest data.
self.fix_dask_settings()

@@ -102,7 +101,6 @@ def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True):
NOTE: various test classes override + extend this.
"""

# Load source cubes (full-mesh and regions)
with PARSE_UGRID_ON_LOAD.context():
self.full_mesh_cube = load_cube(
@@ -142,7 +140,6 @@ def fix_dask_settings(self):
which is optimised for space saving so we can test largest data.
"""

import dask.config as dcfg

# Use single-threaded, to avoid process-switching costs and minimise memory usage.
1 change: 0 additions & 1 deletion benchmarks/bm_runner.py
@@ -68,7 +68,6 @@ def _check_requirements(package: str) -> None:

def _prep_data_gen_env() -> None:
"""Create/access a separate, unchanging environment for generating test data."""

python_version = "3.11"
data_gen_var = "DATA_GEN_PYTHON"
if data_gen_var in environ:
6 changes: 3 additions & 3 deletions docs/gallery_code/general/plot_custom_file_loading.py
@@ -89,11 +89,12 @@


def load_NAME_III(filename):
"""Loads the Met Office's NAME III grid output files returning headers, column
"""Loads the Met Office's NAME III grid output files.

Loads the Met Office's NAME III grid output files returning headers, column
definitions and data arrays as 3 separate lists.
"""

# Loading a file gives a generator of lines which can be progressed using
# the next() function. This will come in handy as we wish to progress
# through the file line by line.
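As a side note to the comment above (a hypothetical sketch, not code from this file): an open file handle is itself an iterator of lines, so next() progresses it one line at a time.

with open("name_iii_output.txt") as file_handle:  # hypothetical file name
    header_line = next(file_handle)  # first line of the file
    next_line = next(file_handle)  # progressed line by line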
@@ -179,7 +180,6 @@ def load_NAME_III(filename):

def NAME_to_cube(filenames, callback):
"""Returns a generator of cubes given a list of filenames and a callback."""

for filename in filenames:
header, column_headings, data_arrays = load_NAME_III(filename)

5 changes: 1 addition & 4 deletions docs/gallery_code/meteorology/plot_COP_maps.py
@@ -31,10 +31,7 @@


def cop_metadata_callback(cube, field, filename):
"""A function which adds an "Experiment" coordinate which comes from the
filename.
"""

"""Function which adds an "Experiment" coordinate which comes from the filename."""
# Extract the experiment name (such as A1B or E1) from the filename (in
# this case it is just the start of the file name, before the first ".").
fname = os.path.basename(filename) # filename without path.
2 changes: 0 additions & 2 deletions docs/gallery_tests/test_gallery_examples.py
@@ -16,7 +16,6 @@

def gallery_examples():
"""Generator to yield all current gallery examples."""

for example_file in GALLERY_DIR.glob("*/plot*.py"):
yield example_file.stem

@@ -30,7 +29,6 @@ def test_plot_example(
iris_future_defaults,
):
"""Test that all figures from example code match KGO."""

module = importlib.import_module(example)

# Run example.
1 change: 0 additions & 1 deletion lib/iris/_concatenate.py
@@ -759,7 +759,6 @@ def __init__(self, cube):
@property
def axis(self):
"""Return the nominated dimension of concatenation."""

return self._axis

def concatenate(self):
5 changes: 3 additions & 2 deletions lib/iris/_lazy_data.py
@@ -58,7 +58,9 @@ def _optimum_chunksize_internals(
dims_fixed=None,
dask_array_chunksize=dask.config.get("array.chunk-size"),
):
"""Reduce or increase an initial chunk shape to get close to a chosen ideal
"""Reduce or increase an initial chunk shape.

Reduce or increase an initial chunk shape to get close to a chosen ideal
size, while prioritising the splitting of the earlier (outer) dimensions
and keeping intact the later (inner) ones.
@@ -100,7 +102,6 @@
"chunks = [c[0] for c in normalise_chunks('auto', ...)]".
"""

# Set the chunksize limit.
if limit is None:
# Fetch the default 'optimal' chunksize from the dask config.
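The docstring's closing note points to the dask equivalent. A hedged sketch of that call (dask spells it normalize_chunks; the shape and dtype here are invented for illustration):

import numpy as np
import dask.config
from dask.array import normalize_chunks

limit = dask.config.get("array.chunk-size")  # the default 'optimal' size
chunks = [
    c[0]
    for c in normalize_chunks("auto", shape=(1000, 1000), dtype=np.dtype("f8"), limit=limit)
]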
10 changes: 4 additions & 6 deletions lib/iris/_merge.py
@@ -167,7 +167,6 @@ class _CoordPayload(namedtuple("CoordPayload", ["scalar", "vector", "factory_def

def as_signature(self):
"""Construct and return a :class:`_CoordSignature` from the payload."""

return _CoordSignature(
self.scalar.defns,
self.vector.dim_coords_and_dims,
@@ -1072,11 +1071,12 @@ class ProtoCube:
"""

def __init__(self, cube):
"""Create a new ProtoCube from the given cube and record the cube
as a source-cube.
"""
"""Create a new ProtoCube from the given cube.

Create a new ProtoCube from the given cube and record the cube as a
source-cube.
"""
# Default hint ordering for candidate dimension coordinates.
self._hints = [
"time",
@@ -1533,7 +1533,6 @@ def _nd_index(self, position):
within the merged cube.
"""

index = []

# Determine the index of the source-cube cell for each dimension.
@@ -1634,7 +1633,6 @@ def _build_signature(self, cube):
The cube signature.
"""

return _CubeSignature(
cube.metadata,
cube.shape,
1 change: 0 additions & 1 deletion lib/iris/_representation/cube_printout.py
@@ -268,7 +268,6 @@ def _decorated_table(table, name_padding=None):
Note: 'name_padding' sets a minimum width for the name column (#0).
"""

# Copy the input table + extract the header + its columns.
table = table.copy()
header = table.rows[0]
1 change: 0 additions & 1 deletion lib/iris/analysis/_regrid.py
@@ -51,7 +51,6 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare(src_cube, weights, grid
The 'regrid info' returned can be re-used over many cubes.
"""

# Get the source cube x and y 2D auxiliary coordinates.
sx, sy = src_cube.coord(axis="x"), src_cube.coord(axis="y")
# Get the target grid cube x and y dimension coordinates.
1 change: 0 additions & 1 deletion lib/iris/analysis/cartography.py
@@ -1012,7 +1012,6 @@ def _transform_distance_vectors(u_dist, v_dist, ds, dx2, dy2):
along the x and y directions of the target crs at each location.
"""

# Scale input distance vectors --> source-coordinate differentials.
u1, v1 = u_dist / ds.dx1, v_dist / ds.dy1
# Transform vectors into the target system.
2 changes: 0 additions & 2 deletions lib/iris/analysis/geometry.py
@@ -26,7 +26,6 @@ def _extract_relevant_cube_slice(cube, geometry):
If cube and geometry don't overlap, returns None.
"""

# Validate the input parameters
if not cube.coords(axis="x") or not cube.coords(axis="y"):
raise ValueError("The cube must contain x and y axes.")
@@ -177,7 +176,6 @@ def geometry_area_weights(cube, geometry, normalize=False):
Default is False.
"""

# extract smallest subcube containing geometry
shape = cube.shape
extraction_results = _extract_relevant_cube_slice(cube, geometry)
2 changes: 0 additions & 2 deletions lib/iris/analysis/maths.py
@@ -741,7 +741,6 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place=
See more at :doc:`/userguide/real_and_lazy_data`.
"""

if not isinstance(ufunc, np.ufunc):
ufunc_name = getattr(ufunc, "__name__", "function passed to apply_ufunc")
emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc"
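For context, a hedged usage sketch of apply_ufunc with a genuine numpy ufunc; the toy cube is invented for the example. Anything that is not an np.ufunc instance raises the error built above:

import numpy as np
from iris.cube import Cube
from iris.analysis.maths import apply_ufunc

cube = Cube(np.linspace(0.0, 1.5, 4), units="radian")  # invented toy cube
result = apply_ufunc(np.sin, cube, new_unit="1", new_name="sin_of_data")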
@@ -1090,7 +1089,6 @@ def ws_units_func(u_cube, v_cube):
cs_cube = cs_ifunc(cube, axis=1)
"""

self._data_func_name = getattr(
data_func, "__name__", "data_func argument passed to IFunc"
)
1 change: 0 additions & 1 deletion lib/iris/analysis/stats.py
@@ -71,7 +71,6 @@ def pearsonr(
This operation is non-lazy.
"""

# Assign larger cube to cube_1
if cube_b.ndim > cube_a.ndim:
cube_1 = cube_b
1 change: 0 additions & 1 deletion lib/iris/common/resolve.py
@@ -1295,7 +1295,6 @@ def _metadata_resolve(self):
'aux_coords' or 'dim_coords' of the participating cubes.
"""

# Determine the cube dim, aux and scalar coordinate items
# for each individual cube.
self.lhs_cube_category = self._categorise_items(self.lhs_cube)
3 changes: 0 additions & 3 deletions lib/iris/coord_systems.py
@@ -1025,7 +1025,6 @@ def __init__(
It is only valid to provide one of true_scale_lat and scale_factor_at_projection_origin
"""

#: True latitude of planar origin in degrees.
self.central_lat = float(central_lat)

@@ -1140,7 +1139,6 @@ def __init__(
"""

super().__init__(
central_lat=central_lat,
central_lon=central_lon,
@@ -1200,7 +1198,6 @@ def __init__(
secant_latitudes=(33, 45)
"""

#: True latitude of planar origin in degrees.
self.central_lat = _arg_default(central_lat, 39.0)

3 changes: 0 additions & 3 deletions lib/iris/coords.py
@@ -1335,7 +1335,6 @@ def __common_cmp__(self, other, operator_method):
Non-Cell vs Cell comparison is used to define Constraint matching.
"""

if (isinstance(other, list) and len(other) == 1) or (
isinstance(other, np.ndarray) and other.shape == (1,)
):
@@ -2019,7 +2018,6 @@ def contiguous_bounds(self):

def is_monotonic(self):
"""Return True if, and only if, this Coord is monotonic."""

if self.ndim != 1:
raise iris.exceptions.CoordinateMultiDimError(self)

@@ -2120,7 +2118,6 @@ def collapsed(self, dims_to_collapse=None):
Replaces the points & bounds with a simple bounded region.
"""

# Ensure dims_to_collapse is a tuple to be able to pass
# through to numpy
if isinstance(dims_to_collapse, (int, np.integer)):
4 changes: 0 additions & 4 deletions lib/iris/cube.py
@@ -242,7 +242,6 @@ def insert(self, index, cube):

def xml(self, checksum=False, order=True, byteorder=True):
"""Return a string of the XML that this list of cubes represents."""

doc = Document()
cubes_xml_element = doc.createElement("cubes")
cubes_xml_element.setAttribute("xmlns", XML_NAMESPACE_URI)
@@ -1152,7 +1151,6 @@ def _sort_xml_attrs(cls, doc):

def _walk_nodes(node):
"""Note: _walk_nodes is called recursively on child elements."""

# we don't want to copy the children here, so take a shallow copy
new_node = node.cloneNode(deep=False)

@@ -1665,7 +1663,6 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None):
Raises a ValueError if an ancillary variable with identical metadata
already exists on the cube.
"""

if self.ancillary_variables(ancillary_variable):
raise iris.exceptions.CannotAddError(
"Duplicate ancillary variables not permitted"
@@ -4548,7 +4545,6 @@ def rolling_window(self, coord, aggregator, window, **kwargs):
possible windows of size 3 from the original cube.
""" # noqa: D214, D406, D407, D410, D411

# Update weights kwargs (if necessary) to handle different types of
# weights
weights_info = None
1 change: 0 additions & 1 deletion lib/iris/experimental/ugrid/mesh.py
@@ -1440,7 +1440,6 @@ def connectivity(
:class:`Mesh` that matched the given criteria.
"""

result = self._connectivity_manager.filter(
item=item,
standard_name=standard_name,
4 changes: 0 additions & 4 deletions lib/iris/fileformats/_ff.py
@@ -316,7 +316,6 @@ def __init__(self, filename, word_depth=DEFAULT_FF_WORD_DEPTH):
FFHeader object.
"""

#: File name of the FieldsFile.
self.ff_filename = filename
self._word_depth = word_depth
@@ -411,7 +410,6 @@ def shape(self, name):
Dimension tuple.
"""

if name in _FF_HEADER_POINTERS:
value = getattr(self, name)[1:]
else:
@@ -465,7 +463,6 @@ def __init__(self, filename, read_data=False, word_depth=DEFAULT_FF_WORD_DEPTH):
... print(field)
"""

self._ff_header = FFHeader(filename, word_depth=word_depth)
self._word_depth = word_depth
self._filename = filename
@@ -823,7 +820,6 @@ def _parse_binary_stream(file_like, dtype=np.float64, count=-1):
object and generate as many values as possible.
"""

# There are a wide range of types supported, we just need to know the byte
# size of the object, so we just make sure we've got an instance of a
# np.dtype
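A small illustration of the comment above (assumed, not this module's code): coercing any dtype-like argument through np.dtype gives uniform access to the byte size.

import numpy as np

dtype = np.dtype(np.float64)  # also accepts strings such as "f8"
word_size = dtype.itemsize  # 8 bytes per value for float64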
1 change: 0 additions & 1 deletion lib/iris/fileformats/_nc_load_rules/actions.py
@@ -544,7 +544,6 @@ def run_actions(engine):
The specific cube being translated is "engine.cube".
"""

# default (all cubes) action, always runs
action_default(engine) # This should run the default rules.

