From dcff29dd9d8e1842b9987301f28462181d2892e0 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:50:17 +0000 Subject: [PATCH] ruff checks for numpydocs (#5630) * wip * ruff compliant. * enabled ignore --- .ruff.toml | 8 ++ lib/iris/config.py | 18 ++--- lib/iris/fileformats/netcdf/_dask_locks.py | 9 +-- lib/iris/fileformats/netcdf/loader.py | 33 +++----- lib/iris/fileformats/netcdf/saver.py | 94 +++++++--------------- lib/iris/io/__init__.py | 27 +++---- 6 files changed, 70 insertions(+), 119 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index f3f9f1d3f0..0702e77757 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -28,6 +28,14 @@ lint.ignore = [ # pydocstyle (D) # https://docs.astral.sh/ruff/rules/#pydocstyle-d "D", + # Permanent + "D105", # Missing docstring in magic method + + # Temporary, to be removed when we are more compliant + "D417", # Missing argument descriptions in the docstring + "D101", # Missing docstring in public class + "D102", # Missing docstring in public method + "D106", # Missing docstring in public nested class # pyupgrade (UP) # https://docs.astral.sh/ruff/rules/#pyupgrade-up diff --git a/lib/iris/config.py b/lib/iris/config.py index 22fb93a06a..25aeffdb33 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Provides access to Iris-specific configuration values. +"""Provides access to Iris-specific configuration values. The default configuration values can be overridden by creating the file ``iris/etc/site.cfg``. If it exists, this file must conform to the format @@ -42,8 +41,7 @@ def get_logger( name, datefmt=None, fmt=None, level=None, propagate=None, handler=True ): - """ - Create a custom class for logging. + """Create a custom class for logging. 
Create a :class:`logging.Logger` with a :class:`logging.StreamHandler` and custom :class:`logging.Formatter`. @@ -114,8 +112,7 @@ def get_logger( # Returns simple string options def get_option(section, option, default=None): - """ - Return the option value for the given section. + """Return the option value for the given section. Returns the option value for the given section, or the default value if the section/option is not present. @@ -129,8 +126,7 @@ def get_option(section, option, default=None): # Returns directory path options def get_dir_option(section, option, default=None): - """ - Return the directory path from the given option and section. + """Return the directory path from the given option and section. Returns the directory path from the given option and section, or returns the given default value if the section/option is not present @@ -194,8 +190,7 @@ class NetCDF: """Control Iris NetCDF options.""" def __init__(self, conventions_override=None): - """ - Set up NetCDF processing options for Iris. + """Set up NetCDF processing options for Iris. Parameters ---------- @@ -274,8 +269,7 @@ def _defaults_dict(self): @contextlib.contextmanager def context(self, **kwargs): - """ - Allow temporary modification of the options via a context manager. + """Allow temporary modification of the options via a context manager. Accepted kwargs are the same as can be supplied to the Option. diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py index 82edbf202e..eb60afcf8a 100644 --- a/lib/iris/fileformats/netcdf/_dask_locks.py +++ b/lib/iris/fileformats/netcdf/_dask_locks.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Module containing code to create locks enabling dask workers to co-operate. +"""Module containing code to create locks enabling dask workers to co-operate. 
This matter is complicated by needing different solutions for different dask scheduler types, i.e. local 'threads' scheduler, local 'processes' or @@ -81,8 +80,7 @@ def dask_scheduler_is_distributed(): def get_dask_array_scheduler_type(): - """ - Work out what type of scheduler an array.compute*() will use. + """Work out what type of scheduler an array.compute*() will use. Returns one of 'distributed', 'threads' or 'processes'. The return value is a valid argument for dask.config.set(scheduler=). @@ -117,8 +115,7 @@ def get_dask_array_scheduler_type(): def get_worker_lock(identity: str): - """ - Return a mutex Lock which can be shared by multiple Dask workers. + """Return a mutex Lock which can be shared by multiple Dask workers. The type of Lock generated depends on the dask scheduler type, which must therefore be set up before this is called. diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 84e04c1589..1488c0afd3 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. +"""Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. See : `NetCDF User's Guide `_ and `netCDF4 python module `_. @@ -159,8 +158,7 @@ def _set_attributes(attributes, key, value): def _add_unused_attributes(iris_object, cf_var): - """ - Populate the attributes of a cf element with the "unused" attributes. + """Populate the attributes of a cf element with the "unused" attributes. Populate the attributes of a cf element with the "unused" attributes from the associated CF-netCDF variable. 
That is, all those that aren't CF @@ -200,8 +198,7 @@ def _get_actual_dtype(cf_var): def _get_cf_var_data(cf_var, filename): - """ - Get an array representing the data of a CF variable. + """Get an array representing the data of a CF variable. This is typically a lazy array based around a NetCDFDataProxy, but if the variable is "sufficiently small", we instead fetch the data as a real (numpy) array. @@ -292,8 +289,8 @@ def _get_cf_var_data(cf_var, filename): class _OrderedAddableList(list): - """ - A custom container object for actions recording. + """A custom container object for actions recording. + Used purely in actions debugging, to accumulate a record of which actions were activated. @@ -521,8 +518,7 @@ def coord_from_term(term): def _translate_constraints_to_var_callback(constraints): - """ - Translate load constraints into a simple data-var filter function, if possible. + """Translate load constraints into a simple data-var filter function, if possible. Returns ------- @@ -566,8 +562,7 @@ def inner(cf_datavar): def load_cubes(file_sources, callback=None, constraints=None): - """ - Load cubes from a list of NetCDF filenames/OPeNDAP URLs. + """Load cubes from a list of NetCDF filenames/OPeNDAP URLs. Parameters ---------- @@ -578,6 +573,8 @@ def load_cubes(file_sources, callback=None, constraints=None): callback : function, optional Function which can be passed on to :func:`iris.io.run_callback`. + constraints : optional + Returns ------- Generator of loaded NetCDF :class:`iris.cube.Cube`. @@ -678,8 +675,7 @@ class Modes(Enum): AS_DASK = auto() def __init__(self, var_dim_chunksizes=None): - """ - Provide user control of Dask chunking. + """Provide user control of Dask chunking. The NetCDF loader is controlled by the single instance of this: the :data:`~iris.fileformats.netcdf.loader.CHUNK_CONTROL` object. 
@@ -709,8 +705,7 @@ def set( var_names: Union[str, Iterable[str]] = None, **dimension_chunksizes: Mapping[str, int], ) -> None: - """ - Control the Dask chunk sizes applied to NetCDF variables during loading. + r"""Control the Dask chunk sizes applied to NetCDF variables during loading. Parameters ---------- @@ -784,8 +779,7 @@ def set( @contextmanager def from_file(self) -> None: - """ - Ensures the chunk sizes are loaded in from NetCDF file variables. + r"""Ensure the chunk sizes are loaded in from NetCDF file variables. Raises ------ @@ -808,8 +802,7 @@ def from_file(self) -> None: @contextmanager def as_dask(self) -> None: - """ - Relies on Dask :external+dask:doc:`array` to control chunk sizes. + """Relies on Dask :external+dask:doc:`array` to control chunk sizes. Notes ----- diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5bfc8754fb..3c154b8511 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -2,8 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Module to support the saving of Iris cubes to a NetCDF file. +"""Module to support the saving of Iris cubes to a NetCDF file. Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. @@ -186,7 +185,6 @@ def append(self, name, coord): ---------- name: CF name of the associated coordinate. - coord: The coordinate of the associated CF name. @@ -248,8 +246,7 @@ def coord(self, name): def _bytes_if_ascii(string): - """ - Convert string to a byte string (str in py2k, bytes in py3k). + """Convert string to a byte string (str in py2k, bytes in py3k). 
Convert the given string to a byte string (str in py2k, bytes in py3k) if the given string can be encoded to ascii, else maintain the type @@ -268,8 +265,7 @@ def _bytes_if_ascii(string): def _setncattr(variable, name, attribute): - """ - Put the given attribute on the given netCDF4 Data type. + """Put the given attribute on the given netCDF4 Data type. Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. @@ -293,8 +289,7 @@ def _setncattr(variable, name, attribute): def _data_fillvalue_check(arraylib, data, check_value): - """ - Check whether an array is masked, and whether it contains a fill-value. + """Check whether an array is masked, and whether it contains a fill-value. Parameters ---------- @@ -331,8 +326,7 @@ class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning): def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): - """ - Work out whether there was a possible or actual fill-value collision. + """Work out whether there was a possible or actual fill-value collision. From the given information, work out whether there was a possible or actual fill-value collision, and if so construct a warning. @@ -390,8 +384,7 @@ class Saver: """A manager for saving netcdf files.""" def __init__(self, filename, netcdf_format, compute=True): - """ - Manage saving netcdf files. + """Manage saving netcdf files. Parameters ---------- @@ -549,8 +542,7 @@ def write( packing=None, fill_value=None, ): - """ - Wrap for saving cubes to a NetCDF file. + """Wrap for saving cubes to a NetCDF file. Parameters ---------- @@ -854,8 +846,7 @@ def _create_cf_dimensions( self._dataset.createDimension(dim_name, size) def _add_mesh(self, cube_or_mesh): - """ - Add the cube's mesh, and all related variables to the dataset. + """Add the cube's mesh, and all related variables to the dataset. Add the cube's mesh, and all related variables to the dataset. 
Includes all the mesh-element coordinate and connectivity variables. @@ -991,8 +982,7 @@ def _add_inner_related_vars( self, cube, cf_var_cube, dimension_names, coordlike_elements ): - """ - Create a set of variables for aux-coords, ancillaries or cell-measures. + """Create a set of variables for aux-coords, ancillaries or cell-measures. Create a set of variables for aux-coords, ancillaries or cell-measures, and attach them to the parent data variable. @@ -1037,8 +1027,7 @@ _setncattr(cf_var_cube, role_attribute_name, variable_names) def _add_aux_coords(self, cube, cf_var_cube, dimension_names): - """ - Add aux. coordinate to the dataset and associate with the data variable. + """Add aux. coordinate to the dataset and associate with the data variable. Parameters ---------- @@ -1078,8 +1067,7 @@ ) def _add_cell_measures(self, cube, cf_var_cube, dimension_names): - """ - Add cell measures to the dataset and associate with the data variable. + """Add cell measures to the dataset and associate with the data variable. Parameters ---------- @@ -1098,8 +1086,7 @@ ) def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): - """ - Add ancillary variables measures to the dataset and associate with the data variable. + """Add ancillary variables to the dataset and associate with the data variable. Parameters ---------- @@ -1118,8 +1105,7 @@ ) def _add_dim_coords(self, cube, dimension_names): - """ - Add coordinate variables to NetCDF dataset. + """Add coordinate variables to NetCDF dataset.
Parameters ---------- @@ -1139,8 +1125,7 @@ def _add_dim_coords(self, cube, dimension_names): self._name_coord_map.append(cf_name, coord) def _add_aux_factories(self, cube, cf_var_cube, dimension_names): - """ - Represent the presence of dimensionless vertical coordinates. + """Represent the presence of dimensionless vertical coordinates. Modify the variables of the NetCDF dataset to represent the presence of dimensionless vertical coordinates based on @@ -1236,8 +1221,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): _setncattr(cf_var, "formula_terms", formula_terms) def _get_dim_names(self, cube_or_mesh): - """ - Determine suitable CF-netCDF data dimension names. + """Determine suitable CF-netCDF data dimension names. Parameters ---------- @@ -1262,8 +1246,7 @@ def _get_dim_names(self, cube_or_mesh): def record_dimension( names_list, dim_name, length, matching_coords=None ): - """ - Record a file dimension, its length and associated "coordinates". + """Record a file dimension, its length and associated "coordinates". Record a file dimension, its length and associated "coordinates" (which may in fact also be connectivities). @@ -1485,8 +1468,7 @@ def cf_valid_var_name(var_name): @staticmethod def _cf_coord_standardised_units(coord): - """ - Determine a suitable units from a given coordinate. + """Determine a suitable units from a given coordinate. Parameters ---------- @@ -1547,8 +1529,7 @@ def _ensure_valid_dtype(self, values, src_name, src_object): return values def _create_cf_bounds(self, coord, cf_var, cf_name): - """ - Create the associated CF-netCDF bounds variable. + """Create the associated CF-netCDF bounds variable. Parameters ---------- @@ -1608,8 +1589,7 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): ) def _get_cube_variable_name(self, cube): - """ - Return a CF-netCDF variable name for the given cube. + """Return a CF-netCDF variable name for the given cube. 
Parameters ---------- @@ -1633,8 +1613,7 @@ def _get_cube_variable_name(self, cube): return cf_name def _get_coord_variable_name(self, cube_or_mesh, coord): - """ - Return a CF-netCDF variable name for a given coordinate-like element. + """Return a CF-netCDF variable name for a given coordinate-like element. Parameters ---------- @@ -1696,8 +1675,7 @@ return cf_name def _get_mesh_variable_name(self, mesh): - """ - Return a CF-netCDF variable name for the mesh. + """Return a CF-netCDF variable name for the mesh. Parameters ---------- @@ -1723,8 +1701,7 @@ return cf_name def _create_mesh(self, mesh): - """ - Create a mesh variable in the netCDF dataset. + """Create a mesh variable in the netCDF dataset. Parameters ---------- @@ -1807,8 +1784,7 @@ element_dims=None, fill_value=None, ): - """ - Create theCF-netCDF variable given dimensional_metadata. + """Create the CF-netCDF variable given dimensional_metadata. Create the associated CF-netCDF variable in the netCDF dataset for the given dimensional_metadata. @@ -1955,8 +1931,7 @@ return cf_name def _create_cf_cell_methods(self, cube, dimension_names): - """ - Create CF-netCDF string representation of a cube cell methods. + """Create CF-netCDF string representation of a cube cell methods. Parameters ---------- @@ -2007,8 +1982,7 @@ return " ".join(cell_methods) def _create_cf_grid_mapping(self, cube, cf_var_cube): - """ - Create CF-netCDF grid mapping and associated CF-netCDF variable. + """Create CF-netCDF grid mapping and associated CF-netCDF variable. Create CF-netCDF grid mapping variable and associated CF-netCDF data variable grid mapping attribute.
@@ -2278,8 +2252,7 @@ def _create_cf_data_variable( fill_value=None, **kwargs, ): - """ - Create CF-netCDF data variable for the cube and any associated grid mapping. + """Create CF-netCDF data variable for the cube and any associated grid mapping. # TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can # be removed. @@ -2352,8 +2325,7 @@ def _create_cf_data_variable( dtype = data.dtype.newbyteorder("=") def set_packing_ncattrs(cfvar): - """ - Set netCDF packing attributes. + """Set netCDF packing attributes. NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. @@ -2447,8 +2419,7 @@ def set_packing_ncattrs(cfvar): return cf_var def _increment_name(self, varname): - """ - Increment string name or begin increment. + """Increment string name or begin increment. Avoidance of conflicts between variable names, where the name is incremented to distinguish it from others. @@ -2566,8 +2537,7 @@ def store(data, cf_var, fill_info): ) def delayed_completion(self) -> Delayed: - """ - Perform file completion for delayed saves. + """Perform file completion for delayed saves. Create and return a :class:`dask.delayed.Delayed` to perform file completion for delayed saves. @@ -2638,8 +2608,7 @@ def no_op(): return result def complete(self, issue_warnings=True) -> List[Warning]: - """ - Complete file by computing any delayed variable saves. + """Complete file by computing any delayed variable saves. This requires that the Saver has closed the dataset (exited its context). @@ -2692,8 +2661,7 @@ def save( fill_value=None, compute=True, ): - r""" - Save cube(s) to a netCDF file, given the cube and the filename. + r"""Save cube(s) to a netCDF file, given the cube and the filename. * Iris will write CF 1.7 compliant NetCDF files. * **If split-attribute saving is disabled**, i.e. 
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 08586c81b7..6dde73fb68 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -36,8 +36,7 @@ def __setitem__(self, key, value): def run_callback(callback, cube, field, filename): - """ - Run the callback mechanism given the appropriate arguments. + """Run the callback mechanism given the appropriate arguments. Parameters ---------- @@ -83,8 +82,7 @@ def run_callback(callback, cube, field, filename): def decode_uri(uri, default="file"): - r""" - Decode a single URI into scheme and scheme-specific parts. + r"""Decode a single URI into scheme and scheme-specific parts. In addition to well-formed URIs, it also supports bare file paths as strings or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are @@ -146,8 +144,7 @@ def decode_uri(uri, default="file"): def expand_filespecs(file_specs, files_expected=True): - """ - Find all matching file paths from a list of file-specs. + """Find all matching file paths from a list of file-specs. Parameters ---------- @@ -201,8 +198,7 @@ def expand_filespecs(file_specs, files_expected=True): def load_files(filenames, callback, constraints=None): - """ - Create a generator of Cubes from given files. + """Create a generator of Cubes from given files. Take a list of filenames which may also be globs, and optionally a constraint set and a callback function, and returns a @@ -241,8 +237,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): - """ - Create generator of Cubes from the given OPeNDAP URLs. + """Create generator of Cubes from the given OPeNDAP URLs. Take a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. @@ -276,8 +271,7 @@ def load_http(urls, callback): def load_data_objects(urls, callback): - """ - Take a list of data-source objects and a callback function, returns a generator of Cubes. 
+ """Take a list of data-source objects and a callback function, returns a generator of Cubes. The 'objects' take the place of 'uris' in the load calls. The appropriate types of the data-source objects are expected to be @@ -343,8 +337,7 @@ def _check_init_savers(): def add_saver(file_extension, new_saver): - """ - Add a custom saver to the Iris session. + """Add a custom saver to the Iris session. Parameters ---------- @@ -370,8 +363,7 @@ def add_saver(file_extension, new_saver): def find_saver(filespec): - """ - Find the saver function appropriate to the given filename or extension. + """Find the saver function appropriate to the given filename or extension. Parameters ---------- @@ -401,8 +393,7 @@ def find_saver(filespec): def save(source, target, saver=None, **kwargs): - """ - Save one or more Cubes to file (or other writeable). + """Save one or more Cubes to file (or other writeable). Iris currently supports three file formats for saving, which it can recognise by filename extension: