diff --git a/docs/src/conf.py b/docs/src/conf.py
index e349000862..c59aca4909 100644
--- a/docs/src/conf.py
+++ b/docs/src/conf.py
@@ -15,7 +15,6 @@
#
# All configuration values have a default; values that are commented out
# serve to show the default.
-
# ----------------------------------------------------------------------------
import datetime
@@ -195,7 +194,7 @@ def _dotv(version):
todo_include_todos = True
# api generation configuration
-autodoc_member_order = "groupwise"
+autodoc_member_order = "alphabetical"
autodoc_default_flags = ["show-inheritance"]
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints
diff --git a/lib/iris/config.py b/lib/iris/config.py
index c1d1de5793..22fb93a06a 100644
--- a/lib/iris/config.py
+++ b/lib/iris/config.py
@@ -27,6 +27,7 @@
The [optional] name of the logger to notify when first imported.
----------
+
"""
import configparser
@@ -42,41 +43,37 @@ def get_logger(
name, datefmt=None, fmt=None, level=None, propagate=None, handler=True
):
"""
+ Create a custom logger.
+
Create a :class:`logging.Logger` with a :class:`logging.StreamHandler`
and custom :class:`logging.Formatter`.
- Args:
-
- * name:
+ Parameters
+ ----------
+ name : str
The name of the logger. Typically this is the module filename that
owns the logger.
-
- Kwargs:
-
- * datefmt:
+ datefmt : str, optional
The date format string of the :class:`logging.Formatter`.
Defaults to ``%d-%m-%Y %H:%M:%S``.
-
- * fmt:
+ fmt : str, optional
The additional format string of the :class:`logging.Formatter`.
This is appended to the default format string
``%(asctime)s %(name)s %(levelname)s - %(message)s``.
-
- * level:
+ level : optional
The threshold level of the logger. Defaults to ``INFO``.
-
- * propagate:
+ propagate : bool, optional
Sets the ``propagate`` attribute of the :class:`logging.Logger`,
which determines whether events logged to this logger will be
passed to the handlers of higher level loggers. Defaults to
``False``.
-
- * handler:
+ handler : bool, optional
Create and attach a :class:`logging.StreamHandler` to the
logger. Defaults to ``True``.
- Returns:
- A :class:`logging.Logger`.
+ Returns
+ -------
+ :class:`logging.Logger`.
"""
if level is None:
@@ -118,6 +115,8 @@ def get_logger(
# Returns simple string options
def get_option(section, option, default=None):
"""
+ Return the option value for the given section.
+
Returns the option value for the given section, or the default value
if the section/option is not present.
@@ -131,6 +130,8 @@ def get_option(section, option, default=None):
# Returns directory path options
def get_dir_option(section, option, default=None):
"""
+ Return the directory path from the given option and section.
+
Returns the directory path from the given option and section, or
returns the given default value if the section/option is not present
or does not represent a valid directory.
@@ -196,20 +197,19 @@ def __init__(self, conventions_override=None):
"""
Set up NetCDF processing options for Iris.
- Currently accepted kwargs:
-
- * conventions_override (bool):
+ Parameters
+ ----------
+ conventions_override : bool, optional
Define whether the CF Conventions version (e.g. `CF-1.6`) set when
saving a cube to a NetCDF file should be defined by
- Iris (the default) or the cube being saved.
-
- If `False` (the default), specifies that Iris should set the
+ Iris (the default) or the cube being saved. If `False`
+ (the default), specifies that Iris should set the
CF Conventions version when saving cubes as NetCDF files.
If `True`, specifies that the cubes being saved to NetCDF should
set the CF Conventions version for the saved NetCDF files.
- Example usages:
-
+ Examples
+ --------
* Specify, for the lifetime of the session, that we want all cubes
written to NetCDF to define their own CF Conventions versions::
@@ -276,6 +276,7 @@ def _defaults_dict(self):
def context(self, **kwargs):
"""
Allow temporary modification of the options via a context manager.
+
Accepted kwargs are the same as can be supplied to the Option.
"""
diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py
index 99817c5921..cf550fbb57 100644
--- a/lib/iris/fileformats/netcdf/__init__.py
+++ b/lib/iris/fileformats/netcdf/__init__.py
@@ -3,8 +3,7 @@
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
"""
-Module to support the loading and saving of NetCDF files, also using the CF conventions
-for metadata interpretation.
+Support loading and saving NetCDF files using CF conventions for metadata interpretation.
See : `NetCDF User's Guide `_
and `netCDF4 python module `_.
diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py
index b7727a1ab7..82edbf202e 100644
--- a/lib/iris/fileformats/netcdf/_dask_locks.py
+++ b/lib/iris/fileformats/netcdf/_dask_locks.py
@@ -5,45 +5,49 @@
"""
Module containing code to create locks enabling dask workers to co-operate.
-This matter is complicated by needing different solutions for different dask scheduler
-types, i.e. local 'threads' scheduler, local 'processes' or distributed.
+This matter is complicated by needing different solutions for different dask
+scheduler types, i.e. local 'threads' scheduler, local 'processes' or
+distributed.
-In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset
-targeting an output file, and creates a Saver.file_write_lock object to serialise
-write-accesses to the file from dask tasks : All dask-task file writes go via a
-"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link
-to the Saver.file_write_lock, and uses it to prevent workers from fouling each other.
+In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a
+netCDF4.Dataset targeting an output file, and creates a Saver.file_write_lock
+object to serialise write-accesses to the file from dask tasks : All dask-task
+file writes go via a "iris.fileformats.netcdf.saver.NetCDFWriteProxy" object,
+which also contains a link to the Saver.file_write_lock, and uses it to prevent
+workers from fouling each other.
For each chunk written, the NetCDFWriteProxy acquires the common per-file lock;
-opens a Dataset on the file; performs a write to the relevant variable; closes the
-Dataset and then releases the lock. This process is obviously very similar to what the
-NetCDFDataProxy does for reading lazy chunks.
+opens a Dataset on the file; performs a write to the relevant variable; closes
+the Dataset and then releases the lock. This process is obviously very similar
+to what the NetCDFDataProxy does for reading lazy chunks.
-For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The workers
-(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those
-contain the common lock, and this is simply **the same object** for all workers, since
-they share an address space.
+For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The
+workers (threads) execute tasks which contain a NetCDFWriteProxy, as above.
+All of those contain the common lock, and this is simply **the same object**
+for all workers, since they share an address space.
For a distributed scheduler, the Saver.lock is a `distributed.Lock()` which is
identified with the output filepath. This is distributed to the workers by
-serialising the task function arguments, which will include the NetCDFWriteProxy.
-A worker behaves like a process, though it may execute on a remote machine. When a
-distributed.Lock is deserialised to reconstruct the worker task, this creates an object
-that communicates with the scheduler. These objects behave as a single common lock,
-as they all have the same string 'identity', so the scheduler implements inter-process
-communication so that they can mutually exclude each other.
+serialising the task function arguments, which will include the
+NetCDFWriteProxy. A worker behaves like a process, though it may execute on a
+remote machine. When a distributed.Lock is deserialised to reconstruct the
+worker task, this creates an object that communicates with the scheduler.
+These objects behave as a single common lock, as they all have the same string
+'identity', so the scheduler implements inter-process communication so that
+they can mutually exclude each other.
It is also *conceivable* that multiple processes could write to the same file in
-parallel, if the operating system supports it. However, this also requires that the
-libnetcdf C library is built with parallel access option, which is not common.
-With the "ordinary" libnetcdf build, a process which attempts to open for writing a file
-which is _already_ open for writing simply raises an access error.
-In any case, Iris netcdf saver will not support this mode of operation, at present.
+parallel, if the operating system supports it. However, this also requires
+that the libnetcdf C library is built with the parallel access option, which
+is not common. With the "ordinary" libnetcdf build, a process which attempts
+to open a file for writing which is _already_ open for writing simply raises
+an access error. In any case, the Iris netcdf saver does not support this
+mode of operation at present.
We don't currently support a local "processes" type scheduler. If we did, the
-behaviour should be very similar to a distributed scheduler. It would need to use some
-other serialisable shared-lock solution in place of 'distributed.Lock', which requires
-a distributed scheduler to function.
+behaviour should be very similar to a distributed scheduler. It would need to
+use some other serialisable shared-lock solution in place of
+'distributed.Lock', which requires a distributed scheduler to function.
"""
import threading
@@ -55,7 +59,7 @@
# A dedicated error class, allowing filtering and testing of errors raised here.
-class DaskSchedulerTypeError(ValueError):
+class DaskSchedulerTypeError(ValueError): # noqa: D101
pass
@@ -82,11 +86,13 @@ def get_dask_array_scheduler_type():
Returns one of 'distributed', 'threads' or 'processes'.
The return value is a valid argument for dask.config.set(scheduler=).
- This cannot distinguish between distributed local and remote clusters -- both of
- those simply return 'distributed'.
+ This cannot distinguish between distributed local and remote clusters --
+ both of those simply return 'distributed'.
- NOTE: this takes account of how dask is *currently* configured. It will be wrong
- if the config changes before the compute actually occurs.
+ Notes
+ -----
+ This takes account of how dask is *currently* configured. It will
+ be wrong if the config changes before the compute actually occurs.
"""
if dask_scheduler_is_distributed():
@@ -114,8 +120,12 @@ def get_worker_lock(identity: str):
"""
Return a mutex Lock which can be shared by multiple Dask workers.
- The type of Lock generated depends on the dask scheduler type, which must therefore
- be set up before this is called.
+ The type of Lock generated depends on the dask scheduler type, which must
+ therefore be set up before this is called.
+
+ Parameters
+ ----------
+ identity : str
+     An identity string: locks created with the same identity behave as
+     a single common lock.
"""
scheduler_type = get_dask_array_scheduler_type()
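A hedged sketch of how the locking helpers above combine (the per-chunk
open/write/close cycle is paraphrased from the module docstring; the filepath
is illustrative)::

    from iris.fileformats.netcdf import _dask_locks

    # Inspect the active scheduler: 'distributed', 'threads' or 'processes'.
    scheduler = _dask_locks.get_dask_array_scheduler_type()

    # Obtain a lock suited to that scheduler, keyed on the output filepath.
    # For 'distributed' this is a distributed.Lock; for 'threads' a plain
    # threading.Lock shared across the workers' common address space.
    lock = _dask_locks.get_worker_lock("/tmp/output.nc")

    with lock:
        pass  # open the Dataset, write one chunk, close the Dataset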
diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
index b5226b8e42..5abffb896f 100644
--- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py
+++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
@@ -24,7 +24,10 @@
class _ThreadSafeWrapper(ABC):
"""
- Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK.
+ Contains a netCDF4 class instance, ensuring all API calls are wrapped.
+
+ Contains a netCDF4 class instance, ensuring all API calls are wrapped
+ within _GLOBAL_NETCDF4_LOCK.
Designed to 'gate keep' all the instance's API calls, but allowing the
same API as if working directly with the instance itself.
@@ -117,7 +120,7 @@ class VariableWrapper(_ThreadSafeWrapper):
def setncattr(self, *args, **kwargs) -> None:
"""
- Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK.
Only defined explicitly in order to get some mocks to work.
"""
@@ -141,11 +144,12 @@ def dimensions(self) -> typing.List[str]:
def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]:
"""
- Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+ Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimensions are simply replaced with their
- respective DimensionWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK,
+ returning DimensionWrappers. The original returned netCDF4.Dimensions
+ are simply replaced with their respective DimensionWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
dimensions_ = list(
@@ -171,11 +175,12 @@ class GroupWrapper(_ThreadSafeWrapper):
@property
def dimensions(self) -> typing.Dict[str, DimensionWrapper]:
"""
- Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+ Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimensions are simply replaced with their
- respective DimensionWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning DimensionWrappers. The original returned netCDF4.Dimensions
+ are simply replaced with their respective DimensionWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
dimensions_ = self._contained_instance.dimensions
@@ -186,11 +191,13 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]:
def createDimension(self, *args, **kwargs) -> DimensionWrapper:
"""
- Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper.
+ Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimension is simply replaced with its
- respective DimensionWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createDimension() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. The original returned
+ netCDF4.Dimension is simply replaced with its respective
+ DimensionWrapper, ensuring that downstream calls are also performed
+ within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_dimension = self._contained_instance.createDimension(
@@ -204,11 +211,12 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper:
@property
def variables(self) -> typing.Dict[str, VariableWrapper]:
"""
- Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+ Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Variables are simply replaced with their
- respective VariableWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning VariableWrappers. The original returned netCDF4.Variables
+ are simply replaced with their respective VariableWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
variables_ = self._contained_instance.variables
@@ -218,11 +226,13 @@ def variables(self) -> typing.Dict[str, VariableWrapper]:
def createVariable(self, *args, **kwargs) -> VariableWrapper:
"""
- Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper.
+ Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Variable is simply replaced with its
- respective VariableWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createVariable() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. The original
+ returned netCDF4.Variable is simply replaced with its respective
+ VariableWrapper, ensuring that downstream calls are also performed
+ within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_variable = self._contained_instance.createVariable(
@@ -234,7 +244,10 @@ def get_variables_by_attributes(
self, *args, **kwargs
) -> typing.List[VariableWrapper]:
"""
- Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+ Call get_variables_by_attributes() from netCDF4.Group/Dataset.
+
+ Call get_variables_by_attributes() from netCDF4.Group/Dataset
+ within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
The original returned netCDF4.Variables are simply replaced with their
respective VariableWrappers, ensuring that downstream calls are
@@ -254,7 +267,10 @@ def get_variables_by_attributes(
@property
def groups(self):
"""
- Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers.
+ Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
+
+ Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning GroupWrappers.
The original returned netCDF4.Groups are simply replaced with their
respective GroupWrappers, ensuring that downstream calls are
@@ -267,7 +283,10 @@ def groups(self):
@property
def parent(self):
"""
- Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper.
+ Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
+
+ Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning a GroupWrapper.
The original returned netCDF4.Group is simply replaced with its
respective GroupWrapper, ensuring that downstream calls are
@@ -279,11 +298,13 @@ def parent(self):
def createGroup(self, *args, **kwargs):
"""
- Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper.
+ Call createGroup() from netCDF4.Group/Dataset.
- The original returned netCDF4.Group is simply replaced with its
- respective GroupWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createGroup() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. The original returned
+ netCDF4.Group is simply replaced with its respective GroupWrapper,
+ ensuring that downstream calls are also performed within
+ _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_group = self._contained_instance.createGroup(*args, **kwargs)
@@ -304,11 +325,12 @@ class DatasetWrapper(GroupWrapper):
@classmethod
def fromcdl(cls, *args, **kwargs):
"""
- Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper.
+ Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dataset is simply replaced with its
- respective DatasetWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK,
+ returning a DatasetWrapper. The original returned netCDF4.Dataset is
+ simply replaced with its respective DatasetWrapper, ensuring that
+ downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs)
@@ -329,12 +351,13 @@ def __init__(self, shape, dtype, path, variable_name, fill_value):
@property
-    def ndim(self):
+    def ndim(self):  # noqa: D102
return len(self.shape)
def __getitem__(self, keys):
# Using a DatasetWrapper causes problems with invalid ID's and the
- # netCDF4 library, presumably because __getitem__ gets called so many
- # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead.
+ # netCDF4 library, presumably because __getitem__ gets called so many
+ # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead.
with _GLOBAL_NETCDF4_LOCK:
dataset = netCDF4.Dataset(self.path)
try:
@@ -363,11 +386,14 @@ def __setstate__(self, state):
class NetCDFWriteProxy:
"""
- The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a
- netCDF4.Variable, but where the data is to be ***written to***.
+ An object mimicking the data access of a netCDF4.Variable.
+
+ The "opposite" of a NetCDFDataProxy : An object mimicking the data access
+ of a netCDF4.Variable, but where the data is to be ***written to***.
- It encapsulates the netcdf file and variable which are actually to be written to.
- This opens the file each time, to enable writing the data chunk, then closes it.
+ It encapsulates the netcdf file and variable which are actually to be
+ written to. This opens the file each time, to enable writing the data
+ chunk, then closes it.
TODO: could be improved with a caching scheme, but this just about works.
"""
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
index 623d1eb6c7..84e04c1589 100644
--- a/lib/iris/fileformats/netcdf/loader.py
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -3,8 +3,7 @@
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
"""
-Module to support the loading of Iris cubes from NetCDF files, also using the CF
-conventions for metadata interpretation.
+Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation.
See : `NetCDF User's Guide `_
and `netCDF4 python module `_.
@@ -150,7 +149,6 @@ def _actions_activation_stats(engine, cf_name):
def _set_attributes(attributes, key, value):
"""Set attributes dictionary, converting unicode strings appropriately."""
-
if isinstance(value, str):
try:
attributes[str(key)] = str(value)
@@ -162,6 +160,8 @@ def _set_attributes(attributes, key, value):
def _add_unused_attributes(iris_object, cf_var):
"""
+ Populate the attributes of a cf element with the "unused" attributes.
+
Populate the attributes of a cf element with the "unused" attributes
from the associated CF-netCDF variable. That is, all those that aren't CF
reserved terms.
@@ -398,10 +398,7 @@ def fix_attributes_all_elements(role_name):
def _load_aux_factory(engine, cube):
- """
- Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.
-
- """
+ """Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory."""
formula_type = engine.requires.get("formula_type")
if formula_type in [
"atmosphere_sigma_coordinate",
@@ -527,9 +524,10 @@ def _translate_constraints_to_var_callback(constraints):
"""
Translate load constraints into a simple data-var filter function, if possible.
- Returns:
- * function(cf_var:CFDataVariable): --> bool,
- or None.
+ Returns
+ -------
+ function or None
+     A filter function ``(cf_var: CFDataVariable) -> bool``, or None.
For now, ONLY handles a single NameConstraint with no 'STASH' component.
@@ -569,25 +567,24 @@ def inner(cf_datavar):
def load_cubes(file_sources, callback=None, constraints=None):
"""
- Loads cubes from a list of NetCDF filenames/OPeNDAP URLs.
-
- Args:
+ Load cubes from a list of NetCDF filenames/OPeNDAP URLs.
- * file_sources (string/list):
+ Parameters
+ ----------
+ file_sources : str or list
One or more NetCDF filenames/OPeNDAP URLs to load from.
OR open datasets.
- Kwargs:
-
- * callback (callable function):
+ callback : function, optional
Function which can be passed on to :func:`iris.io.run_callback`.
- Returns:
- Generator of loaded NetCDF :class:`iris.cube.Cube`.
+ Returns
+ -------
+ Generator of loaded NetCDF :class:`iris.cube.Cube`.
"""
# TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded
- # into standard behaviour.
+ # into standard behaviour.
# Deferred import to avoid circular imports.
from iris.experimental.ugrid.cf import CFUGridReader
from iris.experimental.ugrid.load import (
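A short, hedged illustration of the loader entry point documented above
(``load_cubes`` yields cubes lazily; the filename is invented)::

    from iris.fileformats.netcdf.loader import load_cubes

    # One or more NetCDF filenames/OPeNDAP URLs in; a generator of Cubes out.
    for cube in load_cubes("example.nc"):
        print(cube.summary(shorten=True))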
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index fcbc9a5383..b0bff313e9 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -3,6 +3,8 @@
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
"""
+Module to support the saving of Iris cubes to a NetCDF file.
+
Module to support the saving of Iris cubes to a NetCDF file, also using the CF
conventions for metadata interpretation.
@@ -178,19 +180,19 @@ def __init__(self):
self._map = []
def append(self, name, coord):
- """
- Append the given name and coordinate pair to the mapping.
+ """Append the given name and coordinate pair to the mapping.
- Args:
-
- * name:
+ Parameters
+ ----------
+ name : str
CF name of the associated coordinate.
- * coord:
+ coord
The coordinate of the associated CF name.
- Returns:
- None.
+ Returns
+ -------
+ None.
"""
self._map.append(CFNameCoordMap._Map(name, coord))
@@ -198,26 +200,24 @@ def append(self, name, coord):
@property
def names(self):
"""Return all the CF names."""
-
return [pair.name for pair in self._map]
@property
def coords(self):
"""Return all the coordinates."""
-
return [pair.coord for pair in self._map]
def name(self, coord):
- """
- Return the CF name, given a coordinate, or None if not recognised.
+ """Return the CF name, given a coordinate, or None if not recognised.
- Args:
-
- * coord:
+ Parameters
+ ----------
+ coord
The coordinate of the associated CF name.
- Returns:
- Coordinate or None.
+ Returns
+ -------
+ CF name or None.
"""
result = None
@@ -228,17 +228,16 @@ def name(self, coord):
return result
def coord(self, name):
- """
- Return the coordinate, given a CF name, or None if not recognised.
-
- Args:
+ """Return the coordinate, given a CF name, or None if not recognised.
- * name:
+ Parameters
+ ----------
+ name : str
CF name of the associated coordinate, or None if not recognised.
- Returns:
- CF name or None.
-
+ Returns
+ -------
+ Coordinate or None.
"""
result = None
for pair in self._map:
@@ -250,6 +249,8 @@ def coord(self, name):
def _bytes_if_ascii(string):
"""
+ Convert string to a byte string (str in py2k, bytes in py3k).
+
Convert the given string to a byte string (str in py2k, bytes in py3k)
if the given string can be encoded to ascii, else maintain the type
of the inputted string.
@@ -268,6 +269,8 @@ def _bytes_if_ascii(string):
def _setncattr(variable, name, attribute):
"""
+ Put the given attribute on the given netCDF4 Data type.
+
Put the given attribute on the given netCDF4 Data type, casting
attributes as we go to bytes rather than unicode.
@@ -321,9 +324,7 @@ def _data_fillvalue_check(arraylib, data, check_value):
class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning):
- """
- Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`.
- """
+ """Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`."""
# TODO: remove at the next major release.
pass
@@ -331,6 +332,8 @@ class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning):
def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False):
"""
+ Work out whether there was a possible or actual fill-value collision.
+
From the given information, work out whether there was a possible or actual
fill-value collision, and if so construct a warning.
@@ -342,12 +345,12 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False):
whether the data array was masked
contains_fill_value : bool
whether the data array contained the fill-value
- warn : bool
+ warn : bool, optional
if True, also issue any resulting warning immediately.
Returns
-------
- None or :class:`Warning`
+ None or :class:`Warning`
If not None, indicates a known or possible problem with filling
"""
@@ -388,15 +391,15 @@ class Saver:
def __init__(self, filename, netcdf_format, compute=True):
"""
- A manager for saving netcdf files.
+ Manage saving netcdf files.
Parameters
----------
- filename : string or netCDF4.Dataset
+ filename : str or netCDF4.Dataset
Name of the netCDF file to save the cube.
OR a writeable object supporting the :class:`netCF4.Dataset` api.
- netcdf_format : string
+ netcdf_format : str
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
@@ -547,16 +550,13 @@ def write(
fill_value=None,
):
"""
- Wrapper for saving cubes to a NetCDF file.
-
- Args:
+ Write a cube to a NetCDF file.
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
-
- Kwargs:
-
- * local_keys (iterable of strings):
+ local_keys : iterable of str, optional
An interable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
@@ -565,46 +565,38 @@ def write(
Has no effect if :attr:`iris.FUTURE.save_split_attrs` is ``True``.
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord`, optional
List of coordinate names (or coordinate objects)
corresponding to coordinate dimensions of `cube` to save with the
NetCDF dimension variable length 'UNLIMITED'. By default, no
unlimited dimensions are saved. Only the 'NETCDF4' format
supports multiple 'UNLIMITED' dimensions.
-
- * zlib (bool):
+ zlib : bool, optional
If `True`, the data will be compressed in the netCDF file using
gzip compression (default `False`).
-
- * complevel (int):
+ complevel : int, optional
An integer between 1 and 9 describing the level of compression
desired (default 4). Ignored if `zlib=False`.
-
- * shuffle (bool):
+ shuffle : bool, optional
If `True`, the HDF5 shuffle filter will be applied before
compressing the data (default `True`). This significantly improves
compression. Ignored if `zlib=False`.
-
- * fletcher32 (bool):
+ fletcher32 : bool, optional
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
-
- * contiguous (bool):
+ contiguous : bool, optional
If `True`, the variable data is stored contiguously on disk.
Default `False`. Setting to `True` for a variable with an unlimited
dimension will trigger an error.
-
- * chunksizes (tuple of int):
+ chunksizes : tuple of int, optional
Used to manually specify the HDF5 chunksizes for each dimension of
the variable. A detailed discussion of HDF chunking and I/O
- performance is available here:
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
+ performance is available
+ `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html>`__.
Basically, you want the chunk size for each dimension to match
as closely as possible the size of the data block that users will
read from the file. `chunksizes` cannot be set if `contiguous=True`.
-
- * endian (string):
+ endian : str, optional
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
@@ -612,8 +604,7 @@ def write(
on a computer with the opposite format as the one used to create
the file, there may be some performance advantage to be gained by
setting the endian-ness.
-
- * least_significant_digit (int):
+ least_significant_digit : int, optional
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this
produces 'lossy', but significantly more efficient compression. For
@@ -621,17 +612,16 @@ def write(
using `numpy.around(scale*data)/scale`, where `scale = 2**bits`,
and `bits` is determined so that a precision of 0.1 is retained (in
this case `bits=4`). From
- http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml:
+ `here <http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml>`__:
"least_significant_digit -- power of ten of the smallest decimal
place in unpacked data that is a reliable value". Default is
`None`, or no quantization, or 'lossless' compression.
-
- * packing (type or string or dict or list): A numpy integer datatype
- (signed or unsigned) or a string that describes a numpy integer
- dtype(i.e. 'i2', 'short', 'u4') or a dict of packing parameters as
- described below. This provides support for netCDF data packing as
- described in
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values
+ packing : type or str or dict or list, optional
+ A numpy integer datatype (signed or unsigned) or a string that
+ describes a numpy integer dtype(i.e. 'i2', 'short', 'u4') or a
+ dict of packing parameters as described below. This provides
+ support for netCDF data packing as described
+ `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values>`__.
If this argument is a type (or type string), appropriate values of
scale_factor and add_offset will be automatically calculated based
on `cube.data` and possible masking. For more control, pass a dict
@@ -641,20 +631,20 @@ def write(
manually using a dict to avoid this. The default is `None`, in
which case the datatype is determined from the cube and no packing
will occur.
-
- * fill_value:
+ fill_value : optional
The value to use for the `_FillValue` attribute on the netCDF
variable. If `packing` is specified the value of `fill_value`
should be in the domain of the packed data.
- Returns:
- None.
-
- .. note::
+ Returns
+ -------
+ None.
- The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
- `chunksizes` and `endian` keywords are silently ignored for netCDF
- 3 files that do not use HDF5.
+ Notes
+ -----
+ The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
+ `chunksizes` and `endian` keywords are silently ignored for netCDF
+ 3 files that do not use HDF5.
"""
# TODO: when iris.FUTURE.save_split_attrs defaults to True, we can deprecate the
@@ -767,6 +757,8 @@ def write(
@staticmethod
def check_attribute_compliance(container, data_dtype):
+ """Check attributte complliance."""
+
def _coerce_value(val_attr, val_attr_value, data_dtype):
val_attr_tmp = np.array(val_attr_value, dtype=data_dtype)
if (val_attr_tmp != val_attr_value).any():
@@ -798,15 +790,15 @@ def _coerce_value(val_attr, val_attr_value, data_dtype):
container.attributes[val_attr] = new_val
def update_global_attributes(self, attributes=None, **kwargs):
- """
+ """Update the CF global attributes.
+
Update the CF global attributes based on the provided
iterable/dictionary and/or keyword arguments.
- Args:
-
- * attributes (dict or iterable of key, value pairs):
+ Parameters
+ ----------
+ attributes : dict or iterable of key, value pairs
CF global attributes to be updated.
-
"""
# TODO: when when iris.FUTURE.save_split_attrs is removed, this routine will
# only be called once: it can reasonably be renamed "_set_global_attributes",
@@ -825,23 +817,18 @@ def update_global_attributes(self, attributes=None, **kwargs):
def _create_cf_dimensions(
self, cube, dimension_names, unlimited_dimensions=None
):
- """
- Create the CF-netCDF data dimensions.
-
- Args:
+ """Create the CF-netCDF data dimensions.
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` in which to lookup coordinates.
-
- Kwargs:
-
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord`, optional
List of coordinates to make unlimited (None by default).
- Returns:
- None.
-
+ Returns
+ -------
+ None.
"""
unlimited_dim_names = []
if unlimited_dimensions is not None:
@@ -868,6 +855,8 @@ def _create_cf_dimensions(
def _add_mesh(self, cube_or_mesh):
"""
+ Add the cube's mesh, and all related variables to the dataset.
+
Add the cube's mesh, and all related variables to the dataset.
Includes all the mesh-element coordinate and connectivity variables.
@@ -876,17 +865,16 @@ def _add_mesh(self, cube_or_mesh):
Here, we do *not* add the relevant referencing attributes to the
data-variable, because we want to create the data-variable later.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- Returns:
- * cf_mesh_name (string or None):
+ Returns
+ -------
+ cf_mesh_name : str or None
The name of the mesh variable created, or None if the cube does not
have a mesh.
-
"""
cf_mesh_name = None
@@ -1004,6 +992,8 @@ def _add_inner_related_vars(
self, cube, cf_var_cube, dimension_names, coordlike_elements
):
"""
+ Create a set of variables for aux-coords, ancillaries or cell-measures.
+
Create a set of variables for aux-coords, ancillaries or cell-measures,
and attach them to the parent data variable.
@@ -1048,17 +1038,16 @@ def _add_inner_related_vars(
def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
"""
- Add aux. coordinate to the dataset and associate with the data variable
-
- Args:
+ Add aux. coordinate to the dataset and associate with the data variable.
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
-
"""
from iris.experimental.ugrid.mesh import (
Mesh,
@@ -1090,17 +1079,16 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
def _add_cell_measures(self, cube, cf_var_cube, dimension_names):
"""
- Add cell measures to the dataset and associate with the data variable
+ Add cell measures to the dataset and associate with the data variable.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
-
"""
return self._add_inner_related_vars(
cube,
@@ -1111,18 +1099,16 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names):
def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names):
"""
- Add ancillary variables measures to the dataset and associate with the
- data variable
+ Add ancillary variables to the dataset and associate with the data variable.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
-
"""
return self._add_inner_related_vars(
cube,
@@ -1135,13 +1121,12 @@ def _add_dim_coords(self, cube, dimension_names):
"""
Add coordinate variables to NetCDF dataset.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
-
"""
# Ensure we create the netCDF coordinate variables first.
for coord in cube.dim_coords:
@@ -1155,19 +1140,20 @@ def _add_dim_coords(self, cube, dimension_names):
def _add_aux_factories(self, cube, cf_var_cube, dimension_names):
"""
- Modifies the variables of the NetCDF dataset to represent
+ Represent the presence of dimensionless vertical coordinates.
+
+ Modify the variables of the NetCDF dataset to represent
the presence of dimensionless vertical coordinates based on
the aux factories of the cube (if any).
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`)
+ cf_var_cube : :class:`netcdf.netcdf_variable`
CF variable cube representation.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
-
"""
primaries = []
for factory in cube.aux_factories:
@@ -1253,23 +1239,23 @@ def _get_dim_names(self, cube_or_mesh):
"""
Determine suitable CF-netCDF data dimension names.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- Returns:
- mesh_dimensions, cube_dimensions
- * mesh_dimensions (list of string):
- A list of the mesh dimensions of the attached mesh, if any.
- * cube_dimensions (list of string):
- A lists of dimension names for each dimension of the cube
+ Returns
+ -------
+ mesh_dimensions : list of str
+ A list of the mesh dimensions of the attached mesh, if any.
+ cube_dimensions : list of str
+ A list of dimension names for each dimension of the cube.
- ..note::
- The returned lists are in the preferred file creation order.
- One of the mesh dimensions will typically also appear in the cube
- dimensions.
+ Notes
+ -----
+ The returned lists are in the preferred file creation order.
+ One of the mesh dimensions will typically also appear in the cube
+ dimensions.
"""
@@ -1277,6 +1263,8 @@ def record_dimension(
names_list, dim_name, length, matching_coords=None
):
"""
+ Record a file dimension, its length and associated "coordinates".
+
Record a file dimension, its length and associated "coordinates"
(which may in fact also be connectivities).
@@ -1475,16 +1463,17 @@ def record_dimension(
@staticmethod
def cf_valid_var_name(var_name):
- """
- Return a valid CF var_name given a potentially invalid name.
-
- Args:
+ """Return a valid CF var_name given a potentially invalid name.
- * var_name (str):
+ Parameters
+ ----------
+ var_name : str
The var_name to normalise
- Returns:
- A var_name suitable for passing through for variable creation.
+ Returns
+ -------
+ str
+ var_name suitable for passing through for variable creation.
"""
# Replace invalid characters with an underscore ("_").
@@ -1499,17 +1488,17 @@ def _cf_coord_standardised_units(coord):
"""
Determine a suitable units from a given coordinate.
- Args:
-
- * coord (:class:`iris.coords.Coord`):
+ Parameters
+ ----------
+ coord : :class:`iris.coords.Coord`
A coordinate of a cube.
- Returns:
+ Returns
+ -------
+ units
The (standard_name, long_name, unit) of the given
:class:`iris.coords.Coord` instance.
-
"""
-
units = str(coord.units)
# Set the 'units' of 'latitude' and 'longitude' coordinates specified
# in 'degrees' to 'degrees_north' and 'degrees_east' respectively,
@@ -1561,17 +1550,18 @@ def _create_cf_bounds(self, coord, cf_var, cf_name):
"""
Create the associated CF-netCDF bounds variable.
- Args:
-
- * coord (:class:`iris.coords.Coord`):
+ Parameters
+ ----------
+ coord : :class:`iris.coords.Coord`
A coordinate of a cube.
- * cf_var:
+ cf_var
CF-netCDF variable
- * cf_name (string):
+ cf_name : str
name of the CF-NetCDF variable.
- Returns:
- None
+ Returns
+ -------
+ None
"""
if hasattr(coord, "has_bounds") and coord.has_bounds():
@@ -1619,15 +1609,17 @@ def _create_cf_bounds(self, coord, cf_var, cf_name):
def _get_cube_variable_name(self, cube):
"""
- Returns a CF-netCDF variable name for the given cube.
-
- Args:
+ Return a CF-netCDF variable name for the given cube.
- * cube (class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
An instance of a cube for which a CF-netCDF variable
name is required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1642,18 +1634,19 @@ def _get_cube_variable_name(self, cube):
def _get_coord_variable_name(self, cube_or_mesh, coord):
"""
- Returns a CF-netCDF variable name for a given coordinate-like element.
+ Return a CF-netCDF variable name for a given coordinate-like element.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- * coord (:class:`iris.coords._DimensionalMetadata`):
+ coord : :class:`iris.coords._DimensionalMetadata`
An instance of a coordinate (or similar), for which a CF-netCDF
variable name is required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1704,15 +1697,17 @@ def _get_coord_variable_name(self, cube_or_mesh, coord):
def _get_mesh_variable_name(self, mesh):
"""
- Returns a CF-netCDF variable name for the mesh.
-
- Args:
+ Return a CF-netCDF variable name for the mesh.
- * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`):
+ Parameters
+ ----------
+ mesh : :class:`iris.experimental.ugrid.mesh.Mesh`
An instance of a Mesh for which a CF-netCDF variable name is
required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1731,12 +1726,14 @@ def _create_mesh(self, mesh):
"""
Create a mesh variable in the netCDF dataset.
- Args:
-
- * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`):
+ Parameters
+ ----------
+ mesh : :class:`iris.experimental.ugrid.mesh.Mesh`
The Mesh to be saved to CF-netCDF file.
- Returns:
+ Returns
+ -------
+ str
The string name of the associated CF-netCDF variable saved.
"""
@@ -1811,6 +1808,8 @@ def _create_generic_cf_array_var(
fill_value=None,
):
"""
+ Create the CF-netCDF variable given dimensional_metadata.
+
Create the associated CF-netCDF variable in the netCDF dataset for the
given dimensional_metadata.
@@ -1818,33 +1817,32 @@ def _create_generic_cf_array_var(
If the metadata element is a coord, it may also contain bounds.
In which case, an additional var is created and linked to it.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- * cube_dim_names (list of string):
+ cube_dim_names : list of str
The name of each dimension of the cube.
- * element:
+ element : :class:`iris.coords._DimensionalMetadata`
An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the
cube. Provides data, units and standard/long/var names.
Not used if 'element_dims' is not None.
- * element_dims (list of string, or None):
+ element_dims : list of str, or None
If set, contains the variable dimension (names),
otherwise these are taken from `element.cube_dims[cube]`.
For Mesh components (element coordinates and connectivities), this
*must* be passed in, as "element.cube_dims" does not function.
- * fill_value (number or None):
+ fill_value : number or None
If set, create the variable with this fill-value, and fill any
masked data points with this value.
If not set, standard netcdf4-python behaviour : the variable has no
'_FillValue' property, and uses the "standard" fill-value for its
type.
- Returns:
- var_name (string):
- The name of the CF-netCDF variable created.
-
+ Returns
+ -------
+ str
+ The name of the CF-netCDF variable created.
"""
# Support cube or mesh save.
from iris.cube import Cube
@@ -1960,16 +1958,17 @@ def _create_cf_cell_methods(self, cube, dimension_names):
"""
Create CF-netCDF string representation of a cube cell methods.
- Args:
-
- * cube (:class:`iris.cube.Cube`) or cubelist
- (:class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
- Returns:
+ Returns
+ -------
+ str
CF-netCDF string representation of a cube cell methods.
"""
@@ -2009,20 +2008,22 @@ def _create_cf_cell_methods(self, cube, dimension_names):
def _create_cf_grid_mapping(self, cube, cf_var_cube):
"""
+ Create CF-netCDF grid mapping and associated CF-netCDF variable.
+
Create CF-netCDF grid mapping variable and associated CF-netCDF
data variable grid mapping attribute.
- Args:
-
- * cube (:class:`iris.cube.Cube`) or cubelist
- (:class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- Returns:
- None
+ Returns
+ -------
+ None
"""
cs = cube.coord_system("CoordSystem")
@@ -2278,32 +2279,30 @@ def _create_cf_data_variable(
**kwargs,
):
"""
- Create CF-netCDF data variable for the cube and any associated grid
- mapping.
+ Create CF-netCDF data variable for the cube and any associated grid mapping.
+
# TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can
# be removed.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
The associated cube being saved to CF-netCDF file.
- * dimension_names (list):
+ dimension_names : list
String names for each dimension of the cube.
-
- Kwargs:
-
- * local_keys (iterable of strings):
- * see :func:`iris.fileformats.netcdf.Saver.write`
- * packing (type or string or dict or list):
- * see :func:`iris.fileformats.netcdf.Saver.write`
- * fill_value:
- * see :func:`iris.fileformats.netcdf.Saver.write`
+ local_keys : iterable of str, optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
+ packing : type or str or dict or list, optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
+ fill_value : optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
All other keywords are passed through to the dataset's `createVariable`
method.
- Returns:
- The newly created CF-netCDF data variable.
+ Returns
+ -------
+ The newly created CF-netCDF data variable.
"""
# TODO: when iris.FUTURE.save_split_attrs is removed, the 'local_keys' arg can
@@ -2454,13 +2453,14 @@ def _increment_name(self, varname):
Avoidance of conflicts between variable names, where the name is
incremented to distinguish it from others.
- Args:
-
- * varname (string):
+ Parameters
+ ----------
+ varname : str
Variable name to increment.
- Returns:
- Incremented varname.
+ Returns
+ -------
+ Incremented varname.
"""
num = 0
@@ -2567,18 +2567,19 @@ def store(data, cf_var, fill_info):
def delayed_completion(self) -> Delayed:
"""
- Create and return a :class:`dask.delayed.Delayed` to perform file completion
- for delayed saves.
+ Perform file completion for delayed saves.
+
+ Create and return a :class:`dask.delayed.Delayed` to perform file
+ completion for delayed saves.
- This contains all the delayed writes, which complete the file by filling out
- the data of variables initially created empty, and also the checks for
- potential fill-value collisions.
- When computed, it returns a list of any warnings which were generated in the
- save operation.
+ This contains all the delayed writes, which complete the file by
+ filling out the data of variables initially created empty, and also the
+ checks for potential fill-value collisions. When computed, it returns
+ a list of any warnings which were generated in the save operation.
Returns
-------
- completion : :class:`dask.delayed.Delayed`
+ :class:`dask.delayed.Delayed`
Notes
-----
@@ -2691,7 +2692,7 @@ def save(
fill_value=None,
compute=True,
):
- """
+ r"""
Save cube(s) to a netCDF file, given the cube and the filename.
* Iris will write CF 1.7 compliant NetCDF files.
@@ -2712,13 +2713,12 @@ def save(
status of the cube's data payload, unless the netcdf_format is explicitly
specified to be 'NETCDF3' or 'NETCDF3_CLASSIC'.
- Args:
-
- * cube (:class:`iris.cube.Cube` or :class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other
iterable of cubes to be saved to a netCDF file.
-
- * filename (string):
+ filename : str
Name of the netCDF file to save the cube(s).
**Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object.
@@ -2726,58 +2726,50 @@ def save(
When saving to a dataset, ``compute`` **must** be ``False`` :
See the ``compute`` parameter.
- Kwargs:
-
- * netcdf_format (string):
+ netcdf_format : str, optional
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
-
- * local_keys (iterable of strings):
+ local_keys : iterable of str, optional
An interable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
- **NOTE:** this is *ignored* if 'split-attribute saving' is **enabled**,
- i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``.
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ .. note::
+ This is *ignored* if 'split-attribute saving' is **enabled**,
+ i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``.
+
+ unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` objects, optional
List of coordinate names (or coordinate objects) corresponding
to coordinate dimensions of `cube` to save with the NetCDF dimension
variable length 'UNLIMITED'. By default, no unlimited dimensions are
saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED'
dimensions.
-
- * zlib (bool):
+ zlib : bool, optional
If `True`, the data will be compressed in the netCDF file using gzip
compression (default `False`).
-
- * complevel (int):
+ complevel : int, optional
An integer between 1 and 9 describing the level of compression desired
(default 4). Ignored if `zlib=False`.
-
- * shuffle (bool):
+ shuffle : bool, optional
If `True`, the HDF5 shuffle filter will be applied before compressing
the data (default `True`). This significantly improves compression.
Ignored if `zlib=False`.
-
- * fletcher32 (bool):
+ fletcher32 : bool, optional
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
-
- * contiguous (bool):
+ contiguous : bool, optional
If `True`, the variable data is stored contiguously on disk. Default
`False`. Setting to `True` for a variable with an unlimited dimension
will trigger an error.
-
- * chunksizes (tuple of int):
+ chunksizes : tuple of int, optional
Used to manually specify the HDF5 chunksizes for each dimension of the
variable. A detailed discussion of HDF chunking and I/O performance is
- available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
+ available
+ `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html>`__.
Basically, you want the chunk size for each dimension to match as
closely as possible the size of the data block that users will read
from the file. `chunksizes` cannot be set if `contiguous=True`.
-
- * endian (string):
+ endian : str, optional
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
@@ -2785,8 +2777,7 @@ def save(
computer with the opposite format as the one used to create the file,
there may be some performance advantage to be gained by setting the
endian-ness.
-
- * least_significant_digit (int):
+ least_significant_digit : int, optional
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this produces
'lossy', but significantly more efficient compression. For example, if
@@ -2794,17 +2785,17 @@ def save(
`numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits`
is determined so that a precision of 0.1 is retained (in this case
`bits=4`). From
- http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml:
+ `here <http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml>`__:
"least_significant_digit -- power of ten of the smallest decimal place
in unpacked data that is a reliable value". Default is `None`, or no
quantization, or 'lossless' compression.
-
- * packing (type or string or dict or list): A numpy integer datatype
- (signed or unsigned) or a string that describes a numpy integer dtype
- (i.e. 'i2', 'short', 'u4') or a dict of packing parameters as described
- below or an iterable of such types, strings, or dicts.
- This provides support for netCDF data packing as described in
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values
+ packing : type or str or dict or list, optional
+ A numpy integer datatype (signed or unsigned) or a string that
+ describes a numpy integer dtype (i.e. 'i2', 'short', 'u4') or a dict
+ of packing parameters as described below or an iterable of such types,
+ strings, or dicts. This provides support for netCDF data packing as
+ described in
+ `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values>`__.
If this argument is a type (or type string), appropriate values of
scale_factor and add_offset will be automatically calculated based
on `cube.data` and possible masking. For more control, pass a dict with
@@ -2814,18 +2805,16 @@ def save(
avoid this. The default is `None`, in which case the datatype is
determined from the cube and no packing will occur. If this argument is
a list it must have the same number of elements as `cube` if `cube` is
- a `:class:`iris.cube.CubeList`, or one element, and each element of
+ a :class:`iris.cube.CubeList`, or one element, and each element of
this argument will be applied to each cube separately.
-
- * fill_value (numeric or list):
+ fill_value : numeric or list, optional
The value to use for the `_FillValue` attribute on the netCDF variable.
If `packing` is specified the value of `fill_value` should be in the
domain of the packed data. If this argument is a list it must have the
same number of elements as `cube` if `cube` is a
- `:class:`iris.cube.CubeList`, or a single element, and each element of
+ :class:`iris.cube.CubeList`, or a single element, and each element of
this argument will be applied to each cube separately.
-
- * compute (bool):
+ compute : bool, optional
Default is ``True``, meaning complete the file immediately, and return ``None``.
When ``False``, create the output file but don't write any lazy array content to
@@ -2837,7 +2826,7 @@ def save(
.. Note::
when computed, the returned :class:`dask.delayed.Delayed` object returns
- a list of :class:`Warning`\\s : These are any warnings which *would* have
+ a list of :class:`Warning` objects : These are any warnings which *would* have
been issued in the save call, if ``compute`` had been ``True``.
.. Note::
@@ -2848,21 +2837,18 @@ def save(
must (re-)open the dataset for writing, which will fail if the file is
still open for writing by the caller.
- Returns:
- result (None, or dask.delayed.Delayed):
- If `compute=True`, returns `None`.
- Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed
- writing to fill in the variables data.
-
- .. note::
-
- The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
- `chunksizes` and `endian` keywords are silently ignored for netCDF 3
- files that do not use HDF5.
-
- .. seealso::
-
- NetCDF Context manager (:class:`~Saver`).
+ Returns
+ -------
+ result : None or dask.delayed.Delayed
+ If `compute=True`, returns `None`.
+ Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed
+ writing to fill in the variables data.
+
+ Notes
+ -----
+ The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
+ `chunksizes` and `endian` keywords are silently ignored for netCDF 3
+ files that do not use HDF5.
"""
from iris.cube import Cube, CubeList
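The save keywords documented above combine in practice. A minimal sketch, assuming a cube loaded from a hypothetical ``input.nc`` (the chunk shape and packing values are illustrative only)::

    import iris

    cube = iris.load_cube("input.nc")  # hypothetical source file

    # gzip compression with the HDF5 shuffle filter, and manual chunking;
    # the chunk shape must match the variable's dimensionality.
    iris.save(
        cube,
        "compressed.nc",
        zlib=True,
        complevel=4,
        shuffle=True,
        chunksizes=(1, 180, 360),
    )

    # Packing to 16-bit integers, with explicit scale/offset control.
    iris.save(
        cube,
        "packed.nc",
        packing={"dtype": "i2", "scale_factor": 0.1, "add_offset": 0.0},
    )

    # Deferred saving: create the file now, write the lazy data later.
    delayed = iris.save(cube, "delayed.nc", compute=False)
    warnings = delayed.compute()  # completes the write; returns deferred warnings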
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index c8e02a40cf..87725789e5 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -2,10 +2,8 @@
#
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
-"""
-Provides an interface to manage URI scheme support in iris.
-"""
+"""Provides an interface to manage URI scheme support in iris."""
import collections
from collections import OrderedDict
@@ -39,29 +37,27 @@ def __setitem__(self, key, value):
def run_callback(callback, cube, field, filename):
"""
- Runs the callback mechanism given the appropriate arguments.
+ Run the callback mechanism given the appropriate arguments.
- Args:
-
- * callback:
+ Parameters
+ ----------
+ callback : function
A function to add metadata from the originating field and/or URI which
obeys the following rules:
- 1. Function signature must be: ``(cube, field, filename)``.
- 2. Modifies the given cube inplace, unless a new cube is
- returned by the function.
- 3. If the cube is to be rejected the callback must raise
- an :class:`iris.exceptions.IgnoreCubeException`.
-
- .. note::
+ 1. Function signature must be: ``(cube, field, filename)``.
+ 2. Modifies the given cube inplace, unless a new cube is
+ returned by the function.
+ 3. If the cube is to be rejected the callback must raise
+ an :class:`iris.exceptions.IgnoreCubeException`.
- It is possible that this function returns None for certain callbacks,
- the caller of this function should handle this case.
-
- .. note::
+ Notes
+ -----
+ It is possible that this function returns None for certain callbacks;
+ the caller of this function should handle this case.
- This function maintains laziness when called; it does not realise data.
- See more at :doc:`/userguide/real_and_lazy_data`.
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
from iris.cube import Cube
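To illustrate the three rules above, a callback might look like the following sketch (the attribute and diagnostic names are hypothetical)::

    import iris.exceptions

    def my_callback(cube, field, filename):
        # Rule 2: modify the given cube in place.
        cube.attributes["source_file"] = filename
        # Rule 3: reject unwanted cubes by raising IgnoreCubeException.
        if cube.name() == "unwanted_diagnostic":
            raise iris.exceptions.IgnoreCubeException()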
@@ -88,7 +84,7 @@ def run_callback(callback, cube, field, filename):
def decode_uri(uri, default="file"):
r"""
- Decodes a single URI into scheme and scheme-specific parts.
+ Decode a single URI into scheme and scheme-specific parts.
In addition to well-formed URIs, it also supports bare file paths as strings
or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are
@@ -100,25 +96,26 @@ def decode_uri(uri, default="file"):
from iris.io import *
- Examples:
- >>> from iris.io import decode_uri
- >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b'))
- ('http', '//www.thing.com:8080/resource?id=a:b')
+ Examples
+ --------
+ >>> from iris.io import decode_uri
+ >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b'))
+ ('http', '//www.thing.com:8080/resource?id=a:b')
- >>> print(decode_uri('file:///data/local/dataZoo/...'))
- ('file', '///data/local/dataZoo/...')
+ >>> print(decode_uri('file:///data/local/dataZoo/...'))
+ ('file', '///data/local/dataZoo/...')
- >>> print(decode_uri('/data/local/dataZoo/...'))
- ('file', '/data/local/dataZoo/...')
+ >>> print(decode_uri('/data/local/dataZoo/...'))
+ ('file', '/data/local/dataZoo/...')
- >>> print(decode_uri('file:///C:\data\local\dataZoo\...'))
- ('file', '///C:\\data\\local\\dataZoo\\...')
+ >>> print(decode_uri('file:///C:\data\local\dataZoo\...'))
+ ('file', '///C:\\data\\local\\dataZoo\\...')
- >>> print(decode_uri('C:\data\local\dataZoo\...'))
- ('file', 'C:\\data\\local\\dataZoo\\...')
+ >>> print(decode_uri('C:\data\local\dataZoo\...'))
+ ('file', 'C:\\data\\local\\dataZoo\\...')
- >>> print(decode_uri('dataZoo/...'))
- ('file', 'dataZoo/...')
+ >>> print(decode_uri('dataZoo/...'))
+ ('file', 'dataZoo/...')
>>> print(decode_uri({}))
('data', {})
@@ -156,7 +153,7 @@ def expand_filespecs(file_specs, files_expected=True):
----------
file_specs : iterable of str
File paths which may contain ``~`` elements or wildcards.
- files_expected : bool, default=True
+ files_expected : bool, optional, default=True
Whether file is expected to exist (i.e. for load).
Returns
@@ -205,14 +202,16 @@ def expand_filespecs(file_specs, files_expected=True):
def load_files(filenames, callback, constraints=None):
"""
- Takes a list of filenames which may also be globs, and optionally a
- constraint set and a callback function, and returns a
+ Create a generator of Cubes from the given files.
+
+ Take a list of filenames which may also be globs, and optionally a
+ constraint set and a callback function, and return a
generator of Cubes from the given files.
- .. note::
-
- Typically, this function should not be called directly; instead, the
- intended interface for loading is :func:`iris.load`.
+ Notes
+ -----
+ Typically, this function should not be called directly; instead, the
+ intended interface for loading is :func:`iris.load`.
"""
from iris.fileformats import FORMAT_AGENT
@@ -243,13 +242,15 @@ def load_files(filenames, callback, constraints=None):
def load_http(urls, callback):
"""
- Takes a list of OPeNDAP URLs and a callback function, and returns a generator
- of Cubes from the given URLs.
+ Create a generator of Cubes from the given OPeNDAP URLs.
- .. note::
+ Take a list of OPeNDAP URLs and a callback function, and return a generator
+ of Cubes from the given URLs.
- Typically, this function should not be called directly; instead, the
- intended interface for loading is :func:`iris.load`.
+ Notes
+ -----
+ Typically, this function should not be called directly; instead, the
+ intended interface for loading is :func:`iris.load`.
"""
#
@@ -276,8 +277,8 @@ def load_http(urls, callback):
def load_data_objects(urls, callback):
"""
- Takes a list of data-source objects and a callback function, and returns a
- generator of Cubes.
+ Take a list of data-source objects and a callback function, and return a
+ generator of Cubes.
+
The 'objects' take the place of 'uris' in the load calls.
The appropriate types of the data-source objects are expected to be
recognised by the handlers : This is done in the usual way by passing the
@@ -345,12 +346,16 @@ def add_saver(file_extension, new_saver):
"""
Add a custom saver to the Iris session.
- Args:
-
- * file_extension: A string such as "pp" or "my_format".
- * new_saver: A function of the form ``my_saver(cube, target)``.
+ Parameters
+ ----------
+ file_extension : str
+ A string such as "pp" or "my_format".
+ new_saver : function
+ A function of the form ``my_saver(cube, target)``.
- See also :func:`iris.io.save`
+ See Also
+ --------
+ :func:`iris.io.save`
"""
# Make sure it's a func with 2+ args
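As a usage sketch, registering a hypothetical plain-text saver (here ``target`` is assumed to be a filename; :func:`iris.io.save` can also pass an open file handle)::

    from iris.io import add_saver

    def my_saver(cube, target):
        # Write a simple text rendering of the cube to the target file.
        with open(target, "w") as fh:
            fh.write(str(cube))

    add_saver("my_format", my_saver)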
@@ -368,14 +373,16 @@ def find_saver(filespec):
"""
Find the saver function appropriate to the given filename or extension.
- Args:
-
- * filespec
- A string such as "my_file.pp" or "PP".
+ Parameters
+ ----------
+ filespec : str
+ A string such as "my_file.pp" or "PP".
- Returns:
- A save function or None.
- Save functions can be passed to :func:`iris.io.save`.
+ Returns
+ -------
+ Save function or None
+ Save functions can be passed to :func:`iris.io.save`. None is
+ returned when no saver is found for the given filespec.
"""
_check_init_savers()
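A brief usage sketch, assuming the standard savers are registered::

    from iris.io import find_saver

    saver = find_saver("my_file.nc")  # the registered netCDF save function
    if saver is None:
        raise ValueError("No saver found for this file type")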
@@ -400,12 +407,12 @@ def save(source, target, saver=None, **kwargs):
Iris currently supports three file formats for saving, which it can
recognise by filename extension:
- * netCDF - the Unidata network Common Data Format:
- * see :func:`iris.fileformats.netcdf.save`
- * GRIB2 - the WMO GRIdded Binary data format:
- * see :func:`iris_grib.save_grib2`.
- * PP - the Met Office UM Post Processing Format:
- * see :func:`iris.fileformats.pp.save`
+ * netCDF - the Unidata network Common Data Format:
+ * see :func:`iris.fileformats.netcdf.save`
+ * GRIB2 - the WMO GRIdded Binary data format:
+ * see :func:`iris_grib.save_grib2`.
+ * PP - the Met Office UM Post Processing Format:
+ * see :func:`iris.fileformats.pp.save`
A custom saver can be provided to the function to write to a different
file format.
@@ -469,8 +476,7 @@ def save(source, target, saver=None, **kwargs):
>>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC")
Notes
- ------
-
+ -----
This function maintains laziness when called; it does not realise data.
See more at :doc:`/userguide/real_and_lazy_data`.
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py
index d2d3b5fd41..da64345cf3 100644
--- a/lib/iris/io/format_picker.py
+++ b/lib/iris/io/format_picker.py
@@ -3,9 +3,10 @@
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
"""
-A module to provide convenient file format identification through a combination of filename extension
-and file based *magic* numbers.
+Provide convenient file format identification.
+A module to provide convenient file format identification through a combination
+of filename extension and file-based *magic* numbers.
To manage a collection of FormatSpecifications for loading::
@@ -24,9 +25,11 @@
with open(png_filename, 'rb') as png_fh:
handling_spec = fagent.get_spec(png_filename, png_fh)
-In the example, handling_spec will now be the png_spec previously added to the agent.
+In the example, handling_spec will now be the png_spec previously added to the
+agent.
-Now that a specification has been found, if a handler has been given with the specification, then the file can be handled::
+Now that a specification has been found, if a handler has been given with the
+specification, then the file can be handled::
handler = handling_spec.handler
if handler is None:
@@ -34,8 +37,8 @@
else:
result = handler(filename)
-The calling sequence of handler is dependent on the function given in the original specification and can be customised to your project's needs.
-
+The calling sequence of handler is dependent on the function given in the
+original specification and can be customised to your project's needs.
"""
@@ -47,10 +50,14 @@
class FormatAgent:
"""
- The FormatAgent class is the containing object which is responsible for identifying the format of a given file
- by interrogating its children FormatSpecification instances.
+ Identify the format of a given file by interrogating its child specs.
+
+ The FormatAgent class is the containing object which is responsible for
+ identifying the format of a given file by interrogating its children
+ FormatSpecification instances.
- Typically a FormatAgent will be created empty and then extended with the :meth:`FormatAgent.add_spec` method::
+ Typically a FormatAgent will be created empty and then extended with the
+ :meth:`FormatAgent.add_spec` method::
agent = FormatAgent()
agent.add_spec(NetCDF_specification)
@@ -62,12 +69,11 @@ class FormatAgent:
"""
def __init__(self, format_specs=None):
- """ """
self._format_specs = list(format_specs or [])
self._format_specs.sort()
def add_spec(self, format_spec):
- """Add a FormatSpecification instance to this agent for format consideration."""
+ """Add a FormatSpecification instance to this agent for format."""
self._format_specs.append(format_spec)
self._format_specs.sort()
@@ -82,15 +88,22 @@ def __str__(self):
def get_spec(self, basename, buffer_obj):
"""
+ Pick the first FormatSpecification.
+
Pick the first FormatSpecification which can handle the given
filename and file/buffer object.
- .. note::
+ Parameters
+ ----------
+ basename : str
+ The filename (or URI) of the file to identify.
+ buffer_obj : file-like or None
+ A readable file handle or buffer for the file, where available.
- ``buffer_obj`` may be ``None`` when a seekable file handle is not
- feasible (such as over the http protocol). In these cases only the
- format specifications which do not require a file handle are
- tested.
+ Notes
+ -----
+ ``buffer_obj`` may be ``None`` when a seekable file handle is not
+ feasible (such as over the http protocol). In these cases only the
+ format specifications which do not require a file handle are
+ tested.
"""
element_cache = {}
@@ -145,8 +158,10 @@ class FormatSpecification:
"""
Provides the base class for file type definition.
- Every FormatSpecification instance has a name which can be accessed with the :attr:`FormatSpecification.name` property and
- a FileElement, such as filename extension or 32-bit magic number, with an associated value for format identification.
+ Every FormatSpecification instance has a name which can be accessed with
+ the :attr:`FormatSpecification.name` property and a FileElement, such as
+ filename extension or 32-bit magic number, with an associated value for
+ format identification.
"""
@@ -160,20 +175,26 @@ def __init__(
constraint_aware_handler=False,
):
"""
- Constructs a new FormatSpecification given the format_name and particular FileElements
-
- Args:
-
- * format_name - string name of fileformat being described
- * file_element - FileElement instance of the element which identifies this FormatSpecification
- * file_element_value - The value that the file_element should take if a file matches this FormatSpecification
-
- Kwargs:
-
- * handler - function which will be called when the specification has been identified and is required to handler a format.
- If None, then the file can still be identified but no handling can be done.
- * priority - Integer giving a priority for considering this specification where higher priority means sooner consideration.
-
+ Construct a new FormatSpecification.
+
+ Parameters
+ ----------
+ format_name : str
+ String name of the file format being described.
+ file_element : FileElement
+ FileElement instance of the element which identifies this
+ FormatSpecification.
+ file_element_value :
+ The value that the file_element should take if a file matches this
+ FormatSpecification.
+ handler : optional
+ Function which will be called when the specification has been
+ identified and is required to handle a format. If None, then the
+ file can still be identified but no handling can be done.
+ priority : int, optional
+ Integer giving a priority for considering this specification;
+ a higher priority means sooner consideration.
+ constraint_aware_handler : bool, optional, default=False
"""
if not isinstance(file_element, FileElement):
raise ValueError(
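To make the constructor concrete, a sketch registering a hypothetical ".foo" format by filename extension::

    from iris.io.format_picker import (
        FileExtension,
        FormatAgent,
        FormatSpecification,
    )

    def load_foo(filename):
        # Hypothetical loader for the ".foo" format.
        ...

    foo_spec = FormatSpecification(
        "FOO data",        # format_name
        FileExtension(),   # file_element
        ".foo",            # file_element_value
        handler=load_foo,
        priority=3,
    )

    agent = FormatAgent()
    agent.add_spec(foo_spec)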
@@ -189,26 +210,29 @@ def __init__(
self.constraint_aware_handler = constraint_aware_handler
def __hash__(self):
- # Hashed by specification for consistent ordering in FormatAgent (including self._handler in this hash
- # for example would order randomly according to object id)
+ # Hashed by specification for consistent ordering in FormatAgent
+ # (including self._handler in this hash for example would order
+ # randomly according to object id)
return hash(self._file_element)
@property
def file_element(self):
+ # noqa: D102
return self._file_element
@property
def file_element_value(self):
+ # noqa: D102
return self._file_element_value
@property
def name(self):
- """The name of this FileFormat. (Read only)"""
+ """The name of this FileFormat. (Read only)."""
return self._format_name
@property
def handler(self):
- """The handler function of this FileFormat. (Read only)"""
+ """The handler function of this FileFormat. (Read only)."""
return self._handler
def _sort_key(self):
@@ -230,7 +254,8 @@ def __ne__(self, other):
return not (self == other)
def __repr__(self):
- # N.B. loader is not always going to provide a nice repr if it is a lambda function, hence a prettier version is available in __str__
+ # N.B. loader is not always going to provide a nice repr if it is a
+ # lambda function, hence a prettier version is available in __str__
return "FormatSpecification(%r, %r, %r, handler=%r, priority=%s)" % (
self._format_name,
self._file_element,
@@ -249,23 +274,27 @@ def __str__(self):
class FileElement:
"""
- Represents a specific aspect of a FileFormat which can be identified using the given element getter function.
+ Represents a specific aspect of a FileFormat.
+
+ Represents a specific aspect of a FileFormat which can be identified using
+ the given element getter function.
"""
def __init__(self, requires_fh=True):
"""
- Constructs a new file element, which may require a file buffer.
+ Construct a new file element, which may require a file buffer.
- Kwargs:
-
- * requires_fh - Whether this FileElement needs a file buffer.
+ Parameters
+ ----------
+ requires_fh : bool, optional, default=True
+ Whether this FileElement needs a file buffer.
"""
self.requires_fh = requires_fh
def get_element(self, basename, file_handle):
- """Called when identifying the element of a file that this FileElement is representing."""
+ """Identify the element of a file that this FileElement is representing."""
raise NotImplementedError("get_element must be defined in a subclass")
def __hash__(self):
@@ -286,6 +315,7 @@ def __init__(self, num_bytes, offset=None):
self._offset = offset
def get_element(self, basename, file_handle):
+ # noqa: D102
if self._offset is not None:
file_handle.seek(self._offset)
bytes = file_handle.read(self._num_bytes)
@@ -306,6 +336,7 @@ class FileExtension(FileElement):
"""A :class:`FileElement` that returns the extension from the filename."""
def get_element(self, basename, file_handle):
+ # noqa: D102
return os.path.splitext(basename)[1]
@@ -313,11 +344,14 @@ class LeadingLine(FileElement):
"""A :class:`FileElement` that returns the first line from the file."""
def get_element(self, basename, file_handle):
+ # noqa: D102
return file_handle.readline()
class UriProtocol(FileElement):
"""
+ Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`.
+
A :class:`FileElement` that returns the "scheme" and "part" from a URI,
using :func:`~iris.io.decode_uri`.
@@ -327,6 +361,7 @@ def __init__(self):
FileElement.__init__(self, requires_fh=False)
def get_element(self, basename, file_handle):
+ # noqa: D102
from iris.io import decode_uri
return decode_uri(basename)[0]
@@ -345,7 +380,10 @@ def __init__(self):
super().__init__(requires_fh=False)
def get_element(self, basename, file_handle):
- # In this context, there should *not* be a file opened by the handler.
- # Just return 'basename', which in this case is not a name, or even a
- # string, but a passed 'data object'.
+ """
+ In this context, there should *not* be a file opened by the handler.
+
+ Just return 'basename', which in this case is not a name, or even a
+ string, but a passed 'data object'.
+ """
return basename
diff --git a/lib/iris/time.py b/lib/iris/time.py
index ddedeedd91..6ba85a0051 100644
--- a/lib/iris/time.py
+++ b/lib/iris/time.py
@@ -2,17 +2,16 @@
#
# This file is part of Iris and is released under the BSD license.
# See LICENSE in the root of the repository for full licensing details.
-"""
-Time handling.
-"""
+"""Time handling."""
import functools
@functools.total_ordering
class PartialDateTime:
- """
+ """Allow partial comparisons against datetime-like objects.
+
A :class:`PartialDateTime` object specifies values for some subset of
the calendar/time fields (year, month, hour, etc.) for comparing
with :class:`datetime.datetime`-like instances.
@@ -44,7 +43,7 @@ class PartialDateTime:
#: A dummy value provided as a workaround to allow comparisons with
#: :class:`datetime.datetime`.
#: See http://bugs.python.org/issue8005.
- # NB. It doesn't even matter what this value is.
+ #: NB. It doesn't even matter what this value is.
timetuple = None
def __init__(
@@ -57,20 +56,28 @@ def __init__(
second=None,
microsecond=None,
):
- """
- Allows partial comparisons against datetime-like objects.
-
- Args:
-
- * year (int):
- * month (int):
- * day (int):
- * hour (int):
- * minute (int):
- * second (int):
- * microsecond (int):
-
- For example, to select any days of the year after the 3rd of April:
+ """Allow partial comparisons against datetime-like objects.
+
+ Parameters
+ ----------
+ year : int
+ The year number as an integer, or None.
+ month : int
+ The month number as an integer, or None.
+ day : int
+ The day number as an integer, or None.
+ hour : int
+ The hour number as an integer, or None.
+ minute : int
+ The minute number as an integer, or None.
+ second : int
+ The second number as an integer, or None.
+ microsecond : int
+ The microsecond number as an integer, or None.
+
+ Examples
+ --------
+ To select any days of the year after the 3rd of April:
>>> from iris.time import PartialDateTime
>>> import datetime
@@ -85,20 +92,12 @@ def __init__(
False
"""
-
- #: The year number as an integer, or None.
self.year = year
- #: The month number as an integer, or None.
self.month = month
- #: The day number as an integer, or None.
self.day = day
- #: The hour number as an integer, or None.
self.hour = hour
- #: The minute number as an integer, or None.
self.minute = minute
- #: The second number as an integer, or None.
self.second = second
- #: The microsecond number as an integer, or None.
self.microsecond = microsecond
def __repr__(self):
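Beyond direct comparisons, PartialDateTime objects are commonly used in time constraints when loading; a sketch with a hypothetical filename::

    import iris
    from iris.time import PartialDateTime

    # Match any time after the 3rd of April, in any year.
    after_april_3 = PartialDateTime(month=4, day=3)
    constraint = iris.Constraint(time=lambda cell: cell.point > after_april_3)
    cubes = iris.load("my_data.nc", constraint)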