 from torch.utils._python_dispatch import TorchDispatchMode
 from collections import defaultdict
 import weakref
+import abc

 __all__ = [
     "saved_tensors_hooks",
     "save_on_cpu",
     "disable_saved_tensors_hooks",
     "register_multi_grad_hook",
     "allow_mutation_on_saved_tensors",
+    "Node",
 ]

+class Node(abc.ABC):
+    @abc.abstractmethod
+    def name(self) -> str:
+        r"""Returns the name of the Node.
+
+        Example::
+
+            >>> import torch
+            >>> a = torch.tensor([0., 0., 0.], requires_grad=True)
+            >>> b = a.clone()
+            >>> assert isinstance(b.grad_fn, torch.autograd.graph.Node)
+            >>> print(b.grad_fn.name())
+            CloneBackward0
+        """
+        ...
+
+    @property
+    @abc.abstractmethod
+    def next_functions(self) -> Tuple[Tuple[Optional['Node'], int], ...]:
+        ...
+
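Since ``next_functions`` carries no docstring, an illustrative sketch (not part of the diff): each entry pairs a producer ``Node`` (or ``None`` when the corresponding input does not require grad) with the index of that producer's output feeding this ``Node``, which is what lets you walk the autograd graph by hand. The node names below assume a recent PyTorch build that appends the ``0`` suffix:

    >>> import torch
    >>> a = torch.tensor([1., 2.], requires_grad=True)
    >>> b = (a * 2).sum()
    >>> b.grad_fn.name()
    'SumBackward0'
    >>> # The sum's only input was produced by output 0 of MulBackward0.
    >>> fn, idx = b.grad_fn.next_functions[0]
    >>> fn.name()
    'MulBackward0'
    >>> idx
    0
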
+    @abc.abstractmethod
+    def metadata(self) -> dict:
+        r"""Returns the metadata."""
+        ...
+
+    @abc.abstractmethod
+    def _register_hook_dict(self, tensor: torch.Tensor) -> None:
+        ...
+
+    @abc.abstractmethod
+    def register_hook(self, fn: Callable[..., Any]) -> RemovableHandle:
+        r"""Registers a backward hook.
+
+        The hook will be called every time a gradient with respect to the
+        Node is computed. The hook should have the following signature::
+
+            hook(grad_inputs: Tuple[Tensor], grad_outputs: Tuple[Tensor]) -> Tuple[Tensor] or None
+
+        The hook should not modify its argument, but it can optionally return
+        a new gradient which will be used in place of :attr:`grad_inputs`.
+
+        This function returns a handle with a method ``handle.remove()``
+        that removes the hook from the Node.
+
+        Example::
+
+            >>> import torch
+            >>> a = torch.tensor([0., 0., 0.], requires_grad=True)
+            >>> b = a.clone()
+            >>> assert isinstance(b.grad_fn, torch.autograd.graph.Node)
+            >>> handle = b.grad_fn.register_hook(lambda gI, gO: (gO[0] * 2,))
+            >>> b.sum().backward(retain_graph=True)
+            >>> print(a.grad)
+            tensor([2., 2., 2.])
+            >>> handle.remove()  # Removes the hook
+            >>> a.grad = None
+            >>> b.sum().backward(retain_graph=True)
+            >>> print(a.grad)
+            tensor([1., 1., 1.])
+        """
+        ...
+
+    @abc.abstractmethod
+    def register_prehook(self, fn: Callable[..., Any]) -> RemovableHandle:
+        r"""Registers a backward pre-hook.
+
+        The hook will be called every time a gradient with respect to the
+        Node is computed. The hook should have the following signature::
+
+            hook(grad_outputs: Tuple[Tensor]) -> Tuple[Tensor] or None
+
+        The hook should not modify its argument, but it can optionally return
+        a new gradient which will be used in place of :attr:`grad_outputs`.
+
+        This function returns a handle with a method ``handle.remove()``
+        that removes the hook from the Node.
+
+        Example::
+
+            >>> import torch
+            >>> a = torch.tensor([0., 0., 0.], requires_grad=True)
+            >>> b = a.clone()
+            >>> assert isinstance(b.grad_fn, torch.autograd.graph.Node)
+            >>> handle = b.grad_fn.register_prehook(lambda gO: (gO[0] * 2,))
+            >>> b.sum().backward(retain_graph=True)
+            >>> print(a.grad)
+            tensor([2., 2., 2.])
+            >>> handle.remove()
+            >>> a.grad = None
+            >>> b.sum().backward(retain_graph=True)
+            >>> print(a.grad)
+            tensor([1., 1., 1.])
+        """
+        ...
+
+    @classmethod
+    def __subclasshook__(cls, C):
+        if cls is Node:
+            if ((C is not None and C is getattr(torch._C._functions, C.__name__, None))
+                    or issubclass(C, torch.autograd.function.BackwardCFunction)):
+                return True
+        return NotImplemented
+
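An illustrative sketch (not part of the diff) of what ``__subclasshook__`` buys: ``isinstance`` checks against ``torch.autograd.graph.Node`` succeed both for C++-implemented autograd nodes exposed under ``torch._C._functions`` and for the ``grad_fn`` produced by a custom ``torch.autograd.Function``, whose generated backward class subclasses ``BackwardCFunction``. ``MyDouble`` here is a hypothetical example Function:

    >>> import torch
    >>> class MyDouble(torch.autograd.Function):
    ...     @staticmethod
    ...     def forward(ctx, x):
    ...         return x * 2
    ...     @staticmethod
    ...     def backward(ctx, grad_out):
    ...         return grad_out * 2
    >>> a = torch.tensor([1.], requires_grad=True)
    >>> isinstance(a.clone().grad_fn, torch.autograd.graph.Node)  # C++ node (CloneBackward0)
    True
    >>> isinstance(MyDouble.apply(a).grad_fn, torch.autograd.graph.Node)  # BackwardCFunction
    True
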
 class saved_tensors_hooks():
     """Context-manager that sets a pair of pack / unpack hooks for saved tensors.
