Commit

Removed six
Sonnet 2 does not support Python 2.x, so it makes little sense to keep six around.

PiperOrigin-RevId: 380235488
superbobry authored and copybara-github committed Jun 18, 2021
1 parent cf15370 commit c87468e
Showing 14 changed files with 18 additions and 55 deletions.
1 change: 0 additions & 1 deletion requirements.txt
@@ -1,6 +1,5 @@
 absl-py>=0.7.1
 numpy>=1.16.3
-six>=1.12.0
 dm-tree>=0.1.1
 wrapt>=1.11.1
 tabulate>=0.7.5
7 changes: 0 additions & 7 deletions sonnet/src/BUILD
@@ -11,7 +11,6 @@ snt_py_library(
         ":once",
         ":types",
         ":utils",
-        # pip: six
         # pip: tensorflow
     ],
 )
@@ -24,7 +23,6 @@ snt_py_test(
         ":test_utils",
         # pip: absl/testing:parameterized
         # pip: numpy
-        # pip: six
         # pip: tensorflow
         # pip: wrapt
     ],
@@ -239,7 +237,6 @@ snt_py_library(
     deps = [
         ":types",
         # pip: numpy
-        # pip: six
         # pip: tensorflow
     ],
 )
@@ -333,7 +330,6 @@ snt_py_library(
     deps = [
         ":base",
         ":once",
-        # pip: six
         # pip: tensorflow
     ],
 )
@@ -375,7 +371,6 @@ snt_py_library(
         ":once",
         ":types",
         ":utils",
-        # pip: six
         # pip: tensorflow
         # pip: tree
     ],
@@ -400,7 +395,6 @@ snt_py_library(
    srcs = ["regularizers.py"],
    deps = [
        ":types",
-        # pip: six
        # pip: tensorflow
    ],
 )
@@ -513,7 +507,6 @@ snt_py_library(
    deps = [
        ":initializers",
        # pip: absl/logging
-        # pip: six
        # pip: tabulate
        # pip: tensorflow
    ],
7 changes: 3 additions & 4 deletions sonnet/src/base.py
@@ -22,7 +22,6 @@
 import sys
 from typing import Any, Callable, Dict, Optional, Sequence, Tuple, Type, TypeVar
 
-import six
 from sonnet.src import once
 from sonnet.src import types
 from sonnet.src import utils
@@ -181,7 +180,7 @@ def auto_repr(cls: Type[Any], *args, **kwargs) -> str:
   Returns:
     A string representing a call equivalent to `cls(*args, **kwargs)`.
   """
-  argspec = utils.getfullargspec(cls.__init__)
+  argspec = inspect.getfullargspec(cls.__init__)
   arg_names = argspec.args
   # Keep used positionals minus self.
   arg_names = arg_names[1:(len(args) + 1)]
@@ -255,7 +254,7 @@ def wrap_with_name_scope(
         "The super constructor must be called before any other methods in "
         "your constructor. If this is not possible then annotate all the "
         "methods called with `@snt.no_name_scope`.")
-    six.raise_from(exc_value, exc_value_from)
+    raise exc_value from exc_value_from
 
   with module_name_scope:
     # snt.Module enters the module name scope for all methods. To disable this
@@ -366,7 +365,7 @@ def assert_tf2():
 assert_tf2.checked = False
 
 
-class Module(six.with_metaclass(ModuleMetaclass, tf.Module)):
+class Module(tf.Module, metaclass=ModuleMetaclass):
   """Base class for Sonnet modules.
 
   A Sonnet module is a lightweight container for variables and other modules.
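The base.py changes above swap two six helpers for native Python 3 syntax: `six.with_metaclass(Meta, Base)` becomes the `metaclass=` keyword on the class statement, and `six.raise_from(new_exc, cause)` becomes `raise new_exc from cause`. A minimal standalone sketch of both idioms (the names `AbstractThing` and `load` are illustrative, not Sonnet code):

```python
import abc


# six.with_metaclass(abc.ABCMeta, object)  ->  metaclass= keyword argument.
class AbstractThing(metaclass=abc.ABCMeta):

  @abc.abstractmethod
  def run(self):
    ...


# six.raise_from(new_exc, original)  ->  raise new_exc from original,
# which preserves the original exception as __cause__ in the traceback.
def load(path):
  try:
    return open(path).read()
  except OSError as original:
    raise RuntimeError(f"could not load {path}") from original
```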
7 changes: 2 additions & 5 deletions sonnet/src/base_test.py
@@ -18,7 +18,6 @@
 
 from absl.testing import parameterized
 import numpy as np
-import six
 from sonnet.src import base
 from sonnet.src import test_utils
 import tensorflow as tf
@@ -409,8 +408,7 @@ def __init__(self, depth, trainable=True):
     self.w = tf.Variable(1.0, trainable=trainable, name="mushroom")
 
 
-@six.add_metaclass(abc.ABCMeta)
-class AbstractModule(base.Module):
+class AbstractModule(base.Module, metaclass=abc.ABCMeta):
 
   @abc.abstractmethod
   def __call__(self, x):
@@ -632,8 +630,7 @@ def __new__(cls, name, bases, clsdict):
     return new_type
 
 
-@six.add_metaclass(CombiningMetaclass)
-class ModuleWithCustomMetaclass(base.Module):
+class ModuleWithCustomMetaclass(base.Module, metaclass=CombiningMetaclass):
 
   def __init__(self):
     super(ModuleWithCustomMetaclass, self).__init__()
2 changes: 0 additions & 2 deletions sonnet/src/conformance/BUILD
@@ -13,7 +13,6 @@ snt_py_library(
     deps = [
         # pip: absl/testing:parameterized
         # pip: numpy
-        # pip: six
         "//sonnet",
         # pip: tensorflow
     ],
@@ -23,7 +22,6 @@ snt_py_test(
     name = "api_test",
     srcs = ["api_test.py"],
     deps = [
-        # pip: six
         "//sonnet",
         "//sonnet/src:test_utils",
         # pip: tensorflow
5 changes: 3 additions & 2 deletions sonnet/src/conformance/api_test.py
@@ -14,7 +14,8 @@
 # ============================================================================
 """Tests for Sonnet's public API."""
 
-from six.moves import reload_module as reload
+import importlib
+
 import sonnet as snt
 from sonnet.src import test_utils
 import tensorflow as tf
@@ -28,7 +29,7 @@ def test_src_not_exported(self):
   def test_supports_reload(self):
     mysnt = snt
     for _ in range(2):
      mysnt = reload(mysnt)
-      mysnt = reload(mysnt)
+      mysnt = importlib.reload(mysnt)
       self.assertFalse(hasattr(mysnt, "src"))
 
 
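api_test.py drops `six.moves.reload_module` in favour of the standard-library `importlib.reload`, which takes an already-imported module object, re-executes its code, and returns the refreshed module. A tiny sketch (the choice of `json` as the module to reload is purely illustrative):

```python
import importlib
import json  # any already-imported module works here

# Re-execute the module and rebind the name to the reloaded module object,
# mirroring the `mysnt = importlib.reload(mysnt)` pattern in the test.
json = importlib.reload(json)
```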
5 changes: 1 addition & 4 deletions sonnet/src/conformance/goldens.py
@@ -19,7 +19,6 @@
 
 from absl.testing import parameterized
 import numpy as np
-import six
 import sonnet as snt
 import tensorflow as tf
 
@@ -75,8 +74,7 @@ def range_like(t, start=0):
       t.shape)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Golden:
+class Golden(abc.ABC):
   """Represents a golden checkpoint file."""
 
   @abc.abstractmethod
@@ -95,7 +93,6 @@ def forward(self, module, x=None):
     pass
 
 
-@six.add_metaclass(abc.ABCMeta)
 class AbstractGolden(Golden):
   """Abstract base class for golden tests of single input modules."""
 
4 changes: 1 addition & 3 deletions sonnet/src/initializers.py
@@ -18,13 +18,11 @@
 import collections
 from typing import Iterable, Mapping, Optional, Union
 import numpy as np
-import six
 from sonnet.src import types
 import tensorflow as tf
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Initializer:
+class Initializer(abc.ABC):
   """Initializer base class, all initializers must implement a call method."""
 
   @abc.abstractmethod
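In goldens.py above, here in initializers.py, and again in regularizers.py further down, the `@six.add_metaclass(abc.ABCMeta)` decorator on a plain class is replaced by subclassing `abc.ABC`, which sets the same metaclass. A quick sketch of the resulting behaviour (the `__call__` signature shown is a simplified stand-in, not Sonnet's exact one):

```python
import abc


class Initializer(abc.ABC):  # equivalent to metaclass=abc.ABCMeta
  """Abstract base: concrete initializers must implement __call__."""

  @abc.abstractmethod
  def __call__(self, shape, dtype):
    ...


# Instantiating the ABC directly raises TypeError until __call__ is overridden:
#   Initializer()  ->  TypeError: Can't instantiate abstract class ...
```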
4 changes: 1 addition & 3 deletions sonnet/src/metrics.py
@@ -17,14 +17,12 @@
 import abc
 from typing import Optional
 
-import six
 from sonnet.src import base
 from sonnet.src import once
 import tensorflow as tf
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Metric(base.Module):
+class Metric(base.Module, metaclass=abc.ABCMeta):
   """Metric base class."""
 
   @abc.abstractmethod
1 change: 0 additions & 1 deletion sonnet/src/optimizers/BUILD
@@ -73,7 +73,6 @@ snt_py_library(
     srcs = ["rmsprop.py"],
     deps = [
         ":optimizer_utils",
-        # pip: six
         "//sonnet/src:base",
         "//sonnet/src:once",
         "//sonnet/src:types",
4 changes: 2 additions & 2 deletions sonnet/src/optimizers/rmsprop.py
@@ -14,9 +14,9 @@
 # ============================================================================
 """RMSProp module."""
 
+import itertools
 from typing import Optional, Sequence, Union
 
-import six
 from sonnet.src import base
 from sonnet.src import once
 from sonnet.src import types
@@ -127,7 +127,7 @@ def apply(self, updates: Sequence[types.ParameterUpdate],
     optimizer_utils.check_distribution_strategy()
     optimizer_utils.check_updates_parameters(updates, parameters)
     self._initialize(parameters)
-    for update, parameter, mom_var, ms_var, mg_var in six.moves.zip_longest(
+    for update, parameter, mom_var, ms_var, mg_var in itertools.zip_longest(
         updates, parameters, self.mom, self.ms, self.mg):
       if update is None:
         continue
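`six.moves.zip_longest` maps directly onto `itertools.zip_longest`: when the input sequences have different lengths, the shorter ones are padded with `None` rather than truncating the iteration, which is what lets the loop above walk optimizer state lists that may be shorter than `updates`. A toy sketch with made-up data, not the optimizer's real state:

```python
import itertools

updates = [0.1, None, 0.3]   # None marks a parameter with no gradient update
params = ["w", "b", "c"]
moments = ["mw", "mb"]       # deliberately shorter; padded with None

for update, param, mom in itertools.zip_longest(updates, params, moments):
  if update is None:
    continue  # skip parameters without an update, as in RMSProp.apply
  print(param, update, mom)
```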
9 changes: 3 additions & 6 deletions sonnet/src/recurrent.py
@@ -20,7 +20,6 @@
 from typing import Optional, Sequence, Tuple, Union
 import uuid
 
-import six
 from sonnet.src import base
 from sonnet.src import conv
 from sonnet.src import initializers
@@ -39,8 +38,7 @@
 # pylint: enable=g-direct-tensorflow-import
 
 
-@six.add_metaclass(abc.ABCMeta)
-class RNNCore(base.Module):
+class RNNCore(base.Module, metaclass=abc.ABCMeta):
   """Base class for Recurrent Neural Network cores.
 
   This class defines the basic functionality that every core should
@@ -84,8 +82,7 @@ def initial_state(self, batch_size: types.IntegerLike, **kwargs):
     """
 
 
-@six.add_metaclass(abc.ABCMeta)
-class UnrolledRNN(base.Module):
+class UnrolledRNN(base.Module, metaclass=abc.ABCMeta):
   """Base class for unrolled Recurrent Neural Networks.
 
   This class is a generalization of :class:`RNNCore` which operates on
@@ -261,7 +258,7 @@ def static_unroll(
   outputs = None
   state = initial_state
   output_accs = None
-  for t in six.moves.range(num_steps):
+  for t in range(num_steps):
     outputs, state = _rnn_step(
         core,
         input_tas,
4 changes: 1 addition & 3 deletions sonnet/src/regularizers.py
@@ -17,13 +17,11 @@
 import abc
 from typing import Sequence
 
-import six
 from sonnet.src import types
 import tensorflow as tf
 
 
-@six.add_metaclass(abc.ABCMeta)
-class Regularizer:
+class Regularizer(abc.ABC):
   """Base regularizer class."""
 
   @abc.abstractmethod
13 changes: 1 addition & 12 deletions sonnet/src/utils.py
@@ -21,7 +21,6 @@
 from typing import Any, Callable, Dict, Generic, Optional, Sequence, Tuple, TypeVar, Union
 
 from absl import logging
-import six
 from sonnet.src import initializers
 import tabulate
 import tensorflow as tf
@@ -74,7 +73,7 @@ def _decorate_bound_method(*args, **kwargs):
 
     return _decorate_bound_method
 
-  argspec = getfullargspec(f)
+  argspec = inspect.getfullargspec(f)
   if argspec.args and argspec.args[0] == "self":
 
     @functools.wraps(f)
@@ -183,16 +182,6 @@ def smart_autograph_wrapper(*args, **kwargs):
   return smart_autograph_wrapper
 
 
-def getfullargspec(func):
-  """Gets the names and default values of a function's parameters."""
-  if six.PY2:
-    # Assume that we are running with PyType patched Python 2.7 and getargspec
-    # will not barf if `func` has type annotations.
-    return inspect.getargspec(func)  # pylint: disable=deprecated-method
-  else:
-    return inspect.getfullargspec(func)
-
-
 def variable_like(inputs: Union[tf.Tensor, tf.Variable],
                   initializer: initializers.Initializer = initializers.Zeros(),
                   trainable: Optional[bool] = None,
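With Python 2 gone, the `getfullargspec` shim in utils.py is deleted and its callers (see the auto_repr change in base.py above) call `inspect.getfullargspec` directly. Unlike the deprecated `inspect.getargspec`, it handles keyword-only arguments and annotations. A brief sketch with a made-up function:

```python
import inspect


def example(a, b=2, *, c: int = 3) -> int:
  return a + b + c


spec = inspect.getfullargspec(example)
print(spec.args)         # ['a', 'b']
print(spec.kwonlyargs)   # ['c']
print(spec.annotations)  # {'c': <class 'int'>, 'return': <class 'int'>}
```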
