
Commit 06d686e

WindQAQ authored and seanpmorgan committed
FIX: GELU name (#843)
* FIX: GELU name
1 parent 8d768db commit 06d686e

File tree

3 files changed: +10 −10 lines changed

tensorflow_addons/layers/__init__.py

Lines changed: 2 additions & 2 deletions

@@ -18,11 +18,11 @@
 from __future__ import division
 from __future__ import print_function

-from tensorflow_addons.layers.gelu import GeLU
+from tensorflow_addons.layers.gelu import GELU
 from tensorflow_addons.layers.maxout import Maxout
 from tensorflow_addons.layers.normalizations import GroupNormalization
 from tensorflow_addons.layers.normalizations import InstanceNormalization
 from tensorflow_addons.layers.optical_flow import CorrelationCost
 from tensorflow_addons.layers.poincare import PoincareNormalize
 from tensorflow_addons.layers.sparsemax import Sparsemax
-from tensorflow_addons.layers.wrappers import WeightNormalization
+from tensorflow_addons.layers.wrappers import WeightNormalization
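After this change the layers package exports the class under its new spelling, so downstream code imports GELU instead of GeLU. A minimal sketch of the import change (illustrative, not part of the diff):

# New public name after the rename:
from tensorflow_addons.layers import GELU

# The old spelling is no longer exported and would raise ImportError:
# from tensorflow_addons.layers import GeLU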

tensorflow_addons/layers/gelu.py

Lines changed: 4 additions & 4 deletions

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-"""Implements GeLU activation."""
+"""Implements GELU activation."""

 from __future__ import absolute_import
 from __future__ import division
@@ -23,7 +23,7 @@


 @tf.keras.utils.register_keras_serializable(package='Addons')
-class GeLU(tf.keras.layers.Layer):
+class GELU(tf.keras.layers.Layer):
     """Gaussian Error Linear Unit.

     A smoother version of ReLU generally used
@@ -40,7 +40,7 @@ class GeLU(tf.keras.layers.Layer):
     """

     def __init__(self, approximate=True, **kwargs):
-        super(GeLU, self).__init__(**kwargs)
+        super(GELU, self).__init__(**kwargs)
         self.approximate = approximate
         self.supports_masking = True

@@ -49,7 +49,7 @@ def call(self, inputs):

     def get_config(self):
         config = {'approximate': self.approximate}
-        base_config = super(GeLU, self).get_config()
+        base_config = super(GELU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))

     def compute_output_shape(self, input_shape):
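The class is registered with @tf.keras.utils.register_keras_serializable(package='Addons') and serializes its approximate flag in get_config, so the renamed layer should round-trip through a plain config. A minimal usage sketch under that assumption (the sample tensor is illustrative, not from the commit):

import tensorflow as tf
from tensorflow_addons.layers import GELU

# Apply the renamed layer with its default tanh-based approximation.
layer = GELU(approximate=True)
outputs = layer(tf.constant([[0.5, 1.2, -0.3]]))

# get_config() includes 'approximate' (see the diff above), so a
# config round-trip rebuilds an equivalent layer.
restored = GELU.from_config(layer.get_config())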

tensorflow_addons/layers/gelu_test.py

Lines changed: 4 additions & 4 deletions

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-"""Tests for GeLU activation."""
+"""Tests for GELU activation."""

 from __future__ import absolute_import
 from __future__ import division
@@ -21,18 +21,18 @@
 import numpy as np
 import tensorflow as tf
 from absl.testing import parameterized
-from tensorflow_addons.layers.gelu import GeLU
+from tensorflow_addons.layers.gelu import GELU
 from tensorflow_addons.utils import test_utils


 @parameterized.parameters([np.float16, np.float32, np.float64])
 @test_utils.run_all_in_graph_and_eager_modes
-class TestGeLU(tf.test.TestCase):
+class TestGELU(tf.test.TestCase):
     def test_random(self, dtype):
         x = np.array([[0.5, 1.2, -0.3]]).astype(dtype)
         val = np.array([[0.345714, 1.0617027, -0.11462909]]).astype(dtype)
         test_utils.layer_test(
-            GeLU, kwargs={'dtype': dtype}, input_data=x, expected_output=val)
+            GELU, kwargs={'dtype': dtype}, input_data=x, expected_output=val)


 if __name__ == '__main__':
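The expected values in test_random match the tanh approximation of GELU, which the layer uses by default (approximate=True). A standalone NumPy check of that formula against the test's expected output (illustrative, not part of the commit):

import numpy as np

# Tanh approximation: 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x**3)))
x = np.array([[0.5, 1.2, -0.3]], dtype=np.float32)
gelu = 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * x ** 3)))

print(gelu)  # approx. [[ 0.345714  1.0617027 -0.11462909]], the test's expected_output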
