@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-"""Implements GeLU activation."""
+"""Implements GELU activation."""

 from __future__ import absolute_import
 from __future__ import division
@@ -23,7 +23,7 @@


 @tf.keras.utils.register_keras_serializable(package='Addons')
-class GeLU(tf.keras.layers.Layer):
+class GELU(tf.keras.layers.Layer):
     """Gaussian Error Linear Unit.

     A smoother version of ReLU generally used
@@ -40,7 +40,7 @@ class GeLU(tf.keras.layers.Layer):
     """

     def __init__(self, approximate=True, **kwargs):
-        super(GeLU, self).__init__(**kwargs)
+        super(GELU, self).__init__(**kwargs)
         self.approximate = approximate
         self.supports_masking = True

@@ -49,7 +49,7 @@ def call(self, inputs):

     def get_config(self):
         config = {'approximate': self.approximate}
-        base_config = super(GeLU, self).get_config()
+        base_config = super(GELU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))

     def compute_output_shape(self, input_shape):
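The rename does not touch the math. For context, the `approximate` flag shown in `__init__` chooses between the two standard GELU forms from the original paper (https://arxiv.org/abs/1606.08415): the exact definition x * Phi(x) and its faster tanh approximation. A minimal reference sketch of both forms follows; this is illustrative only, not the Addons kernel, and the layer's actual `call` body is not part of this diff:

import math

import tensorflow as tf


def gelu_reference(x, approximate=True):
    # Illustrative GELU, for reference only; not the Addons implementation.
    x = tf.convert_to_tensor(x, dtype=tf.float32)
    if approximate:
        # Tanh approximation:
        # 0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x**3)))
        return 0.5 * x * (1.0 + tf.tanh(
            math.sqrt(2.0 / math.pi) * (x + 0.044715 * tf.pow(x, 3))))
    # Exact form: x * Phi(x) = 0.5 * x * (1 + erf(x / sqrt(2)))
    return 0.5 * x * (1.0 + tf.math.erf(x / math.sqrt(2.0)))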
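Because the class is registered under the 'Addons' package and round-trips its `approximate` flag through `get_config`, the renamed layer remains serializable by name. A short usage sketch, assuming the layer is exported as `tensorflow_addons.layers.GELU` (the export path is not part of this diff):

import tensorflow as tf
import tensorflow_addons as tfa  # assumed import alias


# Use the renamed layer inside a Keras model.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(4, input_shape=(8,)),
    tfa.layers.GELU(approximate=True),  # assumed export path
])

# The get_config()/from_config() pair restores the `approximate` flag,
# which is what the diff's super(GELU, self).get_config() call enables.
layer = tfa.layers.GELU(approximate=False)
restored = tfa.layers.GELU.from_config(layer.get_config())
assert restored.approximate is False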