@@ -12,15 +12,17 @@ module nf_activation_1d
   public :: gaussian, gaussian_prime
   public :: linear, linear_prime
   public :: relu, relu_prime
+  public :: leaky_relu, leaky_relu_prime
   public :: sigmoid, sigmoid_prime
   public :: softmax, softmax_prime
   public :: softplus, softplus_prime
   public :: step, step_prime
   public :: tanhf, tanh_prime
 
   interface
-    pure function activation_function(x)
+    pure function activation_function(x, alpha)
       real, intent(in) :: x(:)
+      real, intent(in), optional :: alpha
       real :: activation_function(size(x))
     end function activation_function
   end interface
@@ -30,7 +32,7 @@ end function activation_function
 pure function elu(x, alpha) result(res)
   ! Exponential Linear Unit (ELU) activation function.
   real, intent(in) :: x(:)
-  real, intent(in) :: alpha
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   where (x >= 0)
     res = x
@@ -43,7 +45,7 @@ pure function elu_prime(x, alpha) result(res)
   ! First derivative of the Exponential Linear Unit (ELU)
   ! activation function.
   real, intent(in) :: x(:)
-  real, intent(in) :: alpha
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   where (x >= 0)
     res = 1
@@ -52,51 +54,58 @@ pure function elu_prime(x, alpha) result(res)
   end where
 end function elu_prime
 
-pure function exponential(x) result(res)
+pure function exponential(x, alpha) result(res)
   ! Exponential activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = exp(x)
 end function exponential
 
-pure function gaussian(x) result(res)
+pure function gaussian(x, alpha) result(res)
   ! Gaussian activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = exp(-x**2)
 end function gaussian
 
-pure function gaussian_prime(x) result(res)
+pure function gaussian_prime(x, alpha) result(res)
   ! First derivative of the Gaussian activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = -2 * x * gaussian(x)
 end function gaussian_prime
 
-pure function linear(x) result(res)
+pure function linear(x, alpha) result(res)
   ! Linear activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = x
 end function linear
 
-pure function linear_prime(x) result(res)
+pure function linear_prime(x, alpha) result(res)
   ! First derivative of the linear activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = 1
 end function linear_prime
 
-pure function relu(x) result(res)
+pure function relu(x, alpha) result(res)
   !! Rectified Linear Unit (ReLU) activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = max(0., x)
 end function relu
 
-pure function relu_prime(x) result(res)
+pure function relu_prime(x, alpha) result(res)
   ! First derivative of the Rectified Linear Unit (ReLU) activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   where (x > 0)
     res = 1
@@ -105,52 +114,79 @@ pure function relu_prime(x) result(res)
   end where
 end function relu_prime
 
-pure function sigmoid(x) result(res)
+pure function leaky_relu(x, alpha) result(res)
+  !! Leaky Rectified Linear Unit (Leaky ReLU) activation function.
+  real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
+  real :: res(size(x))
+  res = max(alpha * x, x)
+end function leaky_relu
+
+pure function leaky_relu_prime(x, alpha) result(res)
+  ! First derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
+  real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
+  real :: res(size(x))
+  where (x > 0)
+    res = 1
+  elsewhere
+    res = alpha
+  end where
+end function leaky_relu_prime
+
+pure function sigmoid(x, alpha) result(res)
   ! Sigmoid activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = 1 / (1 + exp(-x))
 end function sigmoid
 
-pure function sigmoid_prime(x) result(res)
+pure function sigmoid_prime(x, alpha) result(res)
   ! First derivative of the sigmoid activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = sigmoid(x) * (1 - sigmoid(x))
 end function sigmoid_prime
 
-pure function softmax(x) result(res)
+pure function softmax(x, alpha) result(res)
   !! Softmax activation function
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = exp(x - maxval(x))
   res = res / sum(res)
 end function softmax
 
-pure function softmax_prime(x) result(res)
+pure function softmax_prime(x, alpha) result(res)
   !! Derivative of the softmax activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = softmax(x) * (1 - softmax(x))
 end function softmax_prime
 
-pure function softplus(x) result(res)
+pure function softplus(x, alpha) result(res)
   ! Softplus activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = log(exp(x) + 1)
 end function softplus
 
-pure function softplus_prime(x) result(res)
+pure function softplus_prime(x, alpha) result(res)
   ! First derivative of the softplus activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = exp(x) / (exp(x) + 1)
 end function softplus_prime
 
-pure function step(x) result(res)
+pure function step(x, alpha) result(res)
   ! Step activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   where (x > 0)
     res = 1
@@ -159,26 +195,29 @@ pure function step(x) result(res)
   end where
 end function step
 
-pure function step_prime(x) result(res)
+pure function step_prime(x, alpha) result(res)
   ! First derivative of the step activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = 0
 end function step_prime
 
-pure function tanhf(x) result(res)
+pure function tanhf(x, alpha) result(res)
   ! Tangent hyperbolic activation function.
   ! Same as the intrinsic tanh, but must be
   ! defined here so that we can use procedure
   ! pointer with it.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = tanh(x)
 end function tanhf
 
-pure function tanh_prime(x) result(res)
+pure function tanh_prime(x, alpha) result(res)
   ! First derivative of the tanh activation function.
   real, intent(in) :: x(:)
+  real, intent(in), optional :: alpha
   real :: res(size(x))
   res = 1 - tanh(x)**2
 end function tanh_prime