@@ -22,7 +22,7 @@ def cross_entropy(output, target, name="cross_entropy_loss"):
 
     Examples
     --------
-    >>> ce = tf.cost.cross_entropy(y_logits, y_target_logits)
+    >>> ce = tl.cost.cross_entropy(y_logits, y_target_logits)
 
     References
     -----------
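
Note on the hunk above: the fix corrects the module prefix, since `cross_entropy` lives in TensorLayer's `tl.cost` module, not in TensorFlow's `tf` namespace. A minimal usage sketch under the TF 1.x-era API visible in this diff; the placeholder shapes and one-hot target format are assumptions, not taken from the source:

    import tensorflow as tf
    import tensorlayer as tl

    # Hypothetical placeholders: network outputs and one-hot targets, 10 classes.
    y_logits = tf.placeholder(tf.float32, shape=[None, 10])
    y_target_logits = tf.placeholder(tf.float32, shape=[None, 10])
    ce = tl.cost.cross_entropy(y_logits, y_target_logits)
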
@@ -41,7 +41,7 @@ def cross_entropy(output, target, name="cross_entropy_loss"):
 def binary_cross_entropy(output, target, epsilon=1e-8, name='bce_loss'):
     """Computes binary cross entropy given `output`.
 
-    For brevity, let `x = `, `z = targets`. The logistic loss is
+    For brevity, let `x = output`, `z = target`. The binary cross entropy loss is
 
         loss(x, z) = - sum_i (z[i] * log(x[i]) + (1 - z[i]) * log(1 - x[i]))
 
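
As a sanity check on the docstring formula above, here is a plain-NumPy sketch of the same computation; the names `bce_reference` and `eps` are illustrative, not part of the library:

    import numpy as np

    def bce_reference(x, z, eps=1e-8):
        # x = output probabilities in (0, 1); z = binary targets.
        # eps plays the role of `epsilon` above, guarding against log(0).
        x = np.clip(x, eps, 1.0 - eps)
        return -np.sum(z * np.log(x) + (1.0 - z) * np.log(1.0 - x))
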
@@ -78,8 +78,9 @@ def mean_squared_error(output, target):
         A distribution with shape: [batch_size, n_feature].
     """
     with tf.name_scope("mean_squared_error_loss"):
-        mse = tf.reduce_sum(tf.squared_difference(output, target), reduction_indices=1)
-        return tf.reduce_mean(mse)
+        mse = tf.reduce_mean(tf.reduce_sum(tf.squared_difference(output, target),
+                                           reduction_indices=1))
+        return mse
 
 
 
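
The rewrite above only folds the two reductions into a single expression; the returned value is unchanged: sum of squared differences over the feature axis, then the mean over the batch. A NumPy sketch of the same computation on made-up arrays:

    import numpy as np

    output = np.array([[0.1, 0.9], [0.8, 0.2]])  # hypothetical [batch, feature]
    target = np.array([[0.0, 1.0], [1.0, 0.0]])
    mse = np.mean(np.sum((output - target) ** 2, axis=1))
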
@@ -223,7 +224,7 @@ def li_regularizer(scale):
 
     Returns
     --------
-    A function with signature `li(weights, name=None)` that applies L1 regularization.
+    A function with signature `li(weights, name=None)` that applies Li regularization.
 
     Raises
     ------
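
Per the corrected return description, `li_regularizer(scale)` returns a callable `li(weights, name=None)`. A minimal usage sketch; the weight shape and scale value are arbitrary examples:

    import tensorflow as tf
    import tensorlayer as tl

    W = tf.Variable(tf.random_normal([784, 800]), name='W')  # hypothetical weights
    li = tl.cost.li_regularizer(0.001)  # scale = 0.001 chosen for illustration
    penalty = li(W)                     # scalar regularization term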