16 changes: 16 additions & 0 deletions pydeepflow/device.py
@@ -194,6 +194,22 @@ def tanh(self, x):
        """
        return cp.tanh(x) if self.use_gpu else np.tanh(x)

    def cosh(self, x):
        """
        Computes the hyperbolic cosine of the input array.

        Parameters:
        -----------
        x : np.ndarray or cp.ndarray
            The input array.

        Returns:
        --------
        np.ndarray or cp.ndarray
            The hyperbolic cosine of the input.
        """
        return cp.cosh(x) if self.use_gpu else np.cosh(x)

    def sum(self, x, axis=None, keepdims=False):
        """
        Sums the elements of an array along a specified axis.
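For reference, a minimal sketch of how the new `cosh` helper is used alongside the existing `tanh` dispatch. The `Device(use_gpu=False)` constructor is an assumption inferred from the `self.use_gpu` flag above; the actual constructor is not shown in this diff.

```python
import numpy as np

from pydeepflow.device import Device

# Assumed constructor: a CPU-backed device, inferred from the use_gpu flag used above.
device = Device(use_gpu=False)

x = np.array([-2.0, 0.0, 2.0])
print(device.cosh(x))  # dispatches to np.cosh on CPU, cp.cosh on GPU
print(device.tanh(x))  # the existing helper that cosh mirrors
```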
46 changes: 46 additions & 0 deletions pydeepflow/losses.py
@@ -212,6 +212,48 @@ def huber_loss_derivative(y_true, y_pred, device, delta=1.0):
    is_small_error = device.abs(error) <= delta
    return device.where(is_small_error, error, delta * device.sign(error))

def log_cosh_loss(y_true, y_pred, device):
    """
    Computes the Log-Cosh loss for regression tasks.

    Parameters:
    -----------
    y_true : np.ndarray or cp.ndarray
        Ground truth values.
    y_pred : np.ndarray or cp.ndarray
        Predicted values.
    device : Device
        The device instance (CPU or GPU) to perform calculations.

    Returns:
    --------
    float
        The Log-Cosh loss.
    """
    error = y_pred - y_true
    return device.mean(device.log(device.cosh(error)))

def log_cosh_loss_derivative(y_true, y_pred, device):
    """
    Computes the derivative of the Log-Cosh loss.

    Parameters:
    -----------
    y_true : np.ndarray or cp.ndarray
        Ground truth values.
    y_pred : np.ndarray or cp.ndarray
        Predicted values.
    device : Device
        The device instance (CPU or GPU) to perform calculations.

    Returns:
    --------
    np.ndarray or cp.ndarray
        The derivative of the Log-Cosh loss with respect to predictions.
    """
    error = y_pred - y_true
    return device.tanh(error)

# Get the appropriate loss function
def get_loss_function(loss_name):
    """
@@ -242,6 +284,8 @@ def get_loss_function(loss_name):
        return hinge_loss
    elif loss_name == 'huber':
        return huber_loss
    elif loss_name == 'log_cosh':
        return log_cosh_loss
    else:
        raise ValueError(f"Unsupported loss function: {loss_name}")

@@ -275,5 +319,7 @@ def get_loss_derivative(loss_name):
        return hinge_loss_derivative
    elif loss_name == 'huber':
        return huber_loss_derivative
    elif loss_name == 'log_cosh':
        return log_cosh_loss_derivative
    else:
        raise ValueError(f"Unsupported loss derivative: {loss_name}")
19 changes: 18 additions & 1 deletion tests/test_losses.py
@@ -5,7 +5,8 @@
    binary_crossentropy, mse, mse_derivative,
    categorical_crossentropy, categorical_crossentropy_derivative,
    hinge_loss, hinge_loss_derivative,
    huber_loss, huber_loss_derivative,
    log_cosh_loss, log_cosh_loss_derivative
)
from pydeepflow.device import Device

@@ -85,6 +86,22 @@ def test_huber_loss_derivative(self):
        result = huber_loss_derivative(y_true, y_pred, self.device_cpu, delta)
        np.testing.assert_array_almost_equal(result, expected)

    def test_log_cosh_loss(self):
        y_true = np.array([1, 2, 3])
        y_pred = np.array([2, 2, 4])
        error = y_pred - y_true
        expected = np.mean(np.log(np.cosh(error)))
        result = log_cosh_loss(y_true, y_pred, self.device_cpu)
        self.assertAlmostEqual(result, expected)

    def test_log_cosh_loss_derivative(self):
        y_true = np.array([1.0, 2.0, 3.0])
        y_pred = np.array([1.5, 1.7, 2.5])
        error = y_pred - y_true
        expected = np.tanh(error)
        result = log_cosh_loss_derivative(y_true, y_pred, self.device_cpu)
        np.testing.assert_array_almost_equal(result, expected)

if __name__ == "__main__":
    unittest.main()
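One caveat worth noting: `log(cosh(error))` computed directly overflows for large errors, since `cosh(x)` grows like `exp(|x|)/2` and exceeds float64 range around `|x| ≈ 710`. A mathematically equivalent, numerically stable form uses the identity `log(cosh(x)) = |x| + log1p(exp(-2|x|)) - log(2)`. A plain-NumPy sketch of that identity (not part of this PR):

```python
import numpy as np

def stable_log_cosh(x):
    """Equivalent to np.log(np.cosh(x)), but safe for large |x|.

    Uses the identity log(cosh(x)) = |x| + log1p(exp(-2|x|)) - log(2).
    """
    ax = np.abs(x)
    return ax + np.log1p(np.exp(-2.0 * ax)) - np.log(2.0)

x = np.array([0.5, 50.0, 800.0])
print(stable_log_cosh(x))        # finite for all entries
print(np.log(np.cosh(x[:2])))    # matches the first two; cosh(800.0) overflows to inf
```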