Skip to content
32 changes: 32 additions & 0 deletions machine_learning/loss_functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -663,6 +663,38 @@ def kullback_leibler_divergence(y_true: np.ndarray, y_pred: np.ndarray) -> float
return np.sum(kl_loss)


def root_mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Calculate the Root Mean Squared Error (RMSE) between two arrays.

    RMSE is a standard metric used to evaluate the accuracy of regression
    models. It measures the average magnitude of the prediction errors,
    giving higher weight to larger errors due to squaring.

    RMSE = sqrt( (1/n) * Σ (y_true - y_pred) ^ 2 )

    Reference: https://en.wikipedia.org/wiki/Root_mean_square_deviation

    Parameters:
    - y_true: The true (actual) values
    - y_pred: The predicted values

    Returns:
    - float: The RMSE between y_true and y_pred

    Raises:
    - ValueError: If the input arrays have different lengths

    >>> y_true = np.array([100, 200, 300])
    >>> y_pred = np.array([110, 190, 310])
    >>> root_mean_squared_error(y_true, y_pred)
    10.0
    >>> root_mean_squared_error(np.array([1.0, 2.0, 3.0]), np.array([1.0, 2.0, 3.0]))
    0.0
    >>> root_mean_squared_error(np.array([1, 2]), np.array([1, 2, 3]))
    Traceback (most recent call last):
        ...
    ValueError: Input arrays must have the same length.
    """
    # Accept array-likes (lists, tuples) as well as ndarrays.
    y_true, y_pred = np.asarray(y_true), np.asarray(y_pred)

    # Mismatched lengths would silently broadcast or raise a cryptic
    # NumPy error; fail fast with a clear message instead.
    if len(y_true) != len(y_pred):
        raise ValueError("Input arrays must have the same length.")

    # float() unwraps np.float64 so the return matches the annotation
    # and the doctest output is stable across NumPy versions.
    return float(np.sqrt(np.mean((y_pred - y_true) ** 2)))


if __name__ == "__main__":
import doctest

Expand Down
Loading