def root_mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    """
    Calculate the Root Mean Squared Error (RMSE) between true and predicted values.

    RMSE is a standard metric for evaluating regression models. It measures
    the average magnitude of prediction errors, giving higher weight to
    larger errors because the errors are squared before averaging.

    RMSE = sqrt((1/n) * Σ (y_true - y_pred) ** 2)

    Reference: https://en.wikipedia.org/wiki/Root_mean_square_deviation

    Parameters:
    - y_true: Ground-truth (actual) values.
    - y_pred: Predicted values; must have the same shape as y_true.

    Returns:
    - float: The RMSE between y_pred and y_true.

    Raises:
    - ValueError: If the input arrays have different shapes.

    >>> y_true = np.array([100, 200, 300])
    >>> y_pred = np.array([110, 190, 310])
    >>> root_mean_squared_error(y_true, y_pred)
    10.0
    >>> root_mean_squared_error(np.array([1.0, 2.0]), np.array([1.0, 2.0]))
    0.0
    >>> root_mean_squared_error(np.array([1, 2]), np.array([1, 2, 3]))
    Traceback (most recent call last):
        ...
    ValueError: Input arrays must have the same shape.
    """
    # Accept any array-like input (lists, tuples, ndarrays).
    y_true = np.asarray(y_true)
    y_pred = np.asarray(y_pred)

    # Guard against silently broadcasting mismatched shapes, which would
    # produce a numerically valid but meaningless result.
    if y_true.shape != y_pred.shape:
        raise ValueError("Input arrays must have the same shape.")

    # Cast to a plain Python float so the return type matches the annotation
    # and doctest output is stable across NumPy versions (np.float64 reprs
    # differently in NumPy >= 2.0).
    return float(np.sqrt(np.mean((y_pred - y_true) ** 2)))