From 81b29066d206217cb689fe2c9c8d530a1aa66cbe Mon Sep 17 00:00:00 2001
From: Arnav Kohli <95236897+THEGAMECHANGER416@users.noreply.github.com>
Date: Sun, 8 Oct 2023 21:34:43 +0530
Subject: [PATCH] Created folder for losses in Machine_Learning (#9969)

* Created folder for losses in Machine_Learning

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update machine_learning/losses/binary_cross_entropy.py

Co-authored-by: Christian Clauss

* Update machine_learning/losses/mean_squared_error.py

Co-authored-by: Christian Clauss

* Update machine_learning/losses/binary_cross_entropy.py

Co-authored-by: Christian Clauss

* Update mean_squared_error.py

* Update machine_learning/losses/mean_squared_error.py

Co-authored-by: Tianyi Zheng

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* renamed: losses -> loss_functions

* updated 2 files

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Update mean_squared_error.py

* Update mean_squared_error.py

* Update binary_cross_entropy.py

* Update mean_squared_error.py

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Christian Clauss
Co-authored-by: Tianyi Zheng
---
 .../loss_functions/binary_cross_entropy.py | 59 +++++++++++++++++++
 .../loss_functions/mean_squared_error.py   | 51 ++++++++++++++++
 2 files changed, 110 insertions(+)
 create mode 100644 machine_learning/loss_functions/binary_cross_entropy.py
 create mode 100644 machine_learning/loss_functions/mean_squared_error.py

diff --git a/machine_learning/loss_functions/binary_cross_entropy.py b/machine_learning/loss_functions/binary_cross_entropy.py
new file mode 100644
index 000000000..4ebca7f21
--- /dev/null
+++ b/machine_learning/loss_functions/binary_cross_entropy.py
@@ -0,0 +1,59 @@
+"""
+Binary Cross-Entropy (BCE) Loss Function
+
+Description:
+Quantifies dissimilarity between true labels (0 or 1) and predicted probabilities.
+It's widely used in binary classification tasks.
+
+Formula:
+BCE = -(1/n) * Σ(y_true * log(y_pred) + (1 - y_true) * log(1 - y_pred))
+
+Source:
+[Wikipedia - Cross entropy](https://en.wikipedia.org/wiki/Cross_entropy)
+"""
+
+import numpy as np
+
+
+def binary_cross_entropy(
+    y_true: np.ndarray, y_pred: np.ndarray, epsilon: float = 1e-15
+) -> float:
+    """
+    Calculate the BCE loss between true labels and predicted probabilities.
+
+    Parameters:
+    - y_true: True binary labels (0 or 1).
+    - y_pred: Predicted probabilities for class 1.
+    - epsilon: Small constant to avoid numerical instability.
+
+    Returns:
+    - bce_loss: Binary Cross-Entropy Loss.
+
+    Example Usage:
+    >>> true_labels = np.array([0, 1, 1, 0, 1])
+    >>> predicted_probs = np.array([0.2, 0.7, 0.9, 0.3, 0.8])
+    >>> binary_cross_entropy(true_labels, predicted_probs)
+    0.2529995012327421
+    >>> true_labels = np.array([0, 1, 1, 0, 1])
+    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
+    >>> binary_cross_entropy(true_labels, predicted_probs)
+    Traceback (most recent call last):
+    ...
+    ValueError: Input arrays must have the same length.
+    """
+    if len(y_true) != len(y_pred):
+        raise ValueError("Input arrays must have the same length.")
+    # Clip predicted probabilities to avoid log(0) and log(1)
+    y_pred = np.clip(y_pred, epsilon, 1 - epsilon)
+
+    # Calculate binary cross-entropy loss
+    bce_loss = -(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))
+
+    # Take the mean over all samples
+    return np.mean(bce_loss)
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
diff --git a/machine_learning/loss_functions/mean_squared_error.py b/machine_learning/loss_functions/mean_squared_error.py
new file mode 100644
index 000000000..d2b0e1e15
--- /dev/null
+++ b/machine_learning/loss_functions/mean_squared_error.py
@@ -0,0 +1,51 @@
+"""
+Mean Squared Error (MSE) Loss Function
+
+Description:
+MSE measures the mean squared difference between true values and predicted values.
+It serves as a measure of the model's accuracy in regression tasks.
+
+Formula:
+MSE = (1/n) * Σ(y_true - y_pred)^2
+
+Source:
+[Wikipedia - Mean squared error](https://en.wikipedia.org/wiki/Mean_squared_error)
+"""
+
+import numpy as np
+
+
+def mean_squared_error(y_true: np.ndarray, y_pred: np.ndarray) -> float:
+    """
+    Calculate the Mean Squared Error (MSE) between two arrays.
+
+    Parameters:
+    - y_true: The true values (ground truth).
+    - y_pred: The predicted values.
+
+    Returns:
+    - mse: The Mean Squared Error between y_true and y_pred.
+
+    Example usage:
+    >>> true_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+    >>> predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])
+    >>> mean_squared_error(true_values, predicted_values)
+    0.028000000000000032
+    >>> true_labels = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+    >>> predicted_probs = np.array([0.3, 0.8, 0.9, 0.2])
+    >>> mean_squared_error(true_labels, predicted_probs)
+    Traceback (most recent call last):
+    ...
+    ValueError: Input arrays must have the same length.
+    """
+    if len(y_true) != len(y_pred):
+        raise ValueError("Input arrays must have the same length.")
+
+    squared_errors = (y_true - y_pred) ** 2
+    return np.mean(squared_errors)
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
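
---

A quick sanity check for reviewers (not part of the patch): the snippet below is a
minimal sketch, assuming only NumPy. It recomputes both losses directly from the
formulas documented in the two modules above and should reproduce the doctest
values; the variable names here are illustrative.

import numpy as np

# Binary cross-entropy, computed directly from its definition
y_true = np.array([0, 1, 1, 0, 1])
y_pred = np.array([0.2, 0.7, 0.9, 0.3, 0.8])
eps = 1e-15
p = np.clip(y_pred, eps, 1 - eps)  # guard against log(0)
print(-np.mean(y_true * np.log(p) + (1 - y_true) * np.log(1 - p)))
# 0.2529995012327421 -- matches the doctest in binary_cross_entropy.py

# Mean squared error on the same values as the doctest
true_values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
predicted_values = np.array([0.8, 2.1, 2.9, 4.2, 5.2])
print(np.mean((true_values - predicted_values) ** 2))
# 0.028000000000000032 -- matches the doctest in mean_squared_error.py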