From afd47d278fb1135191037dbafb9b9769ae2ae8cc Mon Sep 17 00:00:00 2001 From: dhruvi003 Date: Mon, 13 Oct 2025 15:40:54 +0530 Subject: [PATCH 1/2] Add doctests to back_propagation_neural_network.py and huffman.py --- data_compression/huffman.py | 17 ++++++++++++++ .../back_propagation_neural_network.py | 22 +++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/data_compression/huffman.py b/data_compression/huffman.py index 44eda6c03180..4ac230fc333c 100644 --- a/data_compression/huffman.py +++ b/data_compression/huffman.py @@ -39,6 +39,13 @@ def build_tree(letters: list[Letter]) -> Letter | TreeNode: """ Run through the list of Letters and build the min heap for the Huffman Tree. + + >>> letters = [Letter('a', 5), Letter('b', 9), Letter('c', 12), Letter('d', 13)] + >>> root = build_tree(letters) + >>> isinstance(root, TreeNode) + True + >>> root.freq + 39 """ response: list[Letter | TreeNode] = list(letters) while len(response) > 1: @@ -55,6 +62,16 @@ def traverse_tree(root: Letter | TreeNode, bitstring: str) -> list[Letter]: """ Recursively traverse the Huffman Tree to set each Letter's bitstring dictionary, and return the list of Letters + + >>> letters = [Letter('a', 2), Letter('b', 3), Letter('c', 4)] + >>> root = build_tree(letters) + >>> result = traverse_tree(root, "") + >>> sorted([ltr.letter for ltr in result]) + ['a', 'b', 'c'] + >>> all(ltr.bitstring[ltr.letter] for ltr in result) + True + >>> sum(ltr.freq for ltr in result) + 9 """ if isinstance(root, Letter): root.bitstring[root.letter] = bitstring diff --git a/neural_network/back_propagation_neural_network.py b/neural_network/back_propagation_neural_network.py index 182f759c5fc7..0214aca3fb6f 100644 --- a/neural_network/back_propagation_neural_network.py +++ b/neural_network/back_propagation_neural_network.py @@ -23,6 +23,15 @@ def sigmoid(x: np.ndarray) -> np.ndarray: + """ + Compute the sigmoid activation function + + >>> import numpy as np + >>> np.allclose(sigmoid(np.array([0])), np.array([0.5])) + True 
+ >>> np.allclose(sigmoid(np.array([-1, 0, 1])), np.array([0.26894142, 0.5, 0.73105858])) + True + """ return 1 / (1 + np.exp(-x)) @@ -158,6 +167,19 @@ def train(self, xdata, ydata, train_round, accuracy): return None def cal_loss(self, ydata, ydata_): + """ + Calculate the sum of squared errors (SSE) loss and its gradient. + + >>> import numpy as np + >>> bp = BPNN() + >>> y_true = np.asmatrix([[1.0], [0.5]]) + >>> y_pred = np.asmatrix([[0.8], [0.3]]) + >>> loss, grad = bp.cal_loss(y_true, y_pred) + >>> float(round(loss, 2)) + 0.08 + >>> np.allclose(grad, np.array([[-0.4], [-0.4]])) + True + """ self.loss = np.sum(np.power((ydata - ydata_), 2)) self.loss_gradient = 2 * (ydata_ - ydata) # vector (shape is the same as _ydata.shape) From 821b64cbb518af3ee9995f3aac7d0a0078a5db87 Mon Sep 17 00:00:00 2001 From: dhruvi003 Date: Mon, 13 Oct 2025 16:00:32 +0530 Subject: [PATCH 2/2] Changed as per PEP 8 --- neural_network/back_propagation_neural_network.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/neural_network/back_propagation_neural_network.py b/neural_network/back_propagation_neural_network.py index 0214aca3fb6f..b1c5451735ee 100644 --- a/neural_network/back_propagation_neural_network.py +++ b/neural_network/back_propagation_neural_network.py @@ -29,7 +29,10 @@ def sigmoid(x: np.ndarray) -> np.ndarray: >>> import numpy as np >>> np.allclose(sigmoid(np.array([0])), np.array([0.5])) True - >>> np.allclose(sigmoid(np.array([-1, 0, 1])), np.array([0.26894142, 0.5, 0.73105858])) + >>> np.allclose( ... sigmoid(np.array([-1, 0, 1])), ... np.array([0.26894142, 0.5, 0.73105858]) ... ) True """ return 1 / (1 + np.exp(-x))