Skip to content

Commit afd47d2

Browse files
committed
Add Doctest to back_propagation_neural_network and huffman.py
1 parent 788d95b commit afd47d2

File tree

2 files changed

+39
-0
lines changed

2 files changed

+39
-0
lines changed

data_compression/huffman.py

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,13 @@ def build_tree(letters: list[Letter]) -> Letter | TreeNode:
3939
"""
4040
Run through the list of Letters and build the min heap
4141
for the Huffman Tree.
42+
43+
>>> letters = [Letter('a', 5), Letter('b', 9), Letter('c', 12), Letter('d', 13)]
44+
>>> root = build_tree(letters)
45+
>>> isinstance(root, TreeNode)
46+
True
47+
>>> root.freq
48+
39
4249
"""
4350
response: list[Letter | TreeNode] = list(letters)
4451
while len(response) > 1:
@@ -55,6 +62,16 @@ def traverse_tree(root: Letter | TreeNode, bitstring: str) -> list[Letter]:
5562
"""
5663
Recursively traverse the Huffman Tree to set each
5764
Letter's bitstring dictionary, and return the list of Letters
65+
66+
>>> letters = [Letter('a', 2), Letter('b', 3), Letter('c', 4)]
67+
>>> root = build_tree(letters)
68+
>>> result = traverse_tree(root, "")
69+
>>> sorted([l.letter for l in result])
70+
['a', 'b', 'c']
71+
>>> all(l.bitstring[l.letter] for l in result)
72+
True
73+
>>> sum(l.freq for l in result)
74+
9
5875
"""
5976
if isinstance(root, Letter):
6077
root.bitstring[root.letter] = bitstring

neural_network/back_propagation_neural_network.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,15 @@
2323

2424

2525
def sigmoid(x: np.ndarray) -> np.ndarray:
    """
    Apply the logistic sigmoid activation element-wise.

    Maps each input value into the open interval (0, 1).

    >>> import numpy as np
    >>> np.allclose(sigmoid(np.array([0])), np.array([0.5]))
    True
    >>> np.allclose(sigmoid(np.array([-1, 0, 1])), np.array([0.26894142, 0.5, 0.73105858]))
    True
    """
    # 1 / (1 + e^{-x}); computed via an intermediate for readability
    exponent = np.exp(-x)
    return 1 / (1 + exponent)
2736

2837

@@ -158,6 +167,19 @@ def train(self, xdata, ydata, train_round, accuracy):
158167
return None
159168

160169
def cal_loss(self, ydata, ydata_):
170+
"""
171+
Calculate Mean Squared Error (MSE) loss and its gradient.
172+
173+
>>> import numpy as np
174+
>>> bp = BPNN()
175+
>>> y_true = np.asmatrix([[1.0], [0.5]])
176+
>>> y_pred = np.asmatrix([[0.8], [0.3]])
177+
>>> loss, grad = bp.cal_loss(y_true, y_pred)
178+
>>> float(round(loss, 2))
179+
0.08
180+
>>> np.allclose(grad, np.array([[-0.4], [-0.4]]))
181+
True
182+
"""
161183
self.loss = np.sum(np.power((ydata - ydata_), 2))
162184
self.loss_gradient = 2 * (ydata_ - ydata)
163185
# vector (shape is the same as _ydata.shape)

0 commit comments

Comments
 (0)