Skip to content

Commit 7ea2f66

Browse files
enhancement
1 parent 7530a41 commit 7ea2f66

File tree

1 file changed

+54
-0
lines changed

1 file changed

+54
-0
lines changed
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
import numpy as np
2+
3+
# -------------------- Naive Linear Regression --------------------
4+
def naive_linear_regression(X, y, learning_rate=0.01, epochs=1000):
    """Fit linear-regression parameters by batch gradient descent, written
    with explicit element-by-element loops (the deliberately "naive" version).

    Parameters
    ----------
    X : 2D array of shape (m, n) — input features, one sample per row.
    y : 2D array of shape (m, 1) — target values as a column vector.
    learning_rate : step size applied to each gradient update.
    epochs : number of full passes over the training data.

    Returns
    -------
    theta : (n, 1) array of learned parameters (initialized to zeros).
    """
    n_samples, n_features = X.shape
    theta = np.zeros((n_features, 1))

    for _ in range(epochs):
        # Forward pass: each prediction is the dot product of one sample
        # row with theta, accumulated one term at a time.
        preds = np.array([
            [sum(X[row][col] * theta[col][0] for col in range(n_features))]
            for row in range(n_samples)
        ])
        residuals = preds - y

        # Batch gradient per coordinate: residuals are fixed for the whole
        # epoch, so every theta[j] update uses the same error vector.
        for col in range(n_features):
            grad_sum = sum(
                residuals[row][0] * X[row][col] for row in range(n_samples)
            )
            theta[col][0] -= learning_rate * grad_sum / n_samples

    return theta
29+
30+
# -------------------- Vectorized Linear Regression --------------------
31+
def vectorized_linear_regression(X, y, learning_rate=0.01, epochs=1000):
    """Fit linear-regression parameters by batch gradient descent using
    whole-matrix operations (no Python-level loops over samples).

    Parameters
    ----------
    X : 2D array of shape (m, n) — input features, one sample per row.
    y : 2D array of shape (m, 1) — target values as a column vector.
    learning_rate : step size applied to each gradient update.
    epochs : number of full passes over the training data.

    Returns
    -------
    theta : (n, 1) array of learned parameters (initialized to zeros).
    """
    n_samples, n_features = X.shape
    theta = np.zeros((n_features, 1))

    for _ in range(epochs):
        residuals = X.dot(theta) - y                     # (m, 1) errors
        # Full-batch gradient of the MSE objective, averaged over samples.
        theta = theta - learning_rate * (X.T.dot(residuals) / n_samples)

    return theta
43+
44+
# -------------------- Test Both Implementations --------------------
45+
if __name__ == "__main__":
    # Tiny synthetic dataset generated from y = 1*x1 + 2*x2 + 3.
    # NOTE(review): X carries no bias column, so neither solver can recover
    # the +3 intercept — the fitted thetas only approximate this data.
    features = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
    targets = features.dot(np.array([[1], [2]])) + 3

    # Run both implementations on the same data and show their parameters.
    print("Theta naive:\n", naive_linear_regression(features, targets))
    print("Theta vectorized:\n", vectorized_linear_regression(features, targets))

0 commit comments

Comments
 (0)