
Commit 460197e

📝 added comments to 5.3.

1 parent 232b495 · commit 460197e

1 file changed: Regularization/scratching/5.3_vectorized.py
21 additions & 5 deletions
@@ -12,32 +12,40 @@ def define_X_include_bias(X):
 def problem(X):
     return np.sin(np.dot(np.pi, X))

+# Define the mean model calculation
 def meanModel(models):
     return np.mean(models, axis=0)

+# Define the bias calculation
 def computeBias(mean_model):
     z = np.square(mean_model - y)
     return np.mean(z)

+# Define the variance calculation
 def computeVariance(E_d, mean_model):
     z = np.square(E_d - mean_model)
     var_x = np.mean(z)
     return np.mean(var_x)

+# Define the E_out calculation
 def computeEout(bias, variance):
     return bias + variance

 if __name__ == "__main__":
-    # Generate X features
+
+    # Generate X from -1 to 1 with a hundred values
     X = np.linspace(-1, 1, 100)
-    # Compute y values
+
+    # Make y values from the problem function (sin)
     y = problem(X)

-    # Define X data (bias + weigths)
+    # Add the bias term to X as the first column
     X_b = define_X_include_bias(X)

+    # Collect the predictions on the full dataset for both models
     E_d_linear = []
     E_d_ridge = []
+
     # Initialize subplots
     fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6))

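For reference, the helper functions added above compute the standard bias-variance decomposition, which the script approximates by averaging over 1000 random datasets and 100 grid points. Writing f(x) = sin(pi x) for the target, g^(D) for a model fit on dataset D, and g-bar for the mean model, the quantities are:

\bar{g}(x) = \mathbb{E}_{\mathcal{D}}\big[g^{(\mathcal{D})}(x)\big], \qquad
\text{bias} = \mathbb{E}_{x}\big[(\bar{g}(x) - f(x))^2\big], \qquad
\text{var} = \mathbb{E}_{x}\,\mathbb{E}_{\mathcal{D}}\big[(g^{(\mathcal{D})}(x) - \bar{g}(x))^2\big], \qquad
E_{\text{out}} \approx \text{bias} + \text{var}.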
@@ -56,26 +64,34 @@ def computeEout(bias, variance):

     # Perform the training and plotting in the loop
     for _ in range(1000):
+        # Create the linear model
         linearModel = LinearRegression()
+        # Create the ridge model
         ridgeModel = RidgeRegression(lambda_param=0.5)

         # Randomly select 2 values from the dataset
         rands_X = np.random.choice(X, 2)
-        y_sample = problem(rands_X)
+
+        # Create X and y from the sampled values for training
         X_sample = define_X_include_bias(rands_X)
+        y_sample = problem(rands_X)

+        # Train both models to fit the data with the normal equation
         linearModel.training(X=X_sample, y=y_sample, type="normalEq")
         ridgeModel.training(X=X_sample, y=y_sample, type="normalEq")

+        # Predict over the full dataset with each model, even though it was trained on only 2 points
         y_pred_lin = linearModel.prediction(X_b)
         E_d_linear.append(y_pred_lin)

         y_pred_ridge = ridgeModel.prediction(X_b)
         E_d_ridge.append(y_pred_ridge)

+        # Plot the prediction line for each iteration's random sample
         ax1.plot(X, y_pred_lin, c="black", alpha=0.05)
         ax2.plot(X, y_pred_ridge, c="black", alpha=0.05)

+    # Calculate the mean model, bias, variance, and E_out for both models to compare
+    # the unregularized and regularized fits
     mean_linearModel = meanModel(E_d_linear)
     mean_ridgeModel = meanModel(E_d_ridge)

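The LinearRegression and RidgeRegression classes live elsewhere in the repo, so this diff does not show what training(..., type="normalEq") actually does. As context, here is a minimal sketch of a normal-equation fit for the ridge case; the function name and signature are illustrative, not the repo's API, and lambda_param = 0 recovers ordinary least squares (the LinearRegression case):

import numpy as np

def ridge_normal_eq(X_b, y, lambda_param):
    # Closed-form solution w = (X^T X + lambda * I)^(-1) X^T y;
    # X_b is assumed to already carry the bias column.
    n_features = X_b.shape[1]
    A = X_b.T @ X_b + lambda_param * np.eye(n_features)
    return np.linalg.solve(A, X_b.T @ y)

Note that implementations often leave the bias column unregularized; whether the repo's version does is not visible in this diff.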
@@ -100,9 +116,9 @@ def computeEout(bias, variance):
     ax1.set_title(f'Linear Regression \n Bias = {bias_linearModel:.2f} | Variance = {variance_linearModel:.2f} | E out = {eOut_linearModel:.2f}')
     ax2.set_title(f'Ridge Regression \n Bias = {bias_ridgeModel:.2f} | Variance = {variance_ridgeModel:.2f} | E out = {eOut_ridgeModel:.2f}')

+    # Plot the mean prediction lines
     ax1.plot(X, mean_linearModel, c="red", label="Mean Model", linewidth=2)
     ax2.plot(X, mean_ridgeModel, c="red", label="Mean Model", linewidth=2)

-    # Display the plots
     plt.tight_layout()
     plt.show()
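The body of define_X_include_bias appears only in the hunk headers. Based on the comment added above its call site ("Add the bias term to X as the first column"), a plausible reconstruction, offered only as a sketch since the real implementation is outside this diff:

import numpy as np

def define_X_include_bias(X):
    # Reshape 1-D inputs to a column and prepend a column of ones
    X = np.asarray(X).reshape(-1, 1)
    return np.hstack([np.ones_like(X), X])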
