L2 Regularization Example in Plain Python
Here's a minimal model — a single linear neuron trained by gradient descent — with L2 regularization built from scratch using only NumPy:
import numpy as np
import matplotlib.pyplot as plt
# --- Synthetic dataset -------------------------------------------------
# Fix the RNG so every run reproduces the same data and initial weight.
np.random.seed(1)
X = np.random.randn(100, 1)
# Ground truth is y = 3x plus Gaussian noise (std 0.5).
y = 3 * X[:, 0] + np.random.randn(100) * 0.5

# --- Model parameters: a single weight and a bias ----------------------
w = np.random.randn()
b = 0.0

# --- Hyperparameters ---------------------------------------------------
lr = 0.1          # gradient-descent step size
epochs = 100      # number of full passes over the data
lambda_l2 = 0.1   # L2 regularization strength

# --- Loss curves collected for plotting --------------------------------
loss_history_no_l2 = []
loss_history_l2 = []
# --- Baseline: plain gradient descent, no regularization ---------------
w1 = w
b1 = b
for _ in range(epochs):
    y_pred = w1 * X[:, 0] + b1
    err = y - y_pred
    # Mean-squared-error loss; recorded before this epoch's update.
    loss = np.mean(err ** 2)
    loss_history_no_l2.append(loss)
    # Analytic gradients of the MSE w.r.t. w1 and b1.
    dw = -2 * np.mean(X[:, 0] * err)
    db = -2 * np.mean(err)
    w1 -= lr * dw
    b1 -= lr * db
# --- Same training, now with an L2 penalty on the weight ---------------
w2 = w
b2 = b
for _ in range(epochs):
    y_pred = w2 * X[:, 0] + b2
    err = y - y_pred
    # Loss = MSE + lambda * w^2 (bias is conventionally not penalized).
    loss = np.mean(err ** 2) + lambda_l2 * (w2 ** 2)
    loss_history_l2.append(loss)
    # The penalty term contributes 2 * lambda * w2 to the weight gradient.
    dw = -2 * np.mean(X[:, 0] * err) + 2 * lambda_l2 * w2
    db = -2 * np.mean(err)
    w2 -= lr * dw
    b2 -= lr * db
# --- Compare the two loss curves on one chart --------------------------
curves = (
    (loss_history_no_l2, 'No L2'),
    (loss_history_l2, 'With L2'),
)
for history, curve_label in curves:
    plt.plot(history, label=curve_label)
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('Training Loss Over Epochs')
plt.legend()
plt.grid(True)
plt.show()
Output:

L2 Regularization in Neural Networks – Basic Math Concepts
