CNN Pattern Detection Example with Simple Python

Python Implementation (Mini CNN from Scratch)

Here’s a basic Python simulation with one filter and a single training step:

import numpy as np

# Step 1: Input image (4x4)
image = np.array([
    [1, 2, 3, 0],
    [4, 5, 6, 1],
    [7, 8, 9, 2],
    [0, 1, 2, 3]
])

# Step 2: Random 3x3 filter (pattern detector)
kernel = np.random.randn(3, 3)

# Step 3: Convolution operation (valid padding; no kernel flip, i.e. cross-correlation, as in standard CNN layers)
def convolve(image, kernel):
    h, w = image.shape
    kh, kw = kernel.shape
    output = np.zeros((h - kh + 1, w - kw + 1))
    for i in range(h - kh + 1):
        for j in range(w - kw + 1):
            patch = image[i:i+kh, j:j+kw]
            output[i, j] = np.sum(patch * kernel)
    return output

# Step 4: ReLU activation
def relu(x):
    return np.maximum(0, x)

# Step 5: Forward pass
conv_output = convolve(image, kernel)
activated_output = relu(conv_output)

# Step 6: Flatten and connect to output neuron (e.g., binary classification)
flattened = activated_output.flatten()
weights = np.random.randn(flattened.size)
bias = 0.0

# Output prediction (sigmoid activation)
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

z = np.dot(flattened, weights) + bias
pred = sigmoid(z)

# Step 7: Compute loss (binary cross-entropy, assume label = 1)
label = 1
eps = 1e-7  # clip the prediction so log() never sees exactly 0 or 1
pred = np.clip(pred, eps, 1 - eps)
loss = -(label * np.log(pred) + (1 - label) * np.log(1 - pred))
print(f"Prediction: {pred:.4f}, Loss: {loss:.4f}")

# Step 8: Backpropagation (for just the output weights & bias)
lr = 0.01
# For a sigmoid output with binary cross-entropy loss, dL/dz simplifies to (pred - label)
dL_dz = pred - label
weights -= lr * dL_dz * flattened
bias -= lr * dL_dz

# Note: In real CNNs, we’d also update the kernel via backprop through convolution.
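
Following up on that note, here is a minimal sketch of what the kernel update could look like, reusing the convolve() helper defined above. It assumes the forward-pass variables (conv_output, activated_output, dL_dz) are still in scope; strictly speaking these gradients should use the weights from before the Step 8 update, so in practice you would compute all gradients first and only then apply the updates.

# (Optional) Kernel update via backprop through the convolution
# Gradient flowing back from the output neuron into the flattened feature map
dL_dflattened = dL_dz * weights                                  # shape: (4,)
dL_dactivated = dL_dflattened.reshape(activated_output.shape)    # shape: (2, 2)

# ReLU passes gradients only where its input was positive
dL_dconv = dL_dactivated * (conv_output > 0)

# Gradient of the kernel: correlate the input image with the upstream gradient.
# For this valid convolution, convolve(image, dL_dconv) returns a 3x3 array,
# which matches the kernel's shape.
dL_dkernel = convolve(image, dL_dconv)

# Kernel update (same learning rate as before)
kernel -= lr * dL_dkernel

Repeating Steps 5–8 plus this kernel update in a loop would give a complete, if tiny, training loop.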

CNN Pattern Detection Tutorial – Summary