Understanding the fundamentals of neural networks through interactive examples
Explore how neurons are connected across different layers to form a neural network: each neuron in one layer feeds its output to every neuron in the next layer, so a signal flows from the input layer, through any hidden layers, to the output layer. A small sketch of this wiring follows.
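As a concrete sketch (the layer sizes here are placeholders, and fully connected layers are assumed, matching the SimpleNeuralNetwork class later in this section), each pair of adjacent layers is joined by one weight matrix whose shape is (size of current layer, size of next layer):

import numpy as np

# Hypothetical layer sizes for illustration: 3 inputs, 4 hidden neurons, 2 outputs.
layers = [3, 4, 2]

# One weight matrix connects each layer to the next.
for i in range(len(layers) - 1):
    w = np.random.randn(layers[i], layers[i + 1])
    print(f"Layer {i} -> layer {i + 1}: weight matrix shape {w.shape}, {w.size} connections")

# Layer 0 -> layer 1: weight matrix shape (3, 4), 12 connections
# Layer 1 -> layer 2: weight matrix shape (4, 2), 8 connections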
Different loss functions serve different purposes: the loss measures how far the network's predictions are from the targets, and its shape determines which errors are penalized most heavily. Explore how they behave in the sketch below:
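A minimal comparison, assuming mean squared error and binary cross-entropy as the two losses (the demo does not name which functions it plots): mean squared error penalizes the squared distance between prediction and target, while cross-entropy penalizes confident wrong probabilities far more sharply.

import numpy as np

def mse(y_true, y_pred):
    # Mean squared error: average of squared differences.
    return np.mean((y_true - y_pred) ** 2)

def binary_cross_entropy(y_true, y_pred, eps=1e-12):
    # Binary cross-entropy; clipping avoids log(0).
    y_pred = np.clip(y_pred, eps, 1 - eps)
    return -np.mean(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))

y_true = np.array([1.0, 0.0, 1.0])
for y_pred in (np.array([0.9, 0.1, 0.8]), np.array([0.9, 0.1, 0.01])):
    print(f"predictions {y_pred}: MSE = {mse(y_true, y_pred):.3f}, "
          f"cross-entropy = {binary_cross_entropy(y_true, y_pred):.3f}")

The confidently wrong third prediction in the second set raises the mean squared error to about 0.33, but the cross-entropy jumps to about 1.61 and keeps growing as the wrong probability approaches 1, which is why cross-entropy is the usual choice for classification.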
Watch how gradient descent optimizes the network weights to minimize loss: at each step the weights move a small distance in the direction opposite the gradient, which is the direction of steepest decrease. In the demo the loss starts at 16.00 at step 0 and shrinks over ten steps; the sketch below reproduces that schedule.
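A minimal sketch of that loop on a toy one-dimensional loss L(w) = w**2. The starting weight of 4.0 is an assumption chosen so the initial loss matches the demo's 16.00, and the learning rate of 0.2 is likewise an assumption.

# Gradient descent on a toy quadratic loss L(w) = w**2.
w = 4.0               # assumed starting weight, so the initial loss is 16.00
learning_rate = 0.2   # assumed step size

for step in range(11):
    loss = w ** 2
    print(f"Step {step} of 10: Loss = {loss:.2f}")
    gradient = 2 * w                # dL/dw
    w -= learning_rate * gradient   # step against the gradient

Each update multiplies w by 0.6, so the loss drops by roughly 64 percent per step and is close to zero by step 10.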
import numpy as np

class SimpleNeuralNetwork:
    def __init__(self, layers):
        # One weight matrix per pair of adjacent layers; no bias terms,
        # to keep the example minimal.
        self.weights = []
        for i in range(len(layers) - 1):
            w = np.random.randn(layers[i], layers[i + 1])
            self.weights.append(w)

    def forward(self, x):
        # Propagate the input through every layer, keeping each layer's
        # activations (the input counts as layer 0).
        activations = [x]
        for w in self.weights:
            x = self.sigmoid(np.dot(x, w))
            activations.append(x)
        return activations

    def sigmoid(self, x):
        # Squash values into the range (0, 1).
        return 1 / (1 + np.exp(-x))
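A quick usage sketch; the layer sizes and input values below are placeholders, not values from the original:

# Build a network with 2 inputs, a hidden layer of 3 neurons, and 1 output.
net = SimpleNeuralNetwork([2, 3, 1])

# One example with 2 features; forward() returns every layer's activations,
# and the last entry is the network's output.
x = np.array([[0.5, -1.2]])
activations = net.forward(x)
print(activations[-1])   # a 1x1 array; the exact value varies with the random weights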