Feed Forward Implementations
import numpy as np
np.random.seed(42)
Helper Function
def print_array(array):
    """Display a NumPy array's shape, a blank separator line, and its values.

    Parameters
    ----------
    array : np.ndarray
        The array to display.
    """
    print(array.shape)
    # A bare print() emits the intended blank separator line; the original
    # print('\r') wrote a carriage return, which renders inconsistently
    # (can overwrite the line on some terminals).
    print()
    print(array)
Activation Function
def sigmoid(x):
    """
    Apply the logistic function 1 / (1 + e^(-x)) elementwise.
    """
    decay = np.exp(-x)
    return 1 / (1 + decay)
Network size
# Layer sizes for a 4 -> 3 -> 2 fully connected feed-forward network.
N_input = 4    # number of input features
N_hidden = 3   # units in the single hidden layer
N_output = 2   # units in the output layer
Data for Inputs
# One input sample: 4 standard-normal features, reproducible via the seed above.
X = np.random.randn(N_input)
print_array(X)
(4,)
[ 0.49671415 -0.1382643 0.64768854 1.52302986]
Weights
# Weight matrices sampled from a standard normal distribution N(0, 1).
# Shapes are (fan_in, fan_out) so a row-vector input multiplies on the
# left via np.dot: (4,) @ (4, 3) -> (3,) and (3,) @ (3, 2) -> (2,).
weights_input_to_hidden = np.random.normal(0,
                                           scale=1,
                                           size=(N_input, N_hidden))
weights_hidden_to_output = np.random.normal(0,
                                            scale=1,
                                            size=(N_hidden, N_output))
print_array(weights_input_to_hidden)
(4, 3)
[[-0.23415337 -0.23413696 1.57921282]
[ 0.76743473 -0.46947439 0.54256004]
[-0.46341769 -0.46572975 0.24196227]
[-1.91328024 -1.72491783 -0.56228753]]
print_array(weights_hidden_to_output)
(3, 2)
[[-1.01283112 0.31424733]
[-0.90802408 -1.4123037 ]
[ 1.46564877 -0.2257763 ]]
Hidden Layer Calculations
# Hidden layer: linear combination of the inputs, then sigmoid activation.
hidden_layer_in = np.dot(X, weights_input_to_hidden)  # (4,) @ (4, 3) -> (3,)
hidden_layer_out = sigmoid(hidden_layer_in)           # squashed into (0, 1)
print_array(hidden_layer_in)
(3,)
[-3.43654939 -2.98013677 0.00973617]
print_array(hidden_layer_out)
(3,)
[0.03117253 0.04833134 0.50243402]
Output Layer Calculations
# Output layer: same pattern, fed by the hidden activations.
output_layer_in = np.dot(hidden_layer_out, weights_hidden_to_output)  # (3,) @ (3, 2) -> (2,)
output_layer_out = sigmoid(output_layer_in)  # final network outputs in (0, 1)
print_array(output_layer_in)
(2,)
[ 0.66093328 -0.17190034]
print_array(output_layer_out)
(2,)
[0.65947001 0.45713043]
This content is taken from notes I took while pursuing the Intro to Machine Learning with PyTorch Nanodegree certification.