@JesseMalnik/Neural net 4

Python

A multi-layer neural network implemented in NumPy.

main.py
import numpy as np 

def sigmoid_derivative(x):
  # Derivative of the sigmoid, assuming x is already a sigmoid output.
  return x * (1 - x)

def sigmoid(x):
  return 1 / (1 + np.exp(-x))

# Four training examples, three input features each.
training_inputs = np.array([[1,0,0],
                            [0,1,1],
                            [1,0,1],
                            [0,1,0]])

# Targets for the first layer (two outputs per example).
training_outputs_f = np.array([[1,0],[0,1],[1,0],[0,1]])

# Target for the second layer (one output per example).
training_outputs_s = np.array([[1,0,1,0]]).T

np.random.seed(1)

# Initialise both weight matrices with random values in [-1, 1).
synaptic_weights_f = 2 * np.random.random((3,2)) - 1

synaptic_weights_s = 2 * np.random.random((2,1)) - 1

print("random synaptic_weights_f :")
print(synaptic_weights_f)
print("random synaptic_weights_s : ")
print(synaptic_weights_s)

# Stage 1: train the first layer on its own targets with the delta rule.
for iteration in range(200000):
  input_layer = training_inputs
  outputs = sigmoid(np.dot(input_layer, synaptic_weights_f))
  error = training_outputs_f - outputs
  adjustments = error * sigmoid_derivative(outputs)
  synaptic_weights_f += np.dot(input_layer.T, adjustments)


print("synaptic weights f after training: ")
print(synaptic_weights_f)

print("outputs after training layer 1: ")
print(outputs)

# Stage 2: freeze the first layer's outputs and use them as fixed inputs,
# so only the second-layer weights are updated (layer-wise training rather
# than end-to-end backpropagation).
for iteration in range(200000):
  input_layer = outputs
  outputs_ = sigmoid(np.dot(input_layer, synaptic_weights_s))
  error = training_outputs_s - outputs_
  adjustments = error * sigmoid_derivative(outputs_)
  synaptic_weights_s += np.dot(input_layer.T, adjustments)

print(" synaptic weights s after training: ")
print(synaptic_weights_s)
print("outputs second layer:")
print(outputs_)
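
# --- Usage sketch (not part of the original snippet) ---
# A minimal example of how the trained weights could be used to score a
# new input; the helper name `predict` and the sample input [1, 0, 0] are
# assumptions for illustration, not the author's code.
def predict(x):
  # Feed the input through both trained layers.
  hidden = sigmoid(np.dot(x, synaptic_weights_f))
  return sigmoid(np.dot(hidden, synaptic_weights_s))

print("prediction for [1, 0, 0]:")
print(predict(np.array([1, 0, 0])))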