Skip to content

Commit c40e551

Browse files
committed
split
1 parent 5cf6041 commit c40e551

File tree

2 files changed

+50
-32
lines changed

2 files changed

+50
-32
lines changed

Fall20/NeuralNetworks1/NN.py

Lines changed: 20 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -3,42 +3,31 @@
33
def linear_activation(z):
    """Linear (identity) activation: return the pre-activation unchanged."""
    return z
55

6-
def parametric_activation(a, z):
    """Parametric (leaky) ReLU: scale non-positive z by slope a, pass positive z through."""
    return a * z if z <= 0 else z
116

127
def tanh_activation(z):
    """Hyperbolic-tangent activation, squashing z into (-1, 1)."""
    return np.tanh(z)
14-
9+
1510
# 2 layer NN for implementation of OR gate
def orgate(input1, input2):
    """Two-layer perceptron computing the logical OR of two 0/1 inputs.

    Returns True when at least one input is 1, False otherwise.
    """
    # Weights of 2 on each input with a bias of -1 make the weighted sum
    # negative only for the (0, 0) input.
    bias = -1
    weighted_input = 2 * input1 + 2 * input2 + bias
    # Linear activation is the identity, inlined here: y == weighted_input.
    y = weighted_input
    return False if y < 0 else True
2519

26-
# 4 layer NN for computing whether absolute difference is between 1 and 3
# if between 1 and 3 outputs >0 else output <=0
def multilayer(x):
    """Forward pass of a fixed-weight 4-layer network over a 2-vector x.

    The returned value is positive exactly when |x[0] - x[1]| lies
    strictly between 1 and 3, and <= 0 otherwise (per the author's note).
    """
    def _abs_act(z):
        # Parametric activation with slope a = -1, inlined: equals |z|.
        return -1 * z if z <= 0 else z

    # Layer 1: signed difference x[0] - x[1] (weights [1, -1], bias 0),
    # then the abs-like activation.
    hidden1 = _abs_act(np.matmul(np.array([1, -1]), x) + 0)
    # Layer 2: shift by -2, activate again -> distance of |diff| from 2.
    hidden2 = _abs_act(np.matmul(np.array([1]), [hidden1]) + (-2))
    # Layer 3: negate, add 1, and squash with tanh (inlined tanh activation);
    # the sign of the result encodes the answer.
    return np.tanh(np.matmul(np.array([-1]), [hidden2]) + 1)
20+
def boolToBinary(bool1, bool2):
    """Encode two truth values as 0/1 integers, returned as a pair."""
    first = 1 if bool1 else 0
    second = 1 if bool2 else 0
    return first, second
4231

43-
x = np.array([4,5.5])
44-
print(multilayer(x))
32+
input1, input2 = boolToBinary(True,True)
33+
print(orgate(input1,input2))

Fall20/NeuralNetworks1/vectorized.py

Lines changed: 30 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,15 @@
33
def activation(z):
    """Identity activation used by the vectorized networks."""
    return z
55

6+
def parametric_activation(a, z):
    """Parametric ReLU: positive z passes through, otherwise z is scaled by a."""
    if z > 0:
        return z
    return a * z
11+
12+
def tanh_activation(z):
    """Tanh activation: map z smoothly into the open interval (-1, 1)."""
    return np.tanh(z)
14+
615
# Simple 2 layer neural network that returns the average of a 3 numbers given as a 3x1 column vector
716
# this function does a "forward pass" of the input x through the 2 layer network and returns the results
817
def average_nn(x):
@@ -47,7 +56,27 @@ def random_nn(x):
4756

4857
z_3 = np.matmul(w_3, a_2) + b_3
4958
a_3 = random_nn_activation(z_3)
50-
59+
5160
return a_3
5261

5362
print("On 3 layer network, input {} fed forward gives {}".format(x, random_nn(x)))
63+
64+
# 4 layer NN for computing whether absolute difference is between 1 and 3
# if between 1 and 3 outputs >0 else output <=0
def multilayer(x):
    """Forward pass of a fixed-weight 4-layer network over a 2-vector x.

    The output is positive exactly when |x[0] - x[1]| lies strictly
    between 1 and 3, and <= 0 otherwise (per the comment above).
    """
    # Layer 1: signed difference x[0] - x[1]; the slope -1 parametric
    # activation acts as an absolute value.
    hidden1 = parametric_activation(-1, np.matmul(np.array([1, -1]), x) + 0)
    # Layer 2: distance of |diff| from 2, via the same abs-like activation.
    hidden2 = parametric_activation(-1, np.matmul(np.array([1]), [hidden1]) + (-2))
    # Layer 3: flip the sign, shift by 1, and squash with tanh; the sign
    # of the result encodes the verdict.
    return tanh_activation(np.matmul(np.array([-1]), [hidden2]) + 1)
80+
81+
# Demo: |4 - 5.5| = 1.5 is between 1 and 3, so a positive value is printed.
x = np.array([4, 5.5])
print(multilayer(x))

0 commit comments

Comments (0)