
Commit ec44ae7

added 2 functions
1 parent b707a6c commit ec44ae7

File tree

  • Fall20/NeuralNetworks1/NN.py

1 file changed: +44 -0 lines changed

Fall20/NeuralNetworks1/NN.py

Lines changed: 44 additions & 0 deletions
@@ -0,0 +1,44 @@
import numpy as np

def linear_activation(z):
    return z

def parametric_activation(a, z):
    if z <= 0:
        return a * z
    else:
        return z

def tanh_activation(z):
    return np.tanh(z)

# 2 layer NN for implementation of OR gate
def orgate(x):
    weights = np.array([2, 2])
    bias = -1
    weighted_input = np.matmul(weights, x) + bias
    y = linear_activation(weighted_input)
    return y

x = np.array([0, 0])
print(orgate(x))

# 4 layer NN for computing whether absolute difference is between 1 and 3
# if between 1 and 3 outputs >0 else output <=0
def multilayer(x):
    w1 = np.array([1, -1])
    b1 = 0
    weighted_input1 = np.matmul(w1, x) + b1
    output1 = parametric_activation(-1, weighted_input1)
    w2 = np.array([1])
    b2 = -2
    weighted_input2 = np.matmul(w2, [output1]) + b2
    output2 = parametric_activation(-1, weighted_input2)
    w3 = np.array([-1])
    b3 = 1
    weighted_input3 = np.matmul(w3, [output2]) + b3
    y = tanh_activation(weighted_input3)
    return y

x = np.array([4, 5.5])
print(multilayer(x))
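
The orgate unit encodes OR with a sign convention rather than a 0/1 output: with weights [2, 2] and bias -1, the weighted sum is -1 for input (0, 0) and at least 1 whenever either input is 1, so a positive output means "true". A minimal check over the full truth table might look like the sketch below, assuming NN.py is importable from its own directory (importing it also executes the file's top-level print calls):

import numpy as np
from NN import orgate  # assumption: NN.py is on the import path; importing runs its prints

# a positive raw output corresponds to logical OR being true
for x1 in (0, 1):
    for x2 in (0, 1):
        raw = orgate(np.array([x1, x2]))
        print(f"OR({x1}, {x2}): raw output {raw}, interpreted as {int(raw > 0)}")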
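
The multilayer network chains three units. The first computes x1 - x2 and, because parametric_activation with a = -1 negates non-positive inputs, outputs |x1 - x2|. The second applies the same trick around 2, giving ||x1 - x2| - 2|. The third computes 1 - ||x1 - x2| - 2| and passes it through tanh, which preserves the sign, so the result is positive exactly when |x1 - x2| lies strictly between 1 and 3 and is <= 0 otherwise (exactly 0 on the boundaries). A small sketch comparing the network's sign against the direct interval test, under the same import assumption as above:

import numpy as np
from NN import multilayer  # assumption: NN.py is on the import path

# the sign of the network output should agree with the direct test 1 < |x1 - x2| < 3
for diff in (0.5, 1.0, 1.5, 2.0, 2.9, 3.0, 4.0):
    x = np.array([diff, 0.0])
    network_positive = multilayer(x) > 0
    direct = 1 < abs(x[0] - x[1]) < 3
    print(f"difference {diff}: network says {network_positive}, direct test says {direct}")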
