
Commit 1107c6a

Merge branch 'master' of https://github.com/acmucsd/acm-ai-workshops into master
2 parents: 1b34262 + c40e551

File tree

Fall20/NeuralNetworks1/NN.py
Fall20/NeuralNetworks1/vectorized.py

2 files changed: 63 additions & 1 deletion

Fall20/NeuralNetworks1/NN.py

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
import numpy as np


def linear_activation(z):
    return z


def tanh_activation(z):
    return np.tanh(z)


# 2 layer NN implementing an OR gate
def orgate(input1, input2):
    bias = -1
    weighted_input = 2*input1 + 2*input2 + bias
    y = linear_activation(weighted_input)
    # threshold at 0: negative means False, otherwise True
    if y < 0:
        return False
    else:
        return True


# convert a pair of booleans to the 0/1 inputs the gate expects
def boolToBinary(bool1, bool2):
    binary = []
    if bool1:
        binary.append(1)
    else:
        binary.append(0)
    if bool2:
        binary.append(1)
    else:
        binary.append(0)
    return binary[0], binary[1]


input1, input2 = boolToBinary(True, True)
print(orgate(input1, input2))
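
A minimal usage sketch, assuming it runs in the same module as the definitions above, that exercises the gate over the full truth table (only the False, False row should print False):

# hypothetical truth-table check for the OR gate above
for a in (False, True):
    for b in (False, True):
        i1, i2 = boolToBinary(a, b)
        print(a, b, "->", orgate(i1, i2))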

Fall20/NeuralNetworks1/vectorized.py

Lines changed: 30 additions & 1 deletion
@@ -3,6 +3,15 @@
def activation(z):
    return z

# leaky/parametric ReLU: scales negative inputs by a, passes positive inputs through
def parametric_activation(a, z):
    if z <= 0:
        return a*z
    else:
        return z

def tanh_activation(z):
    return np.tanh(z)

# Simple 2 layer neural network that returns the average of 3 numbers given as a 3x1 column vector
# this function does a "forward pass" of the input x through the 2 layer network and returns the result
def average_nn(x):
@@ -47,7 +56,27 @@ def random_nn(x):
    z_3 = np.matmul(w_3, a_2) + b_3
    a_3 = random_nn_activation(z_3)

    return a_3

print("On 3 layer network, input {} fed forward gives {}".format(x, random_nn(x)))


# 4 layer NN that computes whether the absolute difference of its two inputs is between 1 and 3
# if the difference is between 1 and 3 the output is > 0, otherwise it is <= 0
def multilayer(x):
    w1 = np.array([1, -1])
    b1 = 0
    weighted_input1 = np.matmul(w1, x) + b1
    # parametric_activation with a = -1 acts like an absolute value
    output1 = parametric_activation(-1, weighted_input1)
    w2 = np.array([1])
    b2 = -2
    weighted_input2 = np.matmul(w2, [output1]) + b2
    output2 = parametric_activation(-1, weighted_input2)
    w3 = np.array([-1])
    b3 = 1
    weighted_input3 = np.matmul(w3, [output2]) + b3
    y = tanh_activation(weighted_input3)
    return y


x = np.array([4, 5.5])
print(multilayer(x))
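
To see why this last example should print a positive value, the forward pass for x = [4, 5.5] can be traced by hand (a sketch following the weights above; parametric_activation with a = -1 returns the absolute value of its input):

# layer 1: 1*4 + (-1)*5.5 + 0 = -1.5, and parametric_activation(-1, -1.5) = 1.5 (the absolute difference)
# layer 2: 1*1.5 + (-2) = -0.5, and parametric_activation(-1, -0.5) = 0.5 (distance of that difference from 2)
# layer 3: -1*0.5 + 1 = 0.5, and tanh(0.5) ≈ 0.46 > 0, so 1.5 does lie between 1 and 3
print(multilayer(np.array([5, 1])))  # hypothetical extra check: |5 - 1| = 4 is outside [1, 3], so this prints a value <= 0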
