Skip to content

Commit b7aff85

Browse files
committed
add worksheets
1 parent 79a05e0 commit b7aff85

File tree

2 files changed

+39
-0
lines changed

2 files changed

+39
-0
lines changed
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import numpy as np
2+
3+
# fill in this neural network and activation function to calculate the
# average of 5 values using numpy and vectorization
def activation(z):
    """Identity activation.

    Averaging is a purely linear operation, so the output neuron needs
    no nonlinearity — the weighted input is passed through unchanged.
    """
    return z
6+
7+
def average5NN(x):
    """Average the 5 entries of column vector ``x`` with a 2-layer network.

    Layer sizes: 5 input neurons, 1 output neuron.  The output neuron is
    fully connected with every weight equal to 1/5 and zero bias, so the
    weighted input is exactly the mean of the inputs.  The activation for
    this network is the identity (averaging is linear), so the weighted
    input is returned directly to keep this function self-contained.

    Parameters: x — array-like of 5 numeric values.
    Returns: the mean of the 5 values (numpy scalar).
    """
    # One output neuron, one weight of 0.2 per input.
    weights = np.full(5, 0.2)
    bias = 0.0
    # Vectorized dot product: weights . x + bias == mean(x).
    weighted_input = weights @ np.asarray(x) + bias
    # Identity activation — return the weighted input as-is.
    return weighted_input
19+
20+
21+
# Your network works if you don't get any errors!
# Sanity checks: average5NN must return the mean of its 5 inputs.
assert average5NN(np.array([1, 1, 1, 1, 1])) == 1
assert average5NN(np.array([1, -1, 1, -1, 0])) == 0
# (100 + 200 + 300 + 400 - 400) / 5 == 120, not 1.2 — fixed expected value
# to be consistent with the two asserts above.
assert average5NN(np.array([100, 200, 300, 400, -400])) == 120
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# fill in this neural network and activation function to calculate the
# square of the sum of 2 inputs
def activation(z):
    """Squaring activation.

    The output neuron computes the weighted sum (w . x + b) of its two
    inputs; squaring that sum is exactly this network's nonlinearity.
    """
    return z ** 2
4+
5+
def squareNN(x1, x2):
    """Return the square of the sum of the two inputs, neural-network style.

    Layer sizes: 2 input neurons, 1 output neuron with weights [1, 1] and
    bias 0.  The weighted input is x1 + x2; the squaring activation is
    inlined here so this function is self-contained.

    Parameters: x1, x2 — numeric inputs.
    Returns: (x1 + x2) ** 2.
    """
    w1, w2, bias = 1.0, 1.0, 0.0
    weighted_input = w1 * x1 + w2 * x2 + bias
    # Squaring activation applied to the output neuron's weighted input.
    return weighted_input ** 2
9+
10+
11+
# Your network works if you don't get any errors!
# Sanity checks: squareNN must return the square of the sum of its inputs.
assert squareNN(1, 1) == 4
assert squareNN(-1, 1) == 0
assert squareNN(10, 2.5) == 156.25
assert squareNN(0, 0) == 0

0 commit comments

Comments
 (0)