
Commit ed7c6ad

Added linear
1 parent 1c50814 commit ed7c6ad

File tree

2 files changed: +67 −1 lines changed


Summer20/NeuralNetwork/tflinear.py

Lines changed: 65 additions & 0 deletions
@@ -0,0 +1,65 @@
from tensorflow.keras import datasets, models, layers, losses
import tensorflow as tf
import numpy as np
import random
import matplotlib.pyplot as plt

# randomPoints samples random points from the graph of func
# number - number of points
# bounds - tuple for range of input values
# func - function to sample from
def randomPoints(number, bounds, func):
    inputList = []
    outputList = []
    while number > 0:
        value = random.uniform(bounds[0], bounds[1])
        inputList.append([value])
        outputList.append([func(value)])
        number = number - 1
    return inputList, outputList

def linear(x):
    return x

def coolLinear(x):
    return 3*x + 1

# generate training data
bounds = (-10, 10)  # represents full system dynamics

inputList, outputList = randomPoints(10000, bounds, coolLinear)

# neural network code: a single Dense unit learns the slope and intercept
model = models.Sequential()
model.add(layers.Dense(1, activation='linear', input_shape=(1,)))
model.compile(optimizer='Adam',
              loss=losses.MeanSquaredError(),
              metrics=['mean_squared_error'])

history = model.fit(np.array(inputList), np.array(outputList), epochs=200)
print(model.get_weights())  # kernel and bias should approach 3 and 1

# plots out learning curve
# plt.plot(history.history['mean_squared_error'], label='mean_squared_error')
# plt.xlabel('Epoch')
# plt.ylabel('MSE')
# plt.ylim([0.0, 0.2])
# plt.legend(loc='lower right')
# plt.show()

# generate test data
inputTest, outputTest = randomPoints(10, bounds, coolLinear)
print(model.predict(np.array(inputTest)))
print(outputTest)

# graph of the true line, the test targets, and the model predictions
graph = plt.figure()
ax = graph.add_subplot(111)

x = np.linspace(-10, 10, 500)
y = 3*x + 1

plt.plot(x, y, label='y = 3x + 1', markersize=2, c='c')
ax.scatter(inputTest, outputTest, label='test targets', c='b')
ax.scatter(inputTest, model.predict(np.array(inputTest)), label='predictions', c='r')
plt.legend(loc='lower right')
plt.show()
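
Since coolLinear is y = 3x + 1, the single Dense(1) unit should converge to a kernel near 3 and a bias near 1. A minimal sketch of how one might check that after training, assuming the model defined above; the 0.1 tolerance is an arbitrary choice, not part of the original script:

import numpy as np

# get_weights() for a single Dense(1, input_shape=(1,)) layer returns
# [kernel, bias] with shapes (1, 1) and (1,)
kernel, bias = model.get_weights()
slope = float(kernel[0][0])
intercept = float(bias[0])

print(f"learned: y = {slope:.3f}x + {intercept:.3f}")

# compare against the target function coolLinear(x) = 3x + 1
assert np.isclose(slope, 3.0, atol=0.1), "slope has not converged to 3"
assert np.isclose(intercept, 1.0, atol=0.1), "bias has not converged to 1"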

Summer20/NeuralNetwork/tfsin.py

Lines changed: 2 additions & 1 deletion
@@ -48,13 +48,14 @@ def selectPoints(number, angle=90):
  # neural network code
  model = models.Sequential()
- model.add(layers.Dense(10, activation='tanh', input_shape=(1,)))
+ model.add(layers.Dense(12, activation='tanh', input_shape=(1,)))
  model.add(layers.Dense(1, activation=None))
  model.compile(optimizer='Adam',
                loss=losses.MeanSquaredError(),
                metrics=['mean_squared_error'])

  history = model.fit(np.array(angleList),np.array(sinList), epochs=200)
+ print(model.get_weights())

  # plots out learning curve
  # plt.plot(history.history['mean_squared_error'], label='mean_squared_error')
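
The added print(model.get_weights()) line dumps the raw parameter arrays; for this Dense(12, tanh) into Dense(1) model the list holds four arrays. A small sketch of how one might label them when reading that output, assuming the model variable from the snippet above:

# for Dense(12, input_shape=(1,)) followed by Dense(1), get_weights() returns
# [hidden kernel (1, 12), hidden bias (12,), output kernel (12, 1), output bias (1,)]
names = ["hidden kernel", "hidden bias", "output kernel", "output bias"]
for name, array in zip(names, model.get_weights()):
    print(name, array.shape)
    print(array)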
