
Commit d524958

Added quadratic function
1 parent ed7c6ad commit d524958

File tree

2 files changed: +69 additions, −4 deletions


Summer20/NeuralNetwork/tflinear.py

Lines changed: 8 additions & 4 deletions
@@ -7,6 +7,7 @@
 # randomPoints on the sin graph
 # number - number of points
 # bounds - tuple for range of values
+# func - function to be used
 def randomPoints(number, bounds, func):
     inputList = []
     outputList = []
@@ -17,6 +18,9 @@ def randomPoints(number, bounds, func):
         number = number - 1
     return inputList, outputList
 
+def boolfunc(x):
+    return x*0 + 1
+
 def linear(x):
     return x
 
@@ -26,7 +30,7 @@ def coolLinear(x):
 # generate training data
 bounds = (-10,10) # represents full system dynamics
 
-inputList, outputList = randomPoints(10000, bounds, coolLinear)
+inputList, outputList = randomPoints(10000, bounds, boolfunc)
 
 # neural network code
 model = models.Sequential()
@@ -47,7 +51,7 @@ def coolLinear(x):
 # plt.show()
 
 # generate test data
-inputTest, outputTest = randomPoints(10, bounds, coolLinear)
+inputTest, outputTest = randomPoints(10, bounds, boolfunc)
 print(model.predict(np.array(inputTest)))
 print(outputTest)
 
@@ -56,9 +60,9 @@ def coolLinear(x):
 ax = graph.add_subplot(111)
 
 x = np.linspace(-10,10,500)
-y = 3*x + 1
+y = boolfunc(x)
 
-plt.plot(x,y, label= 'y = x', markersize = 2, c='c')
+plt.plot(x,y, label= 'y = 1', markersize = 2, c='c')
 ax.scatter(inputTest, outputTest, label = 'training', c='b')
 ax.scatter(inputTest,model.predict(np.array(inputTest)), label = 'testing', c='r')
 plt.legend(loc='lower right')
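
In effect, this change swaps the training target in tflinear.py from coolLinear to the new constant function boolfunc, so the network is now fit to y = 1 over [-10, 10]. Below is a minimal standalone sketch of what the new target produces; the loop body of randomPoints is outside this diff, so the version here is an assumption modeled on generatePoints in tfquad.py.

import random

# Hypothetical reduced randomPoints with the same signature as tflinear.py's;
# the loop body is an assumption, by analogy with generatePoints in tfquad.py.
def randomPoints(number, bounds, func):
    inputList = []
    outputList = []
    while number > 0:
        value = random.uniform(bounds[0], bounds[1])
        inputList.append([value])          # input wrapped in a list, as in the diff
        outputList.append([func(value)])   # target produced by the passed function
        number = number - 1
    return inputList, outputList

def boolfunc(x):
    return x*0 + 1   # constant target: every sampled x maps to 1

inputs, outputs = randomPoints(3, (-10, 10), boolfunc)
print(inputs)    # three random x values in [-10, 10], each in its own list
print(outputs)   # [[1], [1], [1]]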

Summer20/NeuralNetwork/tfquad.py

Lines changed: 61 additions & 0 deletions
@@ -0,0 +1,61 @@
+from tensorflow.keras import datasets, models, layers, losses
+import tensorflow as tf
+import numpy as np
+import random
+import matplotlib.pyplot as plt
+
+# generatePoints - random points on the quadratic y = x**2
+# number - number of points
+# bounds - tuple for range of values
+# returns parallel input/output lists with output = input**2
+def generatePoints(number, bounds):
+    inputList = []
+    outputList = []
+    while(number>0):
+        value = random.uniform(bounds[0],bounds[1])
+        inputList.append([value])
+        outputList.append([value**2])
+        number = number - 1
+    return inputList, outputList
+
+# generate training data
+bounds = (-10,10) # represents full system dynamics
+
+inputList, outputList = generatePoints(10000, bounds)
+
+# neural network code
+model = models.Sequential()
+model.add(layers.Dense(16, activation='exponential', input_shape=(1,)))
+model.add(layers.Dense(1, activation=None))
+model.compile(optimizer='Adam',
+              loss=losses.MeanSquaredError(),
+              metrics=['mean_squared_error'])
+
+history = model.fit(np.array(inputList),np.array(outputList), epochs=200)
+print(model.get_weights())
+
+# plots out learning curve
+# plt.plot(history.history['mean_squared_error'], label='mean_squared_error')
+# plt.xlabel('Epoch')
+# plt.ylabel('MSE')
+# plt.ylim([0.0, 0.2])
+# plt.legend(loc='lower right')
+# plt.show()
+
+# generate test data
+inputTest, outputTest = generatePoints(10, bounds)
+print(model.predict(np.array(inputTest)))
+print(outputTest)
+
+# graph of the test, train points
+graph = plt.figure()
+ax = graph.add_subplot(111)
+
+x = np.linspace(-10,10,500)
+y = x**2
+
+plt.plot(x,y, label= 'y = x^2', markersize = 2, c='c')
+ax.scatter(inputTest, outputTest, label = 'training', c='b')
+ax.scatter(inputTest,model.predict(np.array(inputTest)), label = 'testing', c='r')
+plt.legend(loc='lower right')
+plt.show()
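
A quick way to sanity-check tfquad.py after training is to compare the model against the true parabola on a dense grid. This sketch is not part of the commit; it assumes model and bounds are still in scope from running the script.

# Hypothetical post-training check, assuming tfquad.py has just been run.
xs = np.linspace(bounds[0], bounds[1], 200).reshape(-1, 1)
preds = model.predict(xs)                  # network estimate of x**2 on the grid
grid_mse = np.mean((preds - xs**2) ** 2)   # error against the true parabola
print("grid MSE over", bounds, ":", grid_mse)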
