|
| 1 | +from tensorflow.keras import datasets, models, layers, losses |
| 2 | +import tensorflow as tf |
| 3 | +import numpy as np |
| 4 | +import random |
| 5 | +import matplotlib.pyplot as plt |
| 6 | + |
def generatePoints(number, bounds):
    """Generate random (x, x**2) training pairs.

    NOTE: the old header claimed these were points on the sin graph and
    documented a "bool" parameter; neither was true — the function samples
    x uniformly and pairs it with x squared.

    Parameters:
        number: how many points to generate (<= 0 yields empty lists).
        bounds: (low, high) tuple giving the uniform sampling range for x.

    Returns:
        (inputList, outputList): parallel lists of single-element lists,
        with outputList[i][0] == inputList[i][0] ** 2 — shaped as column
        vectors for Keras.
    """
    inputList = []
    outputList = []
    # for-range instead of the original while-countdown, which mutated
    # the `number` parameter in place.
    for _ in range(number):
        value = random.uniform(bounds[0], bounds[1])
        inputList.append([value])
        outputList.append([value ** 2])
    return inputList, outputList
| 20 | + |
# Generate training data: sample uniformly across the full input range so
# the network sees the whole x**2 curve, not just a sub-interval.
bounds = (-10, 10)  # (low, high) sampling range; represents full system dynamics

inputList, outputList = generatePoints(10000, bounds)
| 25 | + |
# Regression network: 1 input -> 16 units ('exponential' activation) -> 1
# linear output, trained with mean-squared error to approximate y = x**2.
model = models.Sequential([
    layers.Dense(16, activation='exponential', input_shape=(1,)),
    layers.Dense(1, activation=None),
])
model.compile(
    optimizer='Adam',
    loss=losses.MeanSquaredError(),
    metrics=['mean_squared_error'],
)

history = model.fit(np.array(inputList), np.array(outputList), epochs=200)
print(model.get_weights())
| 36 | + |
# Learning-curve plot (disabled): uncomment to visualize training MSE per epoch.
# plt.plot(history.history['mean_squared_error'], label='mean_squared_error')
# plt.xlabel('Epoch')
# plt.ylabel('MSE')
# plt.ylim([0.0, 0.2])
# plt.legend(loc='lower right')
# plt.show()

# Generate a small held-out test set from the same range, then print the
# model's predictions next to the true squared values for eyeball comparison.
inputTest, outputTest = generatePoints(10, bounds)
print(model.predict(np.array(inputTest)))
print(outputTest)
| 49 | + |
# Plot the analytic curve y = x^2 together with the held-out test points:
# their true values (blue) and the model's predictions for the same
# inputs (red).
graph = plt.figure()
ax = graph.add_subplot(111)

x = np.linspace(-10, 10, 500)
y = x ** 2

# Run the model once and reuse the result instead of predicting inline.
predictions = model.predict(np.array(inputTest))

plt.plot(x, y, label='y = x^2', markersize=2, c='c')
# BUG FIX: these points come from the test set (generatePoints(10, ...)),
# not the training set — the original legend mislabeled them 'training'.
ax.scatter(inputTest, outputTest, label='test (true)', c='b')
ax.scatter(inputTest, predictions, label='test (predicted)', c='r')
plt.legend(loc='lower right')
plt.show()
0 commit comments