from tensorflow.keras import models, layers, losses
import numpy as np
import random
import matplotlib.pyplot as plt

# randomPoints - sample random points from a target function
# number - number of points
# bounds - tuple (low, high) for the range of input values
# func - function applied to each sampled input
def randomPoints(number, bounds, func):
    inputList = []
    outputList = []
    for _ in range(number):
        value = random.uniform(bounds[0], bounds[1])
        inputList.append([value])
        outputList.append([func(value)])
    return inputList, outputList

# identity target (defined for experimentation; not used below)
def linear(x):
    return x

# target function the network should learn: y = 3x + 1
def coolLinear(x):
    return 3*x + 1

# generate training data
bounds = (-10, 10)  # input range to sample; training data should span the region the model will be used on

inputList, outputList = randomPoints(10000, bounds, coolLinear)

# neural network code
model = models.Sequential()
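# a single Dense unit with a linear activation computes w*x + b,
# so it can represent coolLinear (w = 3, b = 1) exactly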
model.add(layers.Dense(1, activation='linear', input_shape=(1,)))
model.compile(optimizer='Adam',
              loss=losses.MeanSquaredError(),
              metrics=['mean_squared_error'])

history = model.fit(np.array(inputList), np.array(outputList), epochs=200)
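# if training converged, the learned kernel and bias should be close to 3.0 and 1.0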
print(model.get_weights())

# plots out learning curve
# plt.plot(history.history['mean_squared_error'], label='mean_squared_error')
# plt.xlabel('Epoch')
# plt.ylabel('MSE')
# plt.ylim([0.0, 0.2])
# plt.legend(loc='lower right')
# plt.show()

# generate test data and compare predictions to the true values
inputTest, outputTest = randomPoints(10, bounds, coolLinear)
predictions = model.predict(np.array(inputTest))
print(predictions)
print(outputTest)

# graph of the target line, the true test points, and the model's predictions
graph = plt.figure()
ax = graph.add_subplot(111)

x = np.linspace(bounds[0], bounds[1], 500)
y = 3*x + 1

ax.plot(x, y, label='y = 3x + 1', c='c')
ax.scatter(inputTest, outputTest, label='true test values', c='b')
ax.scatter(inputTest, predictions, label='model predictions', c='r')
plt.legend(loc='lower right')
plt.show()
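
# optional sanity check: with the compile() settings above, model.evaluate
# returns [loss, mse] on the held-out test points
test_loss, test_mse = model.evaluate(np.array(inputTest), np.array(outputTest))
print('test MSE:', test_mse)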