
Commit 84046c7

🎉 Network Wrapper completely rewritten and tested

1 parent a016808 · commit 84046c7

12 files changed: +161 −32 lines

README.md

Lines changed: 13 additions & 11 deletions

````diff
@@ -8,28 +8,30 @@ The framework while being extremely easy to use also allows a relatively high de
 All implementations are fully documented to help understand the underlying logic for each component better.
 
 
+#### The following files contain example code for using the Network wrapper.
+- [demo_Network_categorical.py](./demo_Network_categorical.py)
+- [demo_Network_binary_logistic_regression.py](./demo_Network_binary_logistic_regression.py)
+- [demo_Network_linear_regression.py](./demo_Network_linear_regression.py)
 
 #### The following files contain example code to get you started.
-- [demo_multiclass_classification.py](./demo_multiclass_classification.py)
+- [demo_categorical.py](./demo_categorical.py)
 - [demo_binary_logistic_regression.py](./demo_binary_logistic_regression.py)
+- [demo_linear_regression.py](./demo_linear_regression.py)
 
-#### The [NetworkDemo.py](NetworkDemo.py) file contains example code for using the Network wrapper.
-
-## *Features*
-- Fully connected Layers
-- Dropout Layers
-- ReLU, Softmax, Sigmoid activations
-- Generate training data (Classification data)
-- Categorical Cross Entropy Loss, Binary Cross Entropy Loss
+## **Features**
+- Fully connected, Dropout Layers
+- ReLU, Softmax, Sigmoid, and Linear activations
+- Generate synthetic training data (Classification and Regression data)
+- Categorical Cross Entropy Loss, Binary Cross Entropy Loss, Mean Squared Error
 - L1 & L2 regularization
 - Backpropagation
 - Optimizers
   - SGD (with decay and momentum)
   - AdaGrad
   - RMSprop
   - Adam
-- Network Wrapper with TinyFlow backend (under development)
-  - The wrapper currently supports setting up a basic Neural Network and running a simple training loop using the above components, without having to worry about instantiating and getting the dimensions right for every layer.
+- Network Wrapper with TinyFlow backend
+  - The wrapper supports setting up a Neural Network and running a simple training loop using the above components, without having to write long, error-prone code.
 
 
 ## Steps to use
 ```
````
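For orientation, the wrapper-first workflow the new README describes looks like this when condensed from the demo files added later in this commit. All identifiers are TinyFlow's own, taken from this diff; the zero-argument `Optimizer_Adam()` assumes its defaults, which the diff does not show:

```python
from TinyFlow.Network import Network
from TinyFlow.Datasets import spiral_data
from TinyFlow.Layers import Layer_Dense
from TinyFlow.Activations import Activation_ReLU, Activation_Softmax
from TinyFlow.Optimizers import Optimizer_Adam
from TinyFlow.Loss import Loss_CategoricalCrossEntropy
from TinyFlow.Metrics import Accuracy_Categorical

# Build, configure, compile, train - the loop the wrapper replaces
model = Network()
model.add(Layer_Dense(2, 64))
model.add(Activation_ReLU())
model.add(Layer_Dense(64, 3))
model.add(Activation_Softmax())
model.set(loss=Loss_CategoricalCrossEntropy(),
          optimizer=Optimizer_Adam(),
          accuracy=Accuracy_Categorical())
model.compile_model()

X, y = spiral_data(100, 3)
model.train(X, y, epochs=100, print_every=10)
```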

TinyFlow/Activations.py

Lines changed: 4 additions & 4 deletions

```diff
@@ -7,7 +7,7 @@ class Activation_ReLU:
     max (0, input)
     '''
 
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         '''Activation_ReLU.forward (input_data)'''
         # np.maximum takes two inputs and finds element wise maximum
         self.output = np.maximum(0, inputs)
@@ -39,7 +39,7 @@ class Activation_Softmax:
     '''Softmax activation'''
 
     # Forward Pass
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         '''Activation_Softmax.forward (input_data)'''
 
         # get unnormalized probabilities
@@ -71,7 +71,7 @@ def __str__(self):
 # Sigmoid activation
 class Activation_Sigmoid:
     # Forward Pass
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         # Save input and calculate/save output of the sigmoid function
         self.input = inputs
         self.output = 1 / (1 + np.exp(-inputs))
@@ -93,7 +93,7 @@ def __str__(self):
 class Activation_Linear:
 
     # Forward pass
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         # All you need to do is just cache the values
         self.input = inputs
         self.output = inputs
```
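Every activation's `forward` now accepts a `training` argument it never reads. That is deliberate: it gives all layers one signature, so `Network.forward` can thread the flag through the whole stack while only `Layer_Dropout` (next file) actually branches on it. A standalone sketch of the pattern; the two classes here are hypothetical illustrations, not TinyFlow code:

```python
import numpy as np

# Hypothetical minimal layers (not TinyFlow classes), illustrating why even
# no-op layers accept 'training': the network can call every layer uniformly.
class Passthrough:
    def forward(self, inputs, training):
        # Ignores the flag, but accepting it keeps the call site uniform
        self.output = inputs

class DropHalf:
    def forward(self, inputs, training):
        if not training:
            self.output = inputs.copy()  # inference: identity
            return
        mask = np.random.binomial(1, 0.5, size=inputs.shape) / 0.5
        self.output = inputs * mask      # training: scaled dropout mask

x = np.ones((2, 3))
for layer in [Passthrough(), DropHalf(), Passthrough()]:
    layer.forward(x, training=False)     # one uniform call, flag threaded down
    x = layer.output
print(x)                                 # unchanged: dropout was inactive
```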

TinyFlow/Layers.py

Lines changed: 8 additions & 2 deletions

```diff
@@ -19,7 +19,7 @@ def __init__(self, inputs, neurons, weight_regularizer_l1=0, weight_regularizer_
         self.bias_regularizer_l2 = bias_regulariser_l2
 
     # Forward Pass
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         '''Layer_Dense.forward (input_data)'''
 
         # Calculate the output values from inputs, weights, and biases
@@ -78,10 +78,16 @@ def __init__(self, rate):
         self.rate = 1 - rate
 
     # Forward pass
-    def forward(self, values):
+    def forward(self, values, training=True):
         # save input values
         self.input = values
 
+        # If not in training mode - return values
+        if not training:
+            self.output = values.copy()
+            return
+
+        # Generate and save scaled mask
         self.binary_mask = np.random.binomial(
             1, self.rate, size=values.shape) / self.rate
 
```
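The dropout change is inverted dropout: during training the binomial mask is divided by the keep rate, so the expected activation magnitude is unchanged, and with `training=False` the layer degenerates to an identity. A quick check of both properties, assuming the `TinyFlow.Layers` import path used by the demos in this commit:

```python
import numpy as np
from TinyFlow.Layers import Layer_Dropout

drop = Layer_Dropout(0.1)        # drop 10%, so self.rate (the keep rate) is 0.9
x = np.ones((100_000, 1))

drop.forward(x, training=True)
print(drop.output.mean())        # ~1.0: surviving values are scaled up by 1/0.9

drop.forward(x, training=False)
print((drop.output == x).all())  # True: identity pass-through at inference
```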

TinyFlow/Loss.py

Lines changed: 5 additions & 1 deletion

```diff
@@ -86,7 +86,7 @@ def remember_trainable_layers(self, trainable_layers):
     # Calculates the data and regularization losses
     # given model output and ground truth values
 
-    def calculate(self, output, y):
+    def calculate(self, output, y, *, include_regularization=False):
         '''calculate(self, output, ground_truth)\n
         internal method for Network wrapper\n
         Calculates the data and regularization losses
@@ -99,6 +99,10 @@ def calculate(self, output, y):
         # Calculate the mean loss
         data_loss = np.mean(sample_losses)
 
+        # If just data loss is needed, return it
+        if not include_regularization:
+            return data_loss
+
         # Return the data and regularization losses
         return data_loss, self.network_regularization_loss()
 
```
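The keyword-only flag lets one `calculate` method serve both callers: training wants the (data, regularization) pair, while the new validation path wants a single scalar. A standalone mock of that contract; `MockLoss` and its squared-error body are illustrative, not TinyFlow's classes:

```python
import numpy as np

# Standalone sketch of the new calculate() contract (not the TinyFlow class)
class MockLoss:
    def regularization_loss(self):
        return 0.05  # stand-in for the L1/L2 penalty

    def calculate(self, output, y, *, include_regularization=False):
        data_loss = np.mean((output - y) ** 2)  # any per-sample loss works here
        if not include_regularization:
            return data_loss                    # scalar, as validation uses it
        return data_loss, self.regularization_loss()  # pair, as training uses it

loss = MockLoss()
output, y = np.array([0.9, 0.1]), np.array([1.0, 0.0])
val_loss = loss.calculate(output, y)                              # scalar
data_loss, reg_loss = loss.calculate(output, y, include_regularization=True)
```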

TinyFlow/Metrics.py

Lines changed: 21 additions & 0 deletions

```diff
@@ -38,6 +38,27 @@ def compare(self, predictions, y):
         return np.absolute(predictions - y) < self.precision
 
 
+class Accuracy_Categorical(Accuracy):
+
+    # No initialization is needed
+    def init(self, y):
+        pass
+
+    # Compares predictions to the ground truth values
+    def compare(self, predictions, y):
+        return predictions == y
+
+
+# class Accuracy_BinaryLogisticRegression(Accuracy):
+
+#     # No initialization is needed
+#     def init(self, y):
+#         pass
+
+#     def compare(self, predictions, y):
+#         return predictions == y
+
+
 def model_accuracy_softmax(outputs, labels):
     '''Returns the accuracy of the model on the current batch'''
 
```
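`Accuracy_Categorical.compare` returns an element-wise boolean array; the base `Accuracy` class (not shown in this diff) presumably reduces it to a mean, which is the number `Network.train` reports. The reduction in isolation:

```python
import numpy as np

predictions = np.array([0, 2, 1, 1])    # class labels predicted by the model
y           = np.array([0, 2, 2, 1])    # ground-truth labels

comparisons = predictions == y          # what Accuracy_Categorical.compare returns
print(comparisons.mean())               # 0.75, the fraction of correct predictions
```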

TinyFlow/Network.py

Lines changed: 23 additions & 9 deletions

```diff
@@ -1,7 +1,3 @@
-######################
-## UNDER DEVELOPMENT##
-######################
-
 # Network class
 class Network:
 
@@ -65,12 +61,12 @@ def forward(self, X, training):
         # Call the forward method on the input layer
         # This will set the output property that the first layer
         # is expecting as the 'prev' object
-        self.input_layer.forward(X)
+        self.input_layer.forward(X, training)
 
         # Call the forward method of every object in sequence
         # while passing the output property of the previous object as a parameter
         for layer in self.layers:
-            layer.forward(layer.prev.output)
+            layer.forward(layer.prev.output, training)
 
         # 'layer' is now the last object from the list, return its output
         return layer.output
@@ -99,7 +95,7 @@ def train(self, X, y, *, epochs=1, print_every=1, validation_data=None):
             output = self.forward(X, training=True)
 
             # Calculate loss
-            data_loss, regularization_loss = self.loss.calculate(output, y)
+            data_loss, regularization_loss = self.loss.calculate(output, y, include_regularization=True)
 
             loss = data_loss + regularization_loss
 
@@ -118,11 +114,29 @@ def train(self, X, y, *, epochs=1, print_every=1, validation_data=None):
 
             # Print a summary
             if not epoch % print_every:
-                print(f'epoch: {epoch}, acc: {accuracy:.3f}, loss: {loss:.3f} (data_loss: {data_loss:.3f}, reg_loss: {regularization_loss:.3f}), lr: {self.optimizer.current_learning_rate}')
+                print(f'\nepoch: {epoch}\nacc: {accuracy:.3f}, loss: {loss:.3f} (data_loss: {data_loss:.3f}, reg_loss: {regularization_loss:.3f}), lr: {self.optimizer.current_learning_rate}')
+
+        # If validation data has been provided
+        if validation_data is not None:
+
+            # For better readability
+            X_val, y_val = validation_data
+
+            # Perform forward pass
+            output = self.forward(X_val, training=False)
+
+            # Calculate the loss
+            loss = self.loss.calculate(output, y_val)
+
+            # Get predictions and calculate validation accuracy
+            predictions = self.output_layer_activation.predictions(output)
+            accuracy = self.accuracy.calculate(predictions, y_val)
 
+            # Print a summary
+            print(f'\nvalidation,\nacc: {accuracy:.3f}, loss: {loss:.3f}')
 
 class Layer_Input:
 
     # Pass the input
-    def forward(self, inputs):
+    def forward(self, inputs, training):
         self.output = inputs
```
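`Network.forward` now threads the `training` flag through the same `prev.output` chain it uses for data, which is why `Layer_Input` needed the signature change too. A condensed standalone sketch of that chaining; the minimal classes and the wiring loop are hypothetical, mirroring what `compile_model` presumably sets up:

```python
# Hypothetical minimal layers, not TinyFlow's: just enough to show the chain
class Input:
    def forward(self, inputs, training):
        self.output = inputs

class AddOne:
    def forward(self, inputs, training):
        self.output = inputs + 1

# Wiring step: set each layer's 'prev' pointer, as compile_model presumably does
input_layer, layers = Input(), [AddOne(), AddOne()]
prev = input_layer
for layer in layers:
    layer.prev = prev
    prev = layer

def forward(X, training):
    input_layer.forward(X, training)            # seed the chain
    for layer in layers:
        layer.forward(layer.prev.output, training)  # flag threaded alongside data
    return layer.output                          # 'layer' is the last in the list

print(forward(3, training=True))                 # 5
```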

TinyFlow/Network_DEPRECATED.py

Lines changed: 7 additions & 0 deletions

```diff
@@ -1,3 +1,10 @@
+############################################################################
+#***************************************************************************
+# THIS VERSION OF THE NETWORK WRAPPER HAS BEEN DEPRECATED
+# It is HIGHLY RECOMMENDED to use the more stable and flexible Network class
+#***************************************************************************
+############################################################################
+
 import numpy as np
 from TinyFlow import Layers
 from TinyFlow import Activations
```

demo_Network_DEPRECATED.py

Lines changed: 7 additions & 0 deletions

```diff
@@ -1,3 +1,10 @@
+############################################################################
+#***************************************************************************
+# THIS FILE USES A DEPRECATED VERSION OF THE NETWORK WRAPPER
+# It is HIGHLY RECOMMENDED to use the more stable and flexible Network class
+#***************************************************************************
+############################################################################
+
 from TinyFlow.Network_DEPRECATED import Network
 from TinyFlow.Loss import Loss_CategoricalCrossEntropy
 from TinyFlow.Optimizers import Optimizer_Adam
```
demo_Network_binary_logistic_regression.py

Lines changed: 39 additions & 0 deletions

```diff
@@ -0,0 +1,39 @@
+# Demo for Network wrapper to implement Binary Logistic Regression
+import numpy as np
+from TinyFlow.Network import Network
+from TinyFlow.Datasets import spiral_data
+from TinyFlow.Layers import Layer_Dense
+from TinyFlow.Activations import Activation_Sigmoid, Activation_ReLU
+from TinyFlow.Optimizers import Optimizer_Adam
+from TinyFlow.Loss import Loss_BinaryCrossEntropy
+from TinyFlow.Metrics import Accuracy_Categorical
+
+# Create train and test set
+X, y = spiral_data(100, 2)
+X_test, y_test = spiral_data(100, 2)
+
+# Reshape labels to be a list of lists
+# Inner list contains one output (either 0 or 1)
+# per each output neuron, 1 in this case
+y = y.reshape(-1, 1)
+y_test = y_test.reshape(-1, 1)
+
+# Instantiate the model
+model = Network()
+
+# Add layers
+model.add(Layer_Dense(2, 64, weight_regularizer_l2=5e-4, bias_regulariser_l2=5e-4))
+model.add(Activation_ReLU())
+model.add(Layer_Dense(64, 1))
+model.add(Activation_Sigmoid())
+
+# Set loss, optimizer, and accuracy
+model.set(loss=Loss_BinaryCrossEntropy(), optimizer=Optimizer_Adam(
+    decay=1e-8), accuracy=Accuracy_Categorical())
+
+# Compile the model
+model.compile_model()
+
+# Train the model
+model.train(X, y, epochs=10000, print_every=100,
+            validation_data=(X_test, y_test))
```

demo_Network_categorical.py

Lines changed: 33 additions & 0 deletions

```diff
@@ -0,0 +1,33 @@
+# Demo for Network wrapper to implement Multiclass Classification
+import numpy as np
+from TinyFlow.Network import Network
+from TinyFlow.Datasets import spiral_data
+from TinyFlow.Layers import Layer_Dense, Layer_Dropout
+from TinyFlow.Activations import Activation_Softmax, Activation_ReLU
+from TinyFlow.Optimizers import Optimizer_Adam
+from TinyFlow.Loss import Loss_CategoricalCrossEntropy
+from TinyFlow.Metrics import Accuracy_Categorical
+
+# Create dataset
+X, y = spiral_data(1000, 3)
+X_test, y_test = spiral_data(100, 3)
+
+# Instantiate the model
+model = Network()
+
+# Add layers
+model.add(Layer_Dense(2, 512, weight_regularizer_l2=5e-4, bias_regulariser_l2=5e-4))
+model.add(Activation_ReLU())
+model.add(Layer_Dropout(0.1))
+model.add(Layer_Dense(512, 3))
+model.add(Activation_Softmax())
+
+# Set loss, optimizer, and accuracy objects
+model.set(loss=Loss_CategoricalCrossEntropy(), optimizer=Optimizer_Adam(
+    learning_rate=0.05, decay=1e-8), accuracy=Accuracy_Categorical())
+
+# Compile the model
+model.compile_model()
+
+# Train the model
+model.train(X, y, epochs=10000, print_every=100, validation_data=(X_test, y_test))
```
