-
Notifications
You must be signed in to change notification settings - Fork 0
/
ffn.cu
110 lines (94 loc) · 2.22 KB
/
ffn.cu
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
#pragma once
#include <vector>
#include "layers/layer.hpp"
#include "layers/cross_entropy.cu"
#include "utils.cu"
/**
* Implementation of a standard feed forward network.
*/
class FFN
{
public:
    // Constructor: starts with an empty network; layers are supplied via Add().
    FFN()
    {
        /* Nothing to do here. */
    }
    /**
     * Runs a full forward pass through every layer in insertion order.
     *
     * @param input Batch of inputs to propagate.
     * @param CPU   When true, use each layer's ForwardCPU path; otherwise
     *              use the GPU Forward path.
     * @return Activation of the final layer (also cached in `output`).
     */
    Matrix Forward(Matrix input, bool CPU)
    {
        Matrix layerOutput = input;
        if (CPU)
        {
            // size_t index avoids the signed/unsigned comparison warning
            // against network.size().
            for (size_t i = 0; i < network.size(); i++)
            {
                layerOutput = network[i]->ForwardCPU(layerOutput);
            }
        }
        else
        {
            for (size_t i = 0; i < network.size(); i++)
            {
                layerOutput = network[i]->Forward(layerOutput);
            }
        }
        output = layerOutput;
        return output;
    }
    /**
     * Backpropagates the cross-entropy gradient through the layers in
     * reverse order, letting each layer update its parameters.
     *
     * @param output Network predictions from the forward pass.
     * @param labels Ground-truth labels for the batch.
     * @param lr     Learning rate passed down to every layer.
     * @param CPU    When true, use each layer's BackwardCPU path.
     */
    void Backward(Matrix output, Matrix labels, float lr, bool CPU)
    {
        dOutput = crossEntropy.Backward(output, labels);
        // Explicit cast: with an empty network, `network.size() - 1` would
        // wrap to SIZE_MAX before an implementation-defined narrowing to
        // int; casting first yields a well-defined -1 and the loop is
        // skipped cleanly.
        if (CPU)
        {
            for (int i = static_cast<int>(network.size()) - 1; i >= 0; i--)
            {
                dOutput = network[i]->BackwardCPU(dOutput, lr);
            }
        }
        else
        {
            for (int i = static_cast<int>(network.size()) - 1; i >= 0; i--)
            {
                dOutput = network[i]->Backward(dOutput, lr);
            }
        }
    }
    /**
     * Trains the network: each epoch runs forward+backward on the first
     * nBatches-1 batches, then evaluates loss/accuracy on the final batch,
     * which serves as a held-out test set.
     *
     * @param dataset  Dataset providing DataBatches() and LabelBatches().
     * @param lr       Learning rate for parameter updates.
     * @param nEpochs  Number of training epochs to run.
     * @param nBatches Total number of batches (last one is the test batch).
     * @param CPU      When true, run entirely on the CPU code paths.
     */
    void Train(Dataset dataset, float lr, int nEpochs, int nBatches, bool CPU)
    {
        // Guard: need at least one batch to evaluate on; .at(nBatches - 1)
        // would otherwise throw (or worse, index with a wrapped value).
        if (nBatches < 1)
        {
            return;
        }
        // BUG FIX: the epoch loop previously hard-coded `epoch < 10`,
        // silently ignoring the nEpochs argument.
        for (int epoch = 0; epoch < nEpochs; epoch++)
        {
            float accuracy;
            float loss;
            Matrix output;
            for (int batch = 0; batch < nBatches - 1; batch++)
            {
                output = this->Forward(dataset.DataBatches().at(batch), CPU);
                this->Backward(output, dataset.LabelBatches().at(batch), lr, CPU);
            }
            // Evaluate on the held-out final batch.
            output = this->Forward(dataset.DataBatches().at(nBatches - 1), CPU);
            // NOTE(review): called unconditionally, even in CPU mode —
            // presumably a no-op or harmless there; confirm against Matrix.
            output.CopyDeviceToHost();
            loss = crossEntropy.Forward(output,
                                        dataset.LabelBatches().at(nBatches - 1));
            accuracy = Accuracy(output, dataset.LabelBatches().at(nBatches - 1));
            std::cout << "Epoch: " << epoch << std::endl;
            std::cout << "Test Loss: " << loss << std::endl;
            std::cout << "Test Accuracy: " << accuracy << std::endl;
        }
    }
    // Appends a layer to the network; FFN takes ownership and deletes it
    // in the destructor.
    void Add(Layer* layer)
    {
        this->network.push_back(layer);
    }
    // Destructor to release allocated memory.
    ~FFN()
    {
        for (auto layer : network)
        {
            delete layer;
        }
    }
private:
    Matrix output;                 // Cached activations of the last forward pass.
    Matrix dOutput;                // Gradient flowing backward through the layers.
    CrossEntropy crossEntropy;     // Loss used for both Backward() and Train() eval.
    std::vector<Layer*> network;   // Owned layer pointers, in forward order.
};