-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathNeuralNetwork.cpp
155 lines (122 loc) · 4.01 KB
/
NeuralNetwork.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
#include "NeuralNetwork.h"

#include <cassert>
#include <cmath>
#include <cstddef>
#include <fstream>
#include <iostream>
/// @brief Build the network layer by layer from a topology description.
/// @param topology topology[i] is the neuron count of layer i (excluding the
///        bias neuron that is appended automatically); topology.size() is the
///        total number of layers.
NeuralNetwork::NeuralNetwork(const std::vector<int>& topology) : RMS(0.0)
{
    for (std::size_t layer_index = 0; layer_index < topology.size(); ++layer_index)
    {
        // Append a fresh, empty layer.
        this->layers.emplace_back();
        // The output layer has no outgoing connections; every other layer
        // connects to all non-bias neurons of the next layer.
        const int output_num =
            (layer_index == topology.size() - 1) ? 0 : topology[layer_index + 1];
        // Populate the neurons; + 1 adds the bias neuron as the last element.
        for (int neuron_index = 0; neuron_index < topology[layer_index] + 1; ++neuron_index)
        {
            this->layers.back().push_back(Neuron(neuron_index, output_num));
        }
        // The bias neuron always outputs a constant 1.0.
        this->layers.back().back().SetValue(1.0);
    }
}
/// @brief Run one forward pass: load the features into the input layer, then
///        feed each subsequent layer from the one before it.
/// @param features one value per non-bias input neuron; the trailing bias
///        neuron keeps its constant 1.0.
void NeuralNetwork::ForwardPropagate(const std::vector<double>& features)
{
    Layer& input_layer = this->layers.front();
    // Guard against writing past the input layer (operator[] is unchecked).
    assert(features.size() <= input_layer.size() - 1 &&
           "more features than input neurons");
    for (std::size_t neuron_index = 0; neuron_index < features.size(); ++neuron_index)
    {
        input_layer[neuron_index].SetValue(features[neuron_index]);
    }
    // Forward propagate, layer 0 (the input layer) excluded.
    for (std::size_t layer_index = 1; layer_index < this->layers.size(); ++layer_index)
    {
        Layer& curr_layer = this->layers[layer_index];
        const Layer& previous_layer = this->layers[layer_index - 1];
        // `+ 1 <` skips the bias neuron (its value is fixed) without the
        // unsigned-wrap hazard of `size() - 1`.
        for (std::size_t neuron_index = 0; neuron_index + 1 < curr_layer.size(); ++neuron_index)
        {
            curr_layer[neuron_index].FeedForwardFrom(previous_layer);
        }
    }
}
/// @brief One backward pass: compute the RMS error against the labels, then
///        the output- and hidden-layer gradients, then update the weights.
/// @param labled_examples one target value per non-bias output neuron.
void NeuralNetwork::BackPropagate(const std::vector<double>& labled_examples)
{
    Layer& output_layer = this->layers.back();
    const std::size_t output_count = output_layer.size() - 1; // skip bias neuron
    // Guard against reading past the label vector (operator[] is unchecked).
    assert(labled_examples.size() >= output_count &&
           "one label per output neuron expected");
    // Root-mean-square error over the non-bias output neurons.
    this->RMS = 0.0;
    for (std::size_t neuron_index = 0; neuron_index < output_count; ++neuron_index)
    {
        const double loss = labled_examples[neuron_index] - output_layer[neuron_index].GetValue();
        this->RMS += loss * loss;
    }
    this->RMS = std::sqrt(this->RMS / static_cast<double>(output_count));
    // Output-layer gradients from the label error.
    for (std::size_t neuron_index = 0; neuron_index < output_count; ++neuron_index)
    {
        output_layer[neuron_index].UpdateOutputLayerGradient(labled_examples[neuron_index]);
    }
    // Hidden-layer gradients, propagated backwards (output and input layers
    // excluded by the loop bounds).
    for (std::size_t layer_index = this->layers.size() - 2; layer_index > 0; --layer_index)
    {
        Layer& curr_layer = this->layers[layer_index];
        const Layer& next_layer = this->layers[layer_index + 1];
        for (Neuron& curr_neuron : curr_layer)
        {
            curr_neuron.UpdateHiddenLayerGradient(next_layer);
        }
    }
    // Update the incoming weights of every non-bias neuron, output layer first.
    for (std::size_t layer_index = this->layers.size() - 1; layer_index > 0; --layer_index)
    {
        Layer& curr_layer = this->layers[layer_index];
        Layer& prev_layer = this->layers[layer_index - 1];
        for (std::size_t neuron_index = 0; neuron_index + 1 < curr_layer.size(); ++neuron_index)
        {
            curr_layer[neuron_index].UpdateLayerWeight(prev_layer);
        }
    }
}
/// @brief Collect the activation of every output neuron.
/// @return one value per non-bias output neuron, in layer order.
std::vector<double> NeuralNetwork::GetResult() const
{
    const Layer& output_layer = this->layers.back();
    // -1 excludes the trailing bias neuron.
    const int output_count = static_cast<int>(output_layer.size()) - 1;
    std::vector<double> results;
    results.reserve(output_count);
    for (int i = 0; i < output_count; ++i)
    {
        results.push_back(output_layer[i].GetValue());
    }
    return results;
}
double NeuralNetwork::GetRMS() const
{
return this->RMS;
}
void NeuralNetwork::SaveNeuralNetwork(const std::string& file_name) const
{
std::ofstream output(file_name, std::ofstream::trunc | std::ofstream::binary);
for (const Layer& layer : this->layers)
{
for (const Neuron& neuron : layer)
{
output.write(reinterpret_cast<const char*>(neuron.output_weights.data()), neuron.output_weights.size() * sizeof(Connection));
}
}
output.close();
}
bool NeuralNetwork::LoadNeuralNetwork(const std::string& file_name)
{
std::ifstream input(file_name, std::ifstream::binary);
if (!input.is_open()) return false;
for (Layer& layer : this->layers)
{
for (Neuron& neuron : layer)
{
input.read(reinterpret_cast<char*>(neuron.output_weights.data()), neuron.output_weights.size() * sizeof(Connection));
}
}
input.close();
return true;
}