-
Notifications
You must be signed in to change notification settings - Fork 1
/
MLP_Network.cpp
79 lines (63 loc) · 2.32 KB
/
MLP_Network.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
#include "MLP_Network.h"
// Builds the layer stack for the network: one MLP_Layer per hidden layer
// plus one output layer (nHiddenLayer + 1 objects in total).
// The first layer maps input->hidden, middle layers map hidden->hidden,
// and the final array element maps hidden->output.
void MLP_Network::Allocate(int nInputUnit, int nHiddenUnit, int nOutputUnit, int nHiddenLayer,
	int nTrainingSet)
{
	// Record the network topology on the object.
	this->nInputUnit   = nInputUnit;
	this->nHiddenUnit  = nHiddenUnit;
	this->nOutputUnit  = nOutputUnit;
	this->nHiddenLayer = nHiddenLayer;
	this->nTrainingSet = nTrainingSet;

	layerNetwork = new MLP_Layer[nHiddenLayer + 1]();

	// Input layer -> first hidden layer.
	layerNetwork[0].Allocate(nInputUnit, nHiddenUnit);

	// Intermediate hidden layers are hidden -> hidden.
	for (int layer = 1; layer < nHiddenLayer; ++layer)
		layerNetwork[layer].Allocate(nHiddenUnit, nHiddenUnit);

	// Last hidden layer -> output layer.
	layerNetwork[nHiddenLayer].Allocate(nHiddenUnit, nOutputUnit);
}
// Releases all resources owned by the network.
// First frees each layer's internal buffers, then releases the layer
// array itself.
void MLP_Network::Delete()
{
	for (int i = 0; i < nHiddenLayer+1; i++)
	{
		layerNetwork[i].Delete();
	}
	// Fix: Allocate() creates this array with new MLP_Layer[...], but it
	// was never released here, leaking the MLP_Layer objects themselves.
	delete[] layerNetwork;
	layerNetwork = NULL; // guard against accidental reuse after Delete()
}
// Runs one forward pass: feeds inputNetwork through every layer in
// sequence, each layer's activation becoming the next layer's input.
// The final activations are kept inside the output layer (retrieved
// later via GetOutput()/GetMaxOutputIndex()).
void MLP_Network::ForwardPropagateNetwork(float* inputNetwork)
{
	// Layer 0 consumes the raw network input.
	float* activation = layerNetwork[0].ForwardPropagate(inputNetwork);

	// Remaining hidden layers and the output layer (index nHiddenLayer)
	// each consume the previous layer's activation.
	for (int layer = 1; layer <= nHiddenLayer; ++layer)
		activation = layerNetwork[layer].ForwardPropagate(activation);
}
// Runs one backward pass: the error signal is computed at the output
// layer from desiredOutput, then propagated back through the hidden
// layers, each layer reading the deltas of the layer just after it.
void MLP_Network::BackwardPropagateNetwork(float* desiredOutput)
{
	layerNetwork[nHiddenLayer].BackwardPropagateOutputLayer(desiredOutput);

	// Walk the hidden layers from last (nHiddenLayer-1) down to 0.
	for (int layer = nHiddenLayer; layer-- > 0; )
		layerNetwork[layer].BackwardPropagateHiddenLayer(&layerNetwork[layer + 1]);
}
// Applies the gradients accumulated during backpropagation to every
// layer's weights, scaled by learningRate. Covers all hidden layers
// and the output layer (index nHiddenLayer).
void MLP_Network::UpdateWeight(float learningRate)
{
	for (int layer = 0; layer <= nHiddenLayer; ++layer)
		layerNetwork[layer].UpdateWeight(learningRate);
}
// Half sum-of-squared-errors over the output units:
//   E = 1/2 * sum_j (desired_j - output_j)^2
// NOTE(review): inputNetwork is unused here — the outputs read below are
// whatever the most recent ForwardPropagateNetwork() call produced;
// confirm callers always run a forward pass first.
float MLP_Network::CostFunction(float* inputNetwork, float* desiredOutput)
{
	const float* outputNetwork = layerNetwork[nHiddenLayer].GetOutput();

	float sumSquared = 0.0f;
	for (int unit = 0; unit < nOutputUnit; ++unit)
	{
		const float diff = desiredOutput[unit] - outputNetwork[unit];
		sumSquared += diff * diff;
	}
	// Multiplying by 0.5f is exactly equivalent to dividing by 2.
	return 0.5f * sumSquared;
}
// Scores one sample as correct/incorrect: the predicted class is the
// output unit with the largest activation; returns 1 when desiredOutput
// marks that same unit with 1.0f (one-hot hit), 0 otherwise.
// NOTE(review): inputNetwork is unused — this relies on a prior
// ForwardPropagateNetwork() call having filled the output layer.
float MLP_Network::CalculateResult(float* inputNetwork, float* desiredOutput)
{
	const int predicted = layerNetwork[nHiddenLayer].GetMaxOutputIndex();
	return (desiredOutput[predicted] == 1.0f) ? 1.0f : 0.0f;
}