00001
00002
00003
00004
00005 #ifndef l2_h
00006 #define l2_h
00007
00008
00009
00010 #include <math.h>
00011 #include <qp.h>
00012
00013
00014
00015 namespace l2
00016 {
00017
00018
00019
00020
00021 using qp::real;
00022 using qp::vector;
00023 using qp::max_real;
00024 using qp::min_real;
00025 using qp::set;
00026 using qp::neuron;
00027 using qp::layer;
00028
00029
00030
00031
00032
00033 real error(const mlp::net& net, const set& ts)
00034 {
00035 real error_ = 0.0;
00036
00037 for (set::const_iterator s = ts.begin(); s != ts.end(); ++s)
00038 {
00039 vector out = net(s->input);
00040
00041 for (unsigned i = 0; i < out.size(); ++i)
00042 {
00043 real target = s->output[i];
00044 real value = out[i];
00045 error_ -= target * log(value + min_real) +
00046 (1.0 - target) * log(1.0 - value + min_real);
00047 }
00048 }
00049
00050 return error_;
00051 }
00052
00053
00054
00055
00056
// Specialization of qp::net that trains against the cross-entropy
// error function.  error() accumulates both the scalar error and the
// per-neuron gradient fields (delta / ndelta / dxo) that the quickprop
// update step consumes.
class net: public qp::net
{
public:
	// Wrap an mlp::net; all state lives in the qp::net base.
	net(mlp::net& n): qp::net(n) {}

	// Total cross-entropy error over training set ts.  Each sample is
	// run forward, then backward() returns that sample's (positive)
	// log-likelihood contribution, which is subtracted here so the
	// result matches the free l2::error() above.
	real error(const set& ts)
	{
		real error_ = 0;

		for (set::const_iterator s = ts.begin(); s != ts.end(); ++s)
		{
			forward(s->input);
			error_ -= backward(s->input, s->output);
		}

		return error_;
	}

private:
	// One backpropagation pass for a single sample.  Assumes forward()
	// has just run, so each neuron's `out` holds its activation.
	// Returns sum_j [ t_j*log(o_j) + (1-t_j)*log(1-o_j) ] (the caller
	// negates).  Layers are walked output-to-input via the reverse
	// iterators inherited from the qp::net base.
	real backward(const vector& input, const vector& output)
	{
		reverse_iterator current_layer = rbegin();
		reverse_iterator backward_layer = current_layer + 1; // layer feeding the output layer
		real error_ = 0;

		// --- Output layer ---
		for (unsigned j = 0; j < current_layer->size(); ++j)
		{
			neuron& n = (*current_layer)[j];
			real out = output[j]; // target value for output unit j
			// Cross-entropy delta: effectively (target - activation).
			// The sigmoid derivative out*(1-out) in the numerator cancels
			// the same factor in the denominator; min_real keeps the
			// quotient finite when the activation saturates at 0 or 1.
			// Note the chained assignment: delta is set first, then
			// accumulated into ndelta.
			n.ndelta += n.delta = (out - n.out) /
				(n.out * (1.0 - n.out) + min_real) * n.out * (1.0 - n.out);

			// Weight gradient: multiply delta by this layer's inputs —
			// the raw sample input for a single-layer net, otherwise the
			// previous (backward) layer's activations.
			if (size() == 1)
				n.dxo += n.delta * input;
			else
				for (unsigned k = 0; k < n.dxo.size(); ++k)
					n.dxo[k] += n.delta * (*backward_layer)[k].out;

			// Positive log-likelihood term; min_real guards log(0).
			error_ += out * log(n.out + min_real) +
				(1.0 - out) * log(1.0 - n.out + min_real);
		}

		// --- Hidden layers, walking towards the input side ---
		while (++current_layer != rend())
		{
			reverse_iterator forward_layer = current_layer - 1;  // layer this one feeds
			reverse_iterator backward_layer = current_layer + 1; // layer feeding this one (intentionally shadows the outer variable)

			for (unsigned j = 0; j < current_layer->size(); ++j)
			{
				neuron& n = (*current_layer)[j];
				// Back-propagated error: weighted sum of the forward
				// layer's deltas.  NOTE(review): weight + dweight1 looks
				// like a lookahead over a pending weight step — confirm
				// against the qp update rule.
				real sum = 0;
				for (unsigned k = 0; k < forward_layer->size(); ++k)
				{
					neuron& nf = (*forward_layer)[k];
					sum += nf.delta * (nf.n->weight[j] + nf.dweight1[j]);
				}
				n.delta = n.out * (1.0 - n.out) * sum; // sigmoid derivative factor
				n.ndelta += n.delta;

				// First hidden layer reads the raw sample input;
				// deeper layers read the preceding layer's activations.
				if (backward_layer == rend())
					n.dxo += n.delta * input;
				else
					for (unsigned k = 0; k < n.dxo.size(); ++k)
						n.dxo[k] += n.delta * (*backward_layer)[k].out;
			}
		}

		return error_;
	}
};
00129
00130
00131
00132 }
00133
00134
00135
00136 #endif // l2_h
00137
00138
00139
00140