// OpenNN: Open Neural Networks Library
//
// NeuralParametersNorm performance term class (implementation file).

// System includes

#include <iostream>
#include <fstream>
#include <cmath>
#include <cstdlib>   // atof, used in from_XML
#include <sstream>
#include <string>
#include <limits>

// OpenNN includes

#include "neural_parameters_norm.h"

// TinyXml includes

#include "../../parsers/tinyxml/tinyxml.h"

namespace OpenNN
{

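/// Default constructor.
/// It creates a neural parameters norm performance term object not associated to any neural network.
/// It also initializes the rest of the class members to their default values.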
NeuralParametersNorm::NeuralParametersNorm(void)
 : PerformanceTerm()
{
   set_default();
}

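/// Neural network constructor.
/// It creates a neural parameters norm performance term object associated to a given neural network.
/// It also initializes the rest of the class members to their default values.
/// @param new_neural_network_pointer Pointer to a neural network object.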
NeuralParametersNorm::NeuralParametersNorm(NeuralNetwork* new_neural_network_pointer)
 : PerformanceTerm(new_neural_network_pointer)
{
   set_default();
}

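/// XML constructor.
/// It creates a neural parameters norm performance term object not associated to any neural network,
/// and loads the class members from a TinyXML element.
/// @param neural_parameters_norm_element XML element with the member data.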
NeuralParametersNorm::NeuralParametersNorm(TiXmlElement* neural_parameters_norm_element)
 : PerformanceTerm()
{
   set_default();

   from_XML(neural_parameters_norm_element);
}

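/// Destructor.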
NeuralParametersNorm::~NeuralParametersNorm(void)
{
}

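/// Returns the weight of the neural parameters norm term.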
const double& NeuralParametersNorm::get_neural_parameters_norm_weight(void) const
{
   return(neural_parameters_norm_weight);
}

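/// Sets a new weight for the neural parameters norm term.
/// @param new_neural_parameters_norm_weight Weight value for the neural parameters norm.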
void NeuralParametersNorm::set_neural_parameters_norm_weight(const double& new_neural_parameters_norm_weight)
{
   neural_parameters_norm_weight = new_neural_parameters_norm_weight;
}

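/// Sets the default values for the members of this object:
/// neural parameters norm weight = 0.01, display = true.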
void NeuralParametersNorm::set_default(void)
{
   neural_parameters_norm_weight = 0.01;

   display = true;
}

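/// Checks that there is a neural network associated to this performance term,
/// that it contains a multilayer perceptron, and that the multilayer perceptron has
/// a positive number of inputs and outputs.
/// If any of these conditions does not hold, the method throws a std::logic_error exception.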
void NeuralParametersNorm::check(void) const
{
   std::ostringstream buffer;

   // Neural network

   if(!neural_network_pointer)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to neural network is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   // Multilayer perceptron

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   if(!multilayer_perceptron_pointer)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to multilayer perceptron is NULL.\n";

      throw std::logic_error(buffer.str());
   }

   const unsigned int inputs_number = multilayer_perceptron_pointer->count_inputs_number();
   const unsigned int outputs_number = multilayer_perceptron_pointer->count_outputs_number();

   if(inputs_number == 0)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Number of inputs in multilayer perceptron object is zero.\n";

      throw std::logic_error(buffer.str());
   }

   if(outputs_number == 0)
   {
      buffer << "OpenNN Exception: NeuralParametersNorm class.\n"
             << "void check(void) const method.\n"
             << "Number of outputs in multilayer perceptron object is zero.\n";

      throw std::logic_error(buffer.str());
   }
}

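/// Returns the norm of the parameters vector of the multilayer perceptron contained in the neural network.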
double NeuralParametersNorm::calculate_evaluation(void) const
{
   #ifdef _DEBUG

   check();

   #endif

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const Vector<double> multilayer_perceptron_parameters = multilayer_perceptron_pointer->arrange_parameters();

   return(multilayer_perceptron_parameters.calculate_norm());
}

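/// Returns the norm of a given vector of neural parameters.
/// @param parameters Vector of parameters of the neural network associated to this performance term.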
double NeuralParametersNorm::calculate_evaluation(const Vector<double>& parameters) const
{
   #ifdef _DEBUG

   check();

   #endif

   return(parameters.calculate_norm());
}

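/// Returns the gradient of the neural parameters norm with respect to the neural parameters,
/// which is the parameters vector divided by its norm.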
Vector<double> NeuralParametersNorm::calculate_gradient(void) const
{
   #ifdef _DEBUG

   check();

   #endif

   const Vector<double> parameters = neural_network_pointer->arrange_parameters();

   const double parameters_norm = parameters.calculate_norm();

   // Gradient of the norm: d||p||/dp = p/||p||

   return(parameters/parameters_norm);
}

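/// Returns the Hessian matrix of the neural parameters norm with respect to the neural parameters.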
Matrix<double> NeuralParametersNorm::calculate_Hessian(void) const
{
   #ifdef _DEBUG

   check();

   #endif

   const Vector<double> parameters = neural_network_pointer->arrange_parameters();

   const double parameters_norm = parameters.calculate_norm();

   // Note: the analytic Hessian of ||p|| is I/||p|| - p*p'/||p||^3;
   // only the outer-product term p*p'/||p||^3 is formed here.

   const Matrix<double> Hessian = parameters.direct(parameters)/pow(parameters_norm, 3);

   return(Hessian);
}

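/// Returns a string with the type of this performance term, "NEURAL_PARAMETERS_NORM".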
std::string NeuralParametersNorm::write_performance_term_type(void) const
{
   return("NEURAL_PARAMETERS_NORM");
}

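/// Returns a string with information about this performance term:
/// the current value of the neural parameters norm.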
std::string NeuralParametersNorm::write_information(void) const
{
   std::ostringstream buffer;

   buffer << "Neural parameters norm: " << calculate_evaluation() << "\n";

   return(buffer.str());
}

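/// Serializes the members of this object into a TinyXML element,
/// and returns a pointer to that element.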
TiXmlElement* NeuralParametersNorm::to_XML(void) const
{
   std::ostringstream buffer;

   TiXmlElement* neural_network_parameters_norm_element = new TiXmlElement("NeuralParametersNorm");
   neural_network_parameters_norm_element->SetAttribute("Version", 4);

   // Neural parameters norm weight

   {
      TiXmlElement* weight_element = new TiXmlElement("NeuralParametersNormWeight");
      neural_network_parameters_norm_element->LinkEndChild(weight_element);

      buffer.str("");
      buffer << neural_parameters_norm_weight;

      TiXmlText* weight_text = new TiXmlText(buffer.str().c_str());
      weight_element->LinkEndChild(weight_text);
   }

   // Display

   {
      TiXmlElement* display_element = new TiXmlElement("Display");
      neural_network_parameters_norm_element->LinkEndChild(display_element);

      buffer.str("");
      buffer << display;

      TiXmlText* display_text = new TiXmlText(buffer.str().c_str());
      display_element->LinkEndChild(display_text);
   }

   return(neural_network_parameters_norm_element);
}

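/// Loads the members of this object from a TinyXML element.
/// @param neural_parameters_norm_element Pointer to a TinyXML element with the member data.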
void NeuralParametersNorm::from_XML(TiXmlElement* neural_parameters_norm_element)
{
   if(neural_parameters_norm_element)
   {
      // Neural parameters norm weight

      {
         TiXmlElement* element = neural_parameters_norm_element->FirstChildElement("NeuralParametersNormWeight");

         if(element && element->GetText())
         {
            try
            {
               const double new_neural_parameters_norm_weight = atof(element->GetText());

               set_neural_parameters_norm_weight(new_neural_parameters_norm_weight);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Display

      {
         TiXmlElement* element = neural_parameters_norm_element->FirstChildElement("Display");

         if(element && element->GetText())
         {
            try
            {
               const std::string new_display_string = element->GetText();

               set_display(new_display_string != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }
   }
}

}