00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018 #include <string>
00019 #include <sstream>
00020 #include <iostream>
00021 #include <fstream>
00022 #include <algorithm>
00023 #include <functional>
00024 #include <limits>
00025 #include <math.h>
00026 #include <time.h>
00027
00028
00029
00030 #include "levenberg_marquardt_algorithm.h"
00031
00032 #include "../data_set/data_set.h"
00033
00034
00035 namespace OpenNN
00036 {
00037
00038
00039
00043
00044 LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithm(void)
00045 : TrainingAlgorithm()
00046 {
00047 set_default();
00048 }
00049
00050
00051
00052
00057
00058 LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithm(PerformanceFunctional* new_performance_functional_pointer)
00059 : TrainingAlgorithm(new_performance_functional_pointer)
00060 {
00061 set_default();
00062 }
00063
00064
00065
00066
00070
00071 LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithm(TiXmlElement* Levenberg_Marquardt_algorithm_element)
00072 : TrainingAlgorithm(Levenberg_Marquardt_algorithm_element)
00073 {
00074 set_default();
00075
00076 from_XML(Levenberg_Marquardt_algorithm_element);
00077 }
00078
00079
00080
00081
00084
/// Destructor.
/// This class owns no raw resources, so nothing is deallocated here.

LevenbergMarquardtAlgorithm::~LevenbergMarquardtAlgorithm(void)
{
}
00088
00089
00090
00091
00094
00095 const double& LevenbergMarquardtAlgorithm::get_warning_parameters_norm(void) const
00096 {
00097 return(warning_parameters_norm);
00098 }
00099
00100
00101
00102
00105
00106 const double& LevenbergMarquardtAlgorithm::get_warning_gradient_norm(void) const
00107 {
00108 return(warning_gradient_norm);
00109 }
00110
00111
00112
00113
00116
00117 const double& LevenbergMarquardtAlgorithm::get_error_parameters_norm(void) const
00118 {
00119 return(error_parameters_norm);
00120 }
00121
00122
00123
00124
00127
00128 const double& LevenbergMarquardtAlgorithm::get_error_gradient_norm(void) const
00129 {
00130 return(error_gradient_norm);
00131 }
00132
00133
00134
00135
00137
00138 const double& LevenbergMarquardtAlgorithm::get_minimum_parameters_increment_norm(void) const
00139 {
00140 return(minimum_parameters_increment_norm);
00141 }
00142
00143
00144
00145
00147
00148 const double& LevenbergMarquardtAlgorithm::get_minimum_performance_increase(void) const
00149 {
00150 return(minimum_performance_increase);
00151 }
00152
00153
00154
00155
00158
00159 const double& LevenbergMarquardtAlgorithm::get_performance_goal(void) const
00160 {
00161 return(performance_goal);
00162 }
00163
00164
00165
00166
00169
00170 const double& LevenbergMarquardtAlgorithm::get_gradient_norm_goal(void) const
00171 {
00172 return(gradient_norm_goal);
00173 }
00174
00175
00176
00177
00179
00180 const unsigned int& LevenbergMarquardtAlgorithm::get_maximum_generalization_evaluation_decreases(void) const
00181 {
00182 return(maximum_generalization_evaluation_decreases);
00183 }
00184
00185
00186
00187
00189
00190 const unsigned int& LevenbergMarquardtAlgorithm::get_maximum_epochs_number(void) const
00191 {
00192 return(maximum_epochs_number);
00193 }
00194
00195
00196
00197
00199
00200 const double& LevenbergMarquardtAlgorithm::get_maximum_time(void) const
00201 {
00202 return(maximum_time);
00203 }
00204
00205
00206
00207
00209
00210 const bool& LevenbergMarquardtAlgorithm::get_reserve_parameters_history(void) const
00211 {
00212 return(reserve_parameters_history);
00213 }
00214
00215
00216
00217
00219
00220 const bool& LevenbergMarquardtAlgorithm::get_reserve_parameters_norm_history(void) const
00221 {
00222 return(reserve_parameters_norm_history);
00223 }
00224
00225
00226
00227
00229
00230 const bool& LevenbergMarquardtAlgorithm::get_reserve_evaluation_history(void) const
00231 {
00232 return(reserve_evaluation_history);
00233 }
00234
00235
00236
00237
00239
00240 const bool& LevenbergMarquardtAlgorithm::get_reserve_gradient_history(void) const
00241 {
00242 return(reserve_gradient_history);
00243 }
00244
00245
00246
00247
00249
00250 const bool& LevenbergMarquardtAlgorithm::get_reserve_gradient_norm_history(void) const
00251 {
00252 return(reserve_gradient_norm_history);
00253 }
00254
00255
00256
00257
00259
00260 const bool& LevenbergMarquardtAlgorithm::get_reserve_inverse_Hessian_history(void) const
00261 {
00262 return(reserve_inverse_Hessian_history);
00263 }
00264
00265
00266
00267
00269
00270 const bool& LevenbergMarquardtAlgorithm::get_reserve_elapsed_time_history(void) const
00271 {
00272 return(reserve_elapsed_time_history);
00273 }
00274
00275
00276
00277
00279
00280 const bool& LevenbergMarquardtAlgorithm::get_reserve_generalization_evaluation_history(void) const
00281 {
00282 return(reserve_generalization_evaluation_history);
00283 }
00284
00285
00286
00287
00289
00290 const unsigned int& LevenbergMarquardtAlgorithm::get_display_period(void) const
00291 {
00292 return(display_period);
00293 }
00294
00295
00296
00297
00299
00300 const double& LevenbergMarquardtAlgorithm::get_damping_parameter(void) const
00301 {
00302 return(damping_parameter);
00303 }
00304
00305
00306
00307
00309
00310 const double& LevenbergMarquardtAlgorithm::get_damping_parameter_factor(void) const
00311 {
00312 return(damping_parameter_factor);
00313 }
00314
00315
00316
00317
00319
00320 const double& LevenbergMarquardtAlgorithm::get_minimum_damping_parameter(void) const
00321 {
00322 return(minimum_damping_parameter);
00323 }
00324
00325
00326
00327
00329
00330 const double& LevenbergMarquardtAlgorithm::get_maximum_damping_parameter(void) const
00331 {
00332 return(maximum_damping_parameter);
00333 }
00334
00335
00336
00337
00339
00340 const bool& LevenbergMarquardtAlgorithm::get_reserve_damping_parameter_history(void) const
00341 {
00342 return(reserve_damping_parameter_history);
00343 }
00344
00345
00346
00347
00349
00350 const Vector<double>& LevenbergMarquardtAlgorithm::get_damping_parameter_history(void) const
00351 {
00352 return(damping_parameter_history);
00353 }
00354
00355
00356
00357
00374
/// Sets all the members of this object to their default values.

void LevenbergMarquardtAlgorithm::set_default(void)
{
   // Warning thresholds on parameters and gradient norms.

   warning_parameters_norm = 1.0e6;
   warning_gradient_norm = 1.0e6;

   // Error thresholds on parameters and gradient norms.

   error_parameters_norm = 1.0e9;
   error_gradient_norm = 1.0e9;

   // Stopping criteria.

   minimum_parameters_increment_norm = 0.0;

   minimum_performance_increase = 0.0;
   performance_goal = -1.0e99;
   gradient_norm_goal = 0.0;
   maximum_generalization_evaluation_decreases = 1000000;

   maximum_epochs_number = 1000;
   maximum_time = 1000.0;

   // Training-history reserve flags (only the evaluation history is kept by default).

   reserve_parameters_history = false;
   reserve_parameters_norm_history = false;

   reserve_evaluation_history = true;
   reserve_gradient_history = false;
   reserve_gradient_norm_history = false;
   reserve_generalization_evaluation_history = false;

   reserve_elapsed_time_history = false;

   // Display settings.

   display = true;
   display_period = 100;

   // Levenberg-Marquardt damping parameter and its bounds.

   damping_parameter = 1.0e-3;

   damping_parameter_factor = 10.0;

   minimum_damping_parameter = 1.0e-6;
   maximum_damping_parameter = 1.0e6;

   reserve_damping_parameter_history = false;
}
00425
00426
00427
00428
00431
00432 void LevenbergMarquardtAlgorithm::set_damping_parameter(const double& new_damping_parameter)
00433 {
00434 if(new_damping_parameter <= minimum_damping_parameter)
00435 {
00436 damping_parameter = minimum_damping_parameter;
00437 }
00438 else if(new_damping_parameter >= maximum_damping_parameter)
00439 {
00440 damping_parameter = maximum_damping_parameter;
00441 }
00442 else
00443 {
00444 damping_parameter = new_damping_parameter;
00445 }
00446 }
00447
00448
00449
00450
00453
00454 void LevenbergMarquardtAlgorithm::set_damping_parameter_factor(const double& new_damping_parameter_factor)
00455 {
00456 #ifdef _DEBUG
00457
00458 if(new_damping_parameter_factor <= 0.0)
00459 {
00460 std::ostringstream buffer;
00461
00462 buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
00463 << "void set_damping_parameter_factor(const double&) method." << std::endl
00464 << "Damping parameter factor must be greater than zero." << std::endl;
00465
00466 throw std::logic_error(buffer.str().c_str());
00467 }
00468
00469 #endif
00470
00471 damping_parameter_factor = new_damping_parameter_factor;
00472 }
00473
00474
00475
00476
00479
00480 void LevenbergMarquardtAlgorithm::set_minimum_damping_parameter(const double& new_minimum_damping_parameter)
00481 {
00482 #ifdef _DEBUG
00483
00484 if(new_minimum_damping_parameter <= 0.0)
00485 {
00486 std::ostringstream buffer;
00487
00488 buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
00489 << "void set_minimum_damping_parameter(const double&) method." << std::endl
00490 << "Minimum damping parameter must be greater than zero." << std::endl;
00491
00492 throw std::logic_error(buffer.str().c_str());
00493 }
00494
00495 #endif
00496
00497 minimum_damping_parameter = new_minimum_damping_parameter;
00498 }
00499
00500
00501
00502
00505
00506 void LevenbergMarquardtAlgorithm::set_maximum_damping_parameter(const double& new_maximum_damping_parameter)
00507 {
00508 #ifdef _DEBUG
00509
00510 if(new_maximum_damping_parameter <= 0.0)
00511 {
00512 std::ostringstream buffer;
00513
00514 buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
00515 << "void set_maximum_damping_parameter(const double&) method." << std::endl
00516 << "Maximum damping parameter must be greater than zero." << std::endl;
00517
00518 throw std::logic_error(buffer.str().c_str());
00519 }
00520
00521 #endif
00522
00523 maximum_damping_parameter = new_maximum_damping_parameter;
00524 }
00525
00526
00527
00528
00531
00532 void LevenbergMarquardtAlgorithm::set_reserve_damping_parameter_history(const bool& new_reserve_damping_parameter_history)
00533 {
00534 reserve_damping_parameter_history = new_reserve_damping_parameter_history;
00535 }
00536
00537
00538
00539
00543
00544 void LevenbergMarquardtAlgorithm::set_warning_parameters_norm(const double& new_warning_parameters_norm)
00545 {
00546
00547
00548 #ifdef _DEBUG
00549
00550 if(new_warning_parameters_norm < 0.0)
00551 {
00552 std::ostringstream buffer;
00553
00554 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00555 << "void set_warning_parameters_norm(const double&) method.\n"
00556 << "Warning parameters norm must be equal or greater than 0.\n";
00557
00558 throw std::logic_error(buffer.str().c_str());
00559 }
00560
00561 #endif
00562
00563
00564
00565 warning_parameters_norm = new_warning_parameters_norm;
00566 }
00567
00568
00569
00570
00574
00575 void LevenbergMarquardtAlgorithm::set_warning_gradient_norm(const double& new_warning_gradient_norm)
00576 {
00577
00578
00579 #ifdef _DEBUG
00580
00581 if(new_warning_gradient_norm < 0.0)
00582 {
00583 std::ostringstream buffer;
00584
00585 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00586 << "void set_warning_gradient_norm(const double&) method.\n"
00587 << "Warning gradient norm must be equal or greater than 0.\n";
00588
00589 throw std::logic_error(buffer.str().c_str());
00590 }
00591
00592 #endif
00593
00594
00595
00596 warning_gradient_norm = new_warning_gradient_norm;
00597 }
00598
00599
00600
00601
00605
00606 void LevenbergMarquardtAlgorithm::set_error_parameters_norm(const double& new_error_parameters_norm)
00607 {
00608
00609
00610 #ifdef _DEBUG
00611
00612 if(new_error_parameters_norm < 0.0)
00613 {
00614 std::ostringstream buffer;
00615
00616 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00617 << "void set_error_parameters_norm(const double&) method.\n"
00618 << "Error parameters norm must be equal or greater than 0.\n";
00619
00620 throw std::logic_error(buffer.str().c_str());
00621 }
00622
00623 #endif
00624
00625
00626
00627 error_parameters_norm = new_error_parameters_norm;
00628 }
00629
00630
00631
00632
00636
00637 void LevenbergMarquardtAlgorithm::set_error_gradient_norm(const double& new_error_gradient_norm)
00638 {
00639
00640
00641 #ifdef _DEBUG
00642
00643 if(new_error_gradient_norm < 0.0)
00644 {
00645 std::ostringstream buffer;
00646
00647 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00648 << "void set_error_gradient_norm(const double&) method.\n"
00649 << "Error gradient norm must be equal or greater than 0.\n";
00650
00651 throw std::logic_error(buffer.str().c_str());
00652 }
00653
00654 #endif
00655
00656
00657
00658 error_gradient_norm = new_error_gradient_norm;
00659 }
00660
00661
00662
00663
00666
00667 void LevenbergMarquardtAlgorithm::set_minimum_parameters_increment_norm(const double& new_minimum_parameters_increment_norm)
00668 {
00669
00670
00671 #ifdef _DEBUG
00672
00673 if(new_minimum_parameters_increment_norm < 0.0)
00674 {
00675 std::ostringstream buffer;
00676
00677 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00678 << "void new_minimum_parameters_increment_norm(const double&) method.\n"
00679 << "Minimum parameters increment norm must be equal or greater than 0.\n";
00680
00681 throw std::logic_error(buffer.str().c_str());
00682 }
00683
00684 #endif
00685
00686
00687
00688 minimum_parameters_increment_norm = new_minimum_parameters_increment_norm;
00689 }
00690
00691
00692
00693
00696
00697 void LevenbergMarquardtAlgorithm::set_minimum_performance_increase(const double& new_minimum_performance_increase)
00698 {
00699
00700
00701 #ifdef _DEBUG
00702
00703 if(new_minimum_performance_increase < 0.0)
00704 {
00705 std::ostringstream buffer;
00706
00707 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00708 << "void set_minimum_performance_increase(const double&) method.\n"
00709 << "Minimum performance improvement must be equal or greater than 0.\n";
00710
00711 throw std::logic_error(buffer.str().c_str());
00712 }
00713
00714 #endif
00715
00716
00717
00718 minimum_performance_increase = new_minimum_performance_increase;
00719 }
00720
00721
00722
00723
00727
00728 void LevenbergMarquardtAlgorithm::set_performance_goal(const double& new_performance_goal)
00729 {
00730 performance_goal = new_performance_goal;
00731 }
00732
00733
00734
00735
00739
00740 void LevenbergMarquardtAlgorithm::set_gradient_norm_goal(const double& new_gradient_norm_goal)
00741 {
00742
00743
00744 #ifdef _DEBUG
00745
00746 if(new_gradient_norm_goal < 0.0)
00747 {
00748 std::ostringstream buffer;
00749
00750 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00751 << "void set_gradient_norm_goal(const double&) method.\n"
00752 << "Gradient norm goal must be equal or greater than 0.\n";
00753
00754 throw std::logic_error(buffer.str().c_str());
00755 }
00756
00757 #endif
00758
00759
00760
00761 gradient_norm_goal = new_gradient_norm_goal;
00762 }
00763
00764
00765
00766
00769
00770 void LevenbergMarquardtAlgorithm::set_maximum_generalization_evaluation_decreases(const unsigned int& new_maximum_generalization_evaluation_decreases)
00771 {
00772
00773
00774 #ifdef _DEBUG
00775
00776 if(new_maximum_generalization_evaluation_decreases < 0)
00777 {
00778 std::ostringstream buffer;
00779
00780 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00781 << "void set_maximum_generalization_evaluation_decreases(const unsigned int&) method.\n"
00782 << "Number of generalization performance decreases must be equal or greater than 0.\n";
00783
00784 throw std::logic_error(buffer.str().c_str());
00785 }
00786
00787 #endif
00788
00789
00790
00791 maximum_generalization_evaluation_decreases = new_maximum_generalization_evaluation_decreases;
00792 }
00793
00794
00795
00796
00799
00800 void LevenbergMarquardtAlgorithm::set_maximum_epochs_number(const unsigned int& new_maximum_epochs_number)
00801 {
00802
00803
00804 #ifdef _DEBUG
00805
00806 if(new_maximum_epochs_number < 0)
00807 {
00808 std::ostringstream buffer;
00809
00810 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00811 << "void set_maximum_epochs_number(unsigned int) method.\n"
00812 << "Number of epochs must be equal or greater than 0.\n";
00813
00814 throw std::logic_error(buffer.str().c_str());
00815 }
00816
00817 #endif
00818
00819
00820
00821 maximum_epochs_number = new_maximum_epochs_number;
00822 }
00823
00824
00825
00826
00829
00830 void LevenbergMarquardtAlgorithm::set_maximum_time(const double& new_maximum_time)
00831 {
00832
00833
00834 #ifdef _DEBUG
00835
00836 if(new_maximum_time < 0.0)
00837 {
00838 std::ostringstream buffer;
00839
00840 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00841 << "void set_maximum_time(const double&) method.\n"
00842 << "Maximum time must be equal or greater than 0.\n";
00843
00844 throw std::logic_error(buffer.str().c_str());
00845 }
00846
00847 #endif
00848
00849
00850
00851 maximum_time = new_maximum_time;
00852 }
00853
00854
00855
00856
00859
00860 void LevenbergMarquardtAlgorithm::set_reserve_parameters_history(const bool& new_reserve_parameters_history)
00861 {
00862 reserve_parameters_history = new_reserve_parameters_history;
00863 }
00864
00865
00866
00867
00870
00871 void LevenbergMarquardtAlgorithm::set_reserve_parameters_norm_history(const bool& new_reserve_parameters_norm_history)
00872 {
00873 reserve_parameters_norm_history = new_reserve_parameters_norm_history;
00874 }
00875
00876
00877
00878
00881
00882 void LevenbergMarquardtAlgorithm::set_reserve_evaluation_history(const bool& new_reserve_evaluation_history)
00883 {
00884 reserve_evaluation_history = new_reserve_evaluation_history;
00885 }
00886
00887
00888
00889
00892
00893 void LevenbergMarquardtAlgorithm::set_reserve_gradient_history(const bool& new_reserve_gradient_history)
00894 {
00895 reserve_gradient_history = new_reserve_gradient_history;
00896 }
00897
00898
00899
00900
00904
00905 void LevenbergMarquardtAlgorithm::set_reserve_gradient_norm_history(const bool& new_reserve_gradient_norm_history)
00906 {
00907 reserve_gradient_norm_history = new_reserve_gradient_norm_history;
00908 }
00909
00910
00911
00912
00916
00917 void LevenbergMarquardtAlgorithm::set_reserve_inverse_Hessian_history(const bool& new_reserve_inverse_Hessian_history)
00918 {
00919 reserve_inverse_Hessian_history = new_reserve_inverse_Hessian_history;
00920 }
00921
00922
00923
00924
00928
00929 void LevenbergMarquardtAlgorithm::set_reserve_elapsed_time_history(const bool& new_reserve_elapsed_time_history)
00930 {
00931 reserve_elapsed_time_history = new_reserve_elapsed_time_history;
00932 }
00933
00934
00935
00936
00940
00941 void LevenbergMarquardtAlgorithm::set_reserve_generalization_evaluation_history(const bool& new_reserve_generalization_evaluation_history)
00942 {
00943 reserve_generalization_evaluation_history = new_reserve_generalization_evaluation_history;
00944 }
00945
00946
00947
00948
00952
00953 void LevenbergMarquardtAlgorithm::set_display_period(const unsigned int& new_display_period)
00954 {
00955
00956
00957 #ifdef _DEBUG
00958
00959 if(new_display_period <= 0)
00960 {
00961 std::ostringstream buffer;
00962
00963 buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
00964 << "void set_display_period(const double&) method.\n"
00965 << "First training rate must be greater than 0.\n";
00966
00967 throw std::logic_error(buffer.str().c_str());
00968 }
00969
00970 #endif
00971
00972 display_period = new_display_period;
00973 }
00974
00975
00976
00977
00986
/// Checks that the objects needed for training are all set, walking the chain
/// performance functional -> objective term -> data set, plus the neural network.
/// @throws std::logic_error If any of the required pointers is NULL.

void LevenbergMarquardtAlgorithm::check(void) const
{
   std::ostringstream buffer;

   // Performance functional.

   if(!performance_functional_pointer)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to performance functional is NULL.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   // Objective term inside the performance functional.

   const PerformanceTerm* objective_term_pointer = performance_functional_pointer->get_objective_term_pointer();

   if(!objective_term_pointer)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class.\n"
             << "void check(void) const method.\n"
             << "Pointer to objective term in performance functional is NULL.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   // Data set associated to the objective term.

   const DataSet* data_set_pointer = objective_term_pointer->get_data_set_pointer();

   if(!data_set_pointer)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "void check(void) const method.\n"
             << "Pointer to data set in objective term is NULL." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   // Neural network associated to the performance functional.

   const NeuralNetwork* neural_network_pointer = performance_functional_pointer->get_neural_network_pointer();

   if(!neural_network_pointer)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "void check(void) const method.\n"
             << "Pointer to neural network is NULL." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }
}
01033
01034
01035
01038
/// Returns the Levenberg-Marquardt Hessian approximation
/// J'·J + damping_parameter·I, where J is the Jacobian of the error terms.
/// @param Jacobian_terms Jacobian matrix of the error terms
/// (rows: training instances, columns: network parameters — checked in debug builds).

Matrix<double> LevenbergMarquardtAlgorithm::calculate_Hessian_approximation(const Matrix<double>& Jacobian_terms) const
{
   // Debug-only sanity checks on the pointer chain and the Jacobian row count.

   #ifdef _DEBUG

   check();

   std::ostringstream buffer;

   const DataSet* data_set_pointer = performance_functional_pointer->get_objective_term_pointer()->get_data_set_pointer();

   const InstancesInformation& instances_information = data_set_pointer->get_instances_information();

   const unsigned int training_instances_number = instances_information.count_training_instances_number();

   const unsigned int rows_number = Jacobian_terms.get_rows_number();

   if(rows_number != training_instances_number)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Matrix<double> calculate_Hessian_approximation(const Matrix<double>&) const method." << std::endl
             << "Number of rows of errors derivatives must be equal to number of training instances." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   const NeuralNetwork* neural_network_pointer = performance_functional_pointer->get_neural_network_pointer();

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   // Debug-only check: the neural network must contain a multilayer perceptron.

   #ifdef _DEBUG

   if(!multilayer_perceptron_pointer)
   {
      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Matrix<double> calculate_Hessian_approximation(const Matrix<double>&) const method." << std::endl
             << "Pointer to multilayer perceptron in neural network is NULL." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   const unsigned int network_parameters_number = multilayer_perceptron_pointer->count_parameters_number();

   // Debug-only check: Jacobian column count must match the parameter count.

   #ifdef _DEBUG

   const unsigned int columns_number = Jacobian_terms.get_columns_number();

   if(columns_number != network_parameters_number)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Matrix<double> calculate_Hessian_approximation(const Matrix<double>&) const method." << std::endl
             << "Number of columns of errors derivatives must be equal to number of network parameters." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   // H ~ J'·J + lambda·I (damped Gauss-Newton approximation).

   Matrix<double> identity(network_parameters_number, network_parameters_number);
   identity.initialize_identity();

   return(Jacobian_terms.calculate_transpose().dot(Jacobian_terms) + identity*damping_parameter);
}
01109
01110
01111
01112
01116
/// Returns the gradient of the performance as 2·J'·e, where J is the Jacobian of
/// the error terms and e the vector of error term values.
/// @param evaluation_terms Vector of error term values (one per training instance).
/// @param Jacobian_terms Jacobian matrix of the error terms
/// (rows: training instances, columns: network parameters — checked in debug builds).

Vector<double> LevenbergMarquardtAlgorithm
::calculate_gradient(const Vector<double>& evaluation_terms, const Matrix<double>& Jacobian_terms) const
{
   // Debug-only sanity checks on the sizes of the error vector and the Jacobian.

   #ifdef _DEBUG

   DataSet* data_set_pointer = performance_functional_pointer->get_objective_term_pointer()->get_data_set_pointer();

   const InstancesInformation& instances_information = data_set_pointer->get_instances_information();

   const unsigned int training_instances_number = instances_information.count_training_instances_number();

   const unsigned int size = evaluation_terms.size();

   if(size != training_instances_number)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Vector<double> calculate_gradient(const Vector<double>&, const Matrix<double>&) const method." << std::endl
             << "Size of errors must be equal to number of training instances." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   const unsigned int rows_number = Jacobian_terms.get_rows_number();

   if(rows_number != training_instances_number)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Vector<double> calculate_gradient(const Vector<double>&, const Matrix<double>&) const method." << std::endl
             << "Number of rows of errors derivatives must be equal to number of training instances." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   const NeuralNetwork* neural_network_pointer = performance_functional_pointer->get_neural_network_pointer();

   const MultilayerPerceptron* multilayer_perceptron_pointer = neural_network_pointer->get_multilayer_perceptron_pointer();

   const unsigned int network_parameters_number = multilayer_perceptron_pointer->count_parameters_number();

   const unsigned int columns_number = Jacobian_terms.get_columns_number();

   if(columns_number != network_parameters_number)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class." << std::endl
             << "Vector<double> calculate_gradient(const Vector<double>&, const Matrix<double>&) const method." << std::endl
             << "Number of columns of errors derivatives must be equal to number of network parameters." << std::endl;

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   // gradient = 2·J'·e

   return(Jacobian_terms.calculate_transpose().dot(evaluation_terms)*2.0);

}
01180
01181
01182
01183
01186
01187 void LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithmResults::resize_training_history(const unsigned int& new_size)
01188 {
01189 parameters_history.resize(new_size);
01190 parameters_norm_history.resize(new_size);
01191
01192 evaluation_history.resize(new_size);
01193 generalization_evaluation_history.resize(new_size);
01194 gradient_history.resize(new_size);
01195 gradient_norm_history.resize(new_size);
01196 Hessian_approximation_history.resize(new_size);
01197
01198 damping_parameter_history.resize(new_size);
01199 elapsed_time_history.resize(new_size);
01200 }
01201
01202
01203
01204
01206
/// Returns a string representation of the training results, writing each
/// non-empty history preceded by a "%"-commented header line.

std::string LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithmResults::to_string(void) const
{
   std::ostringstream buffer;

   // Parameters history (only when the first entry is itself non-empty).

   if(!parameters_history.empty())
   {
      if(!parameters_history[0].empty())
      {
          buffer << "% Parameters history:\n"
                 << parameters_history << "\n";
      }
   }

   // Parameters norm history.

   if(!parameters_norm_history.empty())
   {
       buffer << "% Parameters norm history:\n"
              << parameters_norm_history << "\n";
   }

   // Performance (evaluation) history.

   if(!evaluation_history.empty())
   {
       buffer << "% Performance history:\n"
              << evaluation_history << "\n";
   }

   // Generalization evaluation history.

   if(!generalization_evaluation_history.empty())
   {
       buffer << "% Generalization evaluation history:\n"
              << generalization_evaluation_history << "\n";
   }

   // Gradient history (only when the first entry is itself non-empty).

   if(!gradient_history.empty())
   {
      if(!gradient_history[0].empty())
      {
          buffer << "% Gradient history:\n"
                 << gradient_history << "\n";
      }
   }

   // Gradient norm history.

   if(!gradient_norm_history.empty())
   {
       buffer << "% Gradient norm history:\n"
              << gradient_norm_history << "\n";
   }

   // Hessian approximation history (only when the first entry is itself non-empty).

   if(!Hessian_approximation_history.empty())
   {
      if(!Hessian_approximation_history[0].empty())
      {
          buffer << "% Hessian approximation history:\n"
                 << Hessian_approximation_history << "\n";
      }
   }

   // Damping parameter history.

   if(!damping_parameter_history.empty())
   {
       buffer << "% Damping parameter history:\n"
              << damping_parameter_history << "\n";
   }

   // Elapsed time history.

   if(!elapsed_time_history.empty())
   {
       buffer << "% Elapsed time history:\n"
              << elapsed_time_history << "\n";
   }

   return(buffer.str());
}
01294
01295
01296
01297
01300
01301 LevenbergMarquardtAlgorithm::LevenbergMarquardtAlgorithmResults* LevenbergMarquardtAlgorithm::perform_training(void)
01302 {
01303 std::ostringstream buffer;
01304
01305 buffer << "OpenNN Exception: LevenbergMarquardtAlgorithm class.\n"
01306 << "LevenbergMarquardtAlgorithmResults* perform_training(void) method.\n"
01307 << "This method is under development.\n";
01308
01309 throw std::logic_error(buffer.str().c_str());
01310
01311
01312
01313
01314
01315
01316
01317
01318
01319
01320
01321
01322
01323
01324
01325
01326
01327
01328
01329
01330
01331
01332
01333
01334
01335
01336
01337
01338
01339
01340
01341
01342
01343
01344
01345
01346
01347
01348
01349
01350
01351
01352
01353
01354
01355
01356
01357
01358
01359
01360
01361
01362
01363
01364
01365
01366
01367
01368
01369
01370
01371
01372
01373
01374
01375
01376
01377
01378
01379
01380
01381
01382
01383
01384
01385
01386
01387
01388
01389
01390
01391
01392
01393
01394
01395
01396
01397
01398
01399
01400
01401
01402
01403
01404
01405
01406
01407
01408
01409
01410
01411
01412
01413
01414
01415
01416
01417
01418
01419
01420
01421
01422
01423
01424
01425
01426
01427
01428
01429
01430
01431
01432
01433
01434
01435
01436
01437
01438
01439
01440
01441
01442
01443
01444
01445
01446
01447
01448
01449
01450
01451
01452
01453
01454
01455
01456
01457
01458
01459
01460
01461
01462
01463
01464
01465
01466
01467
01468
01469
01470
01471
01472
01473
01474
01475
01476
01477
01478
01479
01480
01481
01482
01483
01484
01485
01486
01487
01488
01489
01490
01491
01492
01493
01494
01495
01496
01497
01498
01499
01500
01501
01502
01503
01504
01505
01506
01507
01508
01509
01510
01511
01512
01513
01514
01515
01516
01517
01518
01519
01520
01521
01522
01523
01524
01525
01526
01527
01528
01529
01530
01531
01532
01533
01534
01535
01536
01537
01538
01539
01540
01541
01542
01543
01544
01545
01546
01547
01548
01549
01550
01551
01552
01553
01554
01555
01556
01557
01558
01559
01560
01561
01562
01563
01564
01565
01566
01567
01568
01569
01570
01571
01572
01573
01574
01575
01576
01577
01578
01579
01580
01581
01582
01583
01584
01585
01586
01587
01588
01589
01590
01591
01592
01593
01594
01595
01596
01597
01598
01599
01600
01601
01602
01603
01604
01605
01606
01607
01608
01609
01610
01611
01612
01613
01614
01615
01616
01617
01618
01619
01620
01621
01622
01623
01624
01625
01626
01627
01628
01629
01630
01631
01632
01633
01634
01635
01636
01637
01638
01639
01640
01641
01642
01643
01644
01645
01646
01647
01648
01649
01650
01651
01652
01653
01654
01655
01656
01657 }
01658
01659
01660
01661
/// Makes all the training-history flags true, so every history is recorded.
/// NOTE(review): the bool parameter is ignored — all flags are unconditionally
/// set to true regardless of its value. Confirm this matches callers' intent.

void LevenbergMarquardtAlgorithm::set_reserve_all_training_history(const bool&)
{
   reserve_parameters_history = true;
   reserve_parameters_norm_history = true;

   reserve_evaluation_history = true;
   reserve_generalization_evaluation_history = true;

   reserve_gradient_history = true;
   reserve_gradient_norm_history = true;
   reserve_inverse_Hessian_history = true;

   reserve_damping_parameter_history = true;
   reserve_elapsed_time_history = true;
}
01677
01678
01679
01680
01681 std::string LevenbergMarquardtAlgorithm::write_training_algorithm_type(void) const
01682 {
01683 return("LEVENBERG_MARQUARDT_ALGORITHM");
01684 }
01685
01686
01687
01688
01689 TiXmlElement* LevenbergMarquardtAlgorithm::to_XML(void) const
01690 {
01691 std::ostringstream buffer;
01692
01693
01694
01695 TiXmlElement* Levenberg_Marquardt_algorithm_element = new TiXmlElement("LevenbergMarquardtAlgorithm");
01696 Levenberg_Marquardt_algorithm_element->SetAttribute("Version", 4);
01697
01698
01699
01700
01701 TiXmlElement* warning_parameters_norm_element = new TiXmlElement("WarningParametersNorm");
01702 Levenberg_Marquardt_algorithm_element->LinkEndChild(warning_parameters_norm_element);
01703
01704 buffer.str("");
01705 buffer << warning_parameters_norm;
01706
01707 TiXmlText* warning_parameters_norm_text = new TiXmlText(buffer.str().c_str());
01708 warning_parameters_norm_element->LinkEndChild(warning_parameters_norm_text);
01709
01710
01711
01712 TiXmlElement* warning_gradient_norm_element = new TiXmlElement("WarningGradientNorm");
01713 Levenberg_Marquardt_algorithm_element->LinkEndChild(warning_gradient_norm_element);
01714
01715 buffer.str("");
01716 buffer << warning_gradient_norm;
01717
01718 TiXmlText* warning_gradient_norm_text = new TiXmlText(buffer.str().c_str());
01719 warning_gradient_norm_element->LinkEndChild(warning_gradient_norm_text);
01720
01721
01722
01723 TiXmlElement* error_parameters_norm_element = new TiXmlElement("ErrorParametersNorm");
01724 Levenberg_Marquardt_algorithm_element->LinkEndChild(error_parameters_norm_element);
01725
01726 buffer.str("");
01727 buffer << error_parameters_norm;
01728
01729 TiXmlText* error_parameters_norm_text = new TiXmlText(buffer.str().c_str());
01730 error_parameters_norm_element->LinkEndChild(error_parameters_norm_text);
01731
01732
01733
01734 TiXmlElement* error_gradient_norm_element = new TiXmlElement("ErrorGradientNorm");
01735 Levenberg_Marquardt_algorithm_element->LinkEndChild(error_gradient_norm_element);
01736
01737 buffer.str("");
01738 buffer << error_gradient_norm;
01739
01740 TiXmlText* error_gradient_norm_text = new TiXmlText(buffer.str().c_str());
01741 error_gradient_norm_element->LinkEndChild(error_gradient_norm_text);
01742
01743
01744
01745 TiXmlElement* minimum_parameters_increment_norm_element = new TiXmlElement("MinimumParametersIncrement");
01746 Levenberg_Marquardt_algorithm_element->LinkEndChild(minimum_parameters_increment_norm_element);
01747
01748 buffer.str("");
01749 buffer << minimum_parameters_increment_norm;
01750
01751 TiXmlText* minimum_parameters_increment_norm_text = new TiXmlText(buffer.str().c_str());
01752 minimum_parameters_increment_norm_element->LinkEndChild(minimum_parameters_increment_norm_text);
01753
01754
01755
01756 TiXmlElement* minimum_performance_increase_element = new TiXmlElement("MinimumPerformanceIncrease");
01757 Levenberg_Marquardt_algorithm_element->LinkEndChild(minimum_performance_increase_element);
01758
01759 buffer.str("");
01760 buffer << minimum_performance_increase;
01761
01762 TiXmlText* minimum_performance_increase_text = new TiXmlText(buffer.str().c_str());
01763 minimum_performance_increase_element->LinkEndChild(minimum_performance_increase_text);
01764
01765
01766
01767 TiXmlElement* performance_goal_element = new TiXmlElement("PerformanceGoal");
01768 Levenberg_Marquardt_algorithm_element->LinkEndChild(performance_goal_element);
01769
01770 buffer.str("");
01771 buffer << performance_goal;
01772
01773 TiXmlText* performance_goal_text = new TiXmlText(buffer.str().c_str());
01774 performance_goal_element->LinkEndChild(performance_goal_text);
01775
01776
01777
01778 TiXmlElement* gradient_norm_goal_element = new TiXmlElement("GradientNormGoal");
01779 Levenberg_Marquardt_algorithm_element->LinkEndChild(gradient_norm_goal_element);
01780
01781 buffer.str("");
01782 buffer << gradient_norm_goal;
01783
01784 TiXmlText* gradient_norm_goal_text = new TiXmlText(buffer.str().c_str());
01785 gradient_norm_goal_element->LinkEndChild(gradient_norm_goal_text);
01786
01787
01788
01789 TiXmlElement* maximum_generalization_evaluation_decreases_element = new TiXmlElement("MaximumGeneralizationEvaluationDecreases");
01790 Levenberg_Marquardt_algorithm_element->LinkEndChild(maximum_generalization_evaluation_decreases_element);
01791
01792 buffer.str("");
01793 buffer << maximum_generalization_evaluation_decreases;
01794
01795 TiXmlText* maximum_generalization_evaluation_decreases_text = new TiXmlText(buffer.str().c_str());
01796 maximum_generalization_evaluation_decreases_element->LinkEndChild(maximum_generalization_evaluation_decreases_text);
01797
01798
01799
01800 TiXmlElement* maximum_epochs_number_element = new TiXmlElement("MaximumEpochsNumber");
01801 Levenberg_Marquardt_algorithm_element->LinkEndChild(maximum_epochs_number_element);
01802
01803 buffer.str("");
01804 buffer << maximum_epochs_number;
01805
01806 TiXmlText* maximum_epochs_number_text = new TiXmlText(buffer.str().c_str());
01807 maximum_epochs_number_element->LinkEndChild(maximum_epochs_number_text);
01808
01809
01810
01811 TiXmlElement* maximum_time_element = new TiXmlElement("MaximumTime");
01812 Levenberg_Marquardt_algorithm_element->LinkEndChild(maximum_time_element);
01813
01814 buffer.str("");
01815 buffer << maximum_time;
01816
01817 TiXmlText* maximum_time_text = new TiXmlText(buffer.str().c_str());
01818 maximum_time_element->LinkEndChild(maximum_time_text);
01819
01820
01821
01822 TiXmlElement* reserve_parameters_history_element = new TiXmlElement("ReserveParametersHistory");
01823 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_parameters_history_element);
01824
01825 buffer.str("");
01826 buffer << reserve_parameters_history;
01827
01828 TiXmlText* reserve_parameters_history_text = new TiXmlText(buffer.str().c_str());
01829 reserve_parameters_history_element->LinkEndChild(reserve_parameters_history_text);
01830
01831
01832
01833 TiXmlElement* reserve_parameters_norm_history_element = new TiXmlElement("ReserveParametersNormHistory");
01834 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_parameters_norm_history_element);
01835
01836 buffer.str("");
01837 buffer << reserve_parameters_norm_history;
01838
01839 TiXmlText* reserve_parameters_norm_history_text = new TiXmlText(buffer.str().c_str());
01840 reserve_parameters_norm_history_element->LinkEndChild(reserve_parameters_norm_history_text);
01841
01842
01843
01844 TiXmlElement* reserve_evaluation_history_element = new TiXmlElement("ReservePerformanceHistory");
01845 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_evaluation_history_element);
01846
01847 buffer.str("");
01848 buffer << reserve_evaluation_history;
01849
01850 TiXmlText* reserve_evaluation_history_text = new TiXmlText(buffer.str().c_str());
01851 reserve_evaluation_history_element->LinkEndChild(reserve_evaluation_history_text);
01852
01853
01854
01855 TiXmlElement* reserve_gradient_history_element = new TiXmlElement("ReserveGradientHistory");
01856 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_gradient_history_element);
01857
01858 buffer.str("");
01859 buffer << reserve_gradient_history;
01860
01861 TiXmlText* reserve_gradient_history_text = new TiXmlText(buffer.str().c_str());
01862 reserve_gradient_history_element->LinkEndChild(reserve_gradient_history_text);
01863
01864
01865
01866 TiXmlElement* reserve_gradient_norm_history_element = new TiXmlElement("ReserveGradientNormHistory");
01867 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_gradient_norm_history_element);
01868
01869 buffer.str("");
01870 buffer << reserve_gradient_norm_history;
01871
01872 TiXmlText* reserve_gradient_norm_history_text = new TiXmlText(buffer.str().c_str());
01873 reserve_gradient_norm_history_element->LinkEndChild(reserve_gradient_norm_history_text);
01874
01875
01876
01877 TiXmlElement* reserve_inverse_Hessian_history_element = new TiXmlElement("ReserveInverseHessianHistory");
01878 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_inverse_Hessian_history_element);
01879
01880 buffer.str("");
01881 buffer << reserve_inverse_Hessian_history;
01882
01883 TiXmlText* reserve_inverse_Hessian_history_text = new TiXmlText(buffer.str().c_str());
01884 reserve_inverse_Hessian_history_element->LinkEndChild(reserve_inverse_Hessian_history_text);
01885
01886
01887
01888 TiXmlElement* reserve_elapsed_time_history_element = new TiXmlElement("ReserveElapsedTimeHistory");
01889 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_elapsed_time_history_element);
01890
01891 buffer.str("");
01892 buffer << reserve_elapsed_time_history;
01893
01894 TiXmlText* reserve_elapsed_time_history_text = new TiXmlText(buffer.str().c_str());
01895 reserve_elapsed_time_history_element->LinkEndChild(reserve_elapsed_time_history_text);
01896
01897
01898
01899 TiXmlElement* reserve_generalization_evaluation_history_element = new TiXmlElement("ReserveGeneralizationPerformanceHistory");
01900 Levenberg_Marquardt_algorithm_element->LinkEndChild(reserve_generalization_evaluation_history_element);
01901
01902 buffer.str("");
01903 buffer << reserve_generalization_evaluation_history;
01904
01905 TiXmlText* reserve_generalization_evaluation_history_text = new TiXmlText(buffer.str().c_str());
01906 reserve_generalization_evaluation_history_element->LinkEndChild(reserve_generalization_evaluation_history_text);
01907
01908
01909
01910 TiXmlElement* display_period_element = new TiXmlElement("DisplayPeriod");
01911 Levenberg_Marquardt_algorithm_element->LinkEndChild(display_period_element);
01912
01913 buffer.str("");
01914 buffer << display_period;
01915
01916 TiXmlText* display_period_text = new TiXmlText(buffer.str().c_str());
01917 display_period_element->LinkEndChild(display_period_text);
01918
01919
01920
01921 TiXmlElement* display_element = new TiXmlElement("Display");
01922 Levenberg_Marquardt_algorithm_element->LinkEndChild(display_element);
01923
01924 buffer.str("");
01925 buffer << display;
01926
01927 TiXmlText* display_text = new TiXmlText(buffer.str().c_str());
01928 display_element->LinkEndChild(display_text);
01929
01930 return(Levenberg_Marquardt_algorithm_element);
01931 }
01932
01933
01934
01935
01938
01939 void LevenbergMarquardtAlgorithm::from_XML(TiXmlElement* Levenberg_Marquardt_algorithm_element)
01940 {
01941
01942
01943 TiXmlElement* warning_parameters_norm_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("WarningParametersNorm");
01944
01945 if(warning_parameters_norm_element)
01946 {
01947 double new_warning_parameters_norm = atof(warning_parameters_norm_element->GetText());
01948
01949 try
01950 {
01951 set_warning_parameters_norm(new_warning_parameters_norm);
01952 }
01953 catch(std::exception& e)
01954 {
01955 std::cout << e.what() << std::endl;
01956 }
01957 }
01958
01959
01960
01961 TiXmlElement* warning_gradient_norm_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("WarningGradientNorm");
01962
01963 if(warning_gradient_norm_element)
01964 {
01965 double new_warning_gradient_norm = atof(warning_gradient_norm_element->GetText());
01966
01967 try
01968 {
01969 set_warning_gradient_norm(new_warning_gradient_norm);
01970 }
01971 catch(std::exception& e)
01972 {
01973 std::cout << e.what() << std::endl;
01974 }
01975 }
01976
01977
01978
01979 TiXmlElement* error_parameters_norm_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ErrorParametersNorm");
01980
01981 if(error_parameters_norm_element)
01982 {
01983 double new_error_parameters_norm = atof(error_parameters_norm_element->GetText());
01984
01985 try
01986 {
01987 set_error_parameters_norm(new_error_parameters_norm);
01988 }
01989 catch(std::exception& e)
01990 {
01991 std::cout << e.what() << std::endl;
01992 }
01993 }
01994
01995
01996
01997 TiXmlElement* error_gradient_norm_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ErrorGradientNorm");
01998
01999 if(error_gradient_norm_element)
02000 {
02001 double new_error_gradient_norm = atof(error_gradient_norm_element->GetText());
02002
02003 try
02004 {
02005 set_error_gradient_norm(new_error_gradient_norm);
02006 }
02007 catch(std::exception& e)
02008 {
02009 std::cout << e.what() << std::endl;
02010 }
02011 }
02012
02013
02014
02015 TiXmlElement* minimum_parameters_increment_norm_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("MinimumParametersIncrementNorm");
02016
02017 if(minimum_parameters_increment_norm_element)
02018 {
02019 double new_minimum_parameters_increment_norm = atof(minimum_parameters_increment_norm_element->GetText());
02020
02021 try
02022 {
02023 set_minimum_parameters_increment_norm(new_minimum_parameters_increment_norm);
02024 }
02025 catch(std::exception& e)
02026 {
02027 std::cout << e.what() << std::endl;
02028 }
02029 }
02030
02031
02032
02033 TiXmlElement* minimum_performance_increase_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("MinimumPerformanceIncrease");
02034
02035 if(minimum_performance_increase_element)
02036 {
02037 double new_minimum_performance_increase = atof(minimum_performance_increase_element->GetText());
02038
02039 try
02040 {
02041 set_minimum_performance_increase(new_minimum_performance_increase);
02042 }
02043 catch(std::exception& e)
02044 {
02045 std::cout << e.what() << std::endl;
02046 }
02047 }
02048
02049
02050
02051 TiXmlElement* performance_goal_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("PerformanceGoal");
02052
02053 if(performance_goal_element)
02054 {
02055 double new_performance_goal = atof(performance_goal_element->GetText());
02056
02057 try
02058 {
02059 set_performance_goal(new_performance_goal);
02060 }
02061 catch(std::exception& e)
02062 {
02063 std::cout << e.what() << std::endl;
02064 }
02065 }
02066
02067
02068
02069 TiXmlElement* gradient_norm_goal_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("GradientNormGoal");
02070
02071 if(gradient_norm_goal_element)
02072 {
02073 double new_gradient_norm_goal = atof(gradient_norm_goal_element->GetText());
02074
02075 try
02076 {
02077 set_gradient_norm_goal(new_gradient_norm_goal);
02078 }
02079 catch(std::exception& e)
02080 {
02081 std::cout << e.what() << std::endl;
02082 }
02083 }
02084
02085
02086
02087 TiXmlElement* maximum_generalization_evaluation_decreases_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("MaximumGeneralizationEvaluationDecreases");
02088
02089 if(maximum_generalization_evaluation_decreases_element)
02090 {
02091 unsigned int new_maximum_generalization_evaluation_decreases = atoi(maximum_generalization_evaluation_decreases_element->GetText());
02092
02093 try
02094 {
02095 set_maximum_generalization_evaluation_decreases(new_maximum_generalization_evaluation_decreases);
02096 }
02097 catch(std::exception& e)
02098 {
02099 std::cout << e.what() << std::endl;
02100 }
02101 }
02102
02103
02104
02105 TiXmlElement* maximum_epochs_number_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("MaximumEpochsNumber");
02106
02107 if(maximum_epochs_number_element)
02108 {
02109 unsigned int new_maximum_epochs_number = atoi(maximum_epochs_number_element->GetText());
02110
02111 try
02112 {
02113 set_maximum_epochs_number(new_maximum_epochs_number);
02114 }
02115 catch(std::exception& e)
02116 {
02117 std::cout << e.what() << std::endl;
02118 }
02119 }
02120
02121
02122
02123 TiXmlElement* maximum_time_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("MaximumTime");
02124
02125 if(maximum_time_element)
02126 {
02127 double new_maximum_time = atof(maximum_time_element->GetText());
02128
02129 try
02130 {
02131 set_maximum_time(new_maximum_time);
02132 }
02133 catch(std::exception& e)
02134 {
02135 std::cout << e.what() << std::endl;
02136 }
02137 }
02138
02139
02140
02141 TiXmlElement* reserve_parameters_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveParametersHistory");
02142
02143 if(reserve_parameters_history_element)
02144 {
02145 std::string new_reserve_parameters_history = reserve_parameters_history_element->GetText();
02146
02147 try
02148 {
02149 set_reserve_parameters_history(new_reserve_parameters_history != "0");
02150 }
02151 catch(std::exception& e)
02152 {
02153 std::cout << e.what() << std::endl;
02154 }
02155 }
02156
02157
02158
02159 TiXmlElement* reserve_parameters_norm_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveParametersNormHistory");
02160
02161 if(reserve_parameters_norm_history_element)
02162 {
02163 std::string new_reserve_parameters_norm_history = reserve_parameters_norm_history_element->GetText();
02164
02165 try
02166 {
02167 set_reserve_parameters_norm_history(new_reserve_parameters_norm_history != "0");
02168 }
02169 catch(std::exception& e)
02170 {
02171 std::cout << e.what() << std::endl;
02172 }
02173 }
02174
02175
02176
02177 TiXmlElement* reserve_evaluation_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReservePerformanceHistory");
02178
02179 if(reserve_evaluation_history_element)
02180 {
02181 std::string new_reserve_evaluation_history = reserve_evaluation_history_element->GetText();
02182
02183 try
02184 {
02185 set_reserve_evaluation_history(new_reserve_evaluation_history != "0");
02186 }
02187 catch(std::exception& e)
02188 {
02189 std::cout << e.what() << std::endl;
02190 }
02191 }
02192
02193
02194
02195 TiXmlElement* reserve_gradient_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveGradientHistory");
02196
02197 if(reserve_gradient_history_element)
02198 {
02199 std::string new_reserve_gradient_history = reserve_gradient_history_element->GetText();
02200
02201 try
02202 {
02203 set_reserve_gradient_history(new_reserve_gradient_history != "0");
02204 }
02205 catch(std::exception& e)
02206 {
02207 std::cout << e.what() << std::endl;
02208 }
02209 }
02210
02211
02212
02213 TiXmlElement* reserve_gradient_norm_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveGradientNormHistory");
02214
02215 if(reserve_gradient_norm_history_element)
02216 {
02217 std::string new_reserve_gradient_norm_history = reserve_gradient_norm_history_element->GetText();
02218
02219 try
02220 {
02221 set_reserve_gradient_norm_history(new_reserve_gradient_norm_history != "0");
02222 }
02223 catch(std::exception& e)
02224 {
02225 std::cout << e.what() << std::endl;
02226 }
02227 }
02228
02229
02230
02231 TiXmlElement* reserve_elapsed_time_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveElapsedTimeHistory");
02232
02233 if(reserve_elapsed_time_history_element)
02234 {
02235 std::string new_reserve_elapsed_time_history = reserve_elapsed_time_history_element->GetText();
02236
02237 try
02238 {
02239 set_reserve_elapsed_time_history(new_reserve_elapsed_time_history != "0");
02240 }
02241 catch(std::exception& e)
02242 {
02243 std::cout << e.what() << std::endl;
02244 }
02245 }
02246
02247
02248
02249 TiXmlElement* reserve_generalization_evaluation_history_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("ReserveGeneralizationPerformanceHistory");
02250
02251 if(reserve_generalization_evaluation_history_element)
02252 {
02253 std::string new_reserve_generalization_evaluation_history = reserve_generalization_evaluation_history_element->GetText();
02254
02255 try
02256 {
02257 set_reserve_generalization_evaluation_history(new_reserve_generalization_evaluation_history != "0");
02258 }
02259 catch(std::exception& e)
02260 {
02261 std::cout << e.what() << std::endl;
02262 }
02263 }
02264
02265
02266
02267 TiXmlElement* display_period_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("DisplayPeriod");
02268
02269 if(display_period_element)
02270 {
02271 unsigned int new_display_period = atoi(display_period_element->GetText());
02272
02273 try
02274 {
02275 set_display_period(new_display_period);
02276 }
02277 catch(std::exception& e)
02278 {
02279 std::cout << e.what() << std::endl;
02280 }
02281 }
02282
02283
02284
02285 TiXmlElement* display_element = Levenberg_Marquardt_algorithm_element->FirstChildElement("Display");
02286
02287 if(display_element)
02288 {
02289 std::string new_display = display_element->GetText();
02290
02291 try
02292 {
02293 set_display(new_display != "0");
02294 }
02295 catch(std::exception& e)
02296 {
02297 std::cout << e.what() << std::endl;
02298 }
02299 }
02300 }
02301
02302 }
02303
02304
02305
02306
02307
02308
02309
02310
02311
02312
02313
02314
02315
02316
02317
02318
02319
02320