#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <algorithm>
#include <functional>
#include <climits>
#include <cmath>
#include <ctime>

#include "conjugate_gradient.h"

#include "../../parsers/tinyxml/tinyxml.h"

namespace OpenNN
{

/// Default constructor.
/// It creates a conjugate gradient training algorithm object not associated to any performance functional.
/// The members are initialized to their default values.

ConjugateGradient::ConjugateGradient(void) : TrainingAlgorithm()
{
   set_default();
}


/// Performance functional constructor.
/// It creates a conjugate gradient training algorithm associated to a performance functional.
/// The members are initialized to their default values.
/// @param new_performance_functional_pointer Pointer to the performance functional to be optimized.

ConjugateGradient::ConjugateGradient(PerformanceFunctional* new_performance_functional_pointer)
: TrainingAlgorithm(new_performance_functional_pointer)
{
   training_rate_algorithm.set_performance_functional_pointer(new_performance_functional_pointer);

   set_default();
}


/// XML constructor.
/// It creates a conjugate gradient training algorithm not associated to any performance functional.
/// The members are loaded from a TinyXML element.
/// @param conjugate_gradient_element Pointer to a TinyXML element containing the conjugate gradient members.

ConjugateGradient::ConjugateGradient(TiXmlElement* conjugate_gradient_element)
: TrainingAlgorithm(conjugate_gradient_element)
{
   set_default();

   from_XML(conjugate_gradient_element);
}


/// Destructor.

ConjugateGradient::~ConjugateGradient(void)
{
}
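

// A minimal usage sketch (illustrative only, not part of this file). It assumes
// a PerformanceFunctional object `performance_functional` configured elsewhere,
// and note that perform_training() below is still under development in this
// version and currently throws:
//
//    ConjugateGradient conjugate_gradient(&performance_functional);
//    conjugate_gradient.set_training_direction_method(ConjugateGradient::PR);
//    conjugate_gradient.set_maximum_epochs_number(500);
//    conjugate_gradient.set_display_period(10);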


/// Returns a constant reference to the training rate algorithm object inside the conjugate gradient object.

const TrainingRateAlgorithm& ConjugateGradient::get_training_rate_algorithm(void) const
{
   return(training_rate_algorithm);
}


/// Returns a pointer to the training rate algorithm object inside the conjugate gradient object.

TrainingRateAlgorithm* ConjugateGradient::get_training_rate_algorithm_pointer(void)
{
   return(&training_rate_algorithm);
}


/// Returns the conjugate gradient training direction method used for training.

const ConjugateGradient::TrainingDirectionMethod& ConjugateGradient::get_training_direction_method(void) const
{
   return(training_direction_method);
}


/// Returns the name of the training direction method: "PR" for Polak-Ribiere or "FR" for Fletcher-Reeves.

std::string ConjugateGradient::write_training_direction_method(void) const
{
   switch(training_direction_method)
   {
      case PR:
      {
         return("PR");
      }
      break;

      case FR:
      {
         return("FR");
      }
      break;

      default:
      {
         std::ostringstream buffer;

         buffer << "OpenNN Exception: ConjugateGradient class.\n"
                << "std::string write_training_direction_method(void) const method.\n"
                << "Unknown training direction method.\n";

         throw std::logic_error(buffer.str().c_str());
      }
      break;
   }
}


/// Returns the minimum value for the norm of the parameters vector at which a warning message is written to the screen.

const double& ConjugateGradient::get_warning_parameters_norm(void) const
{
   return(warning_parameters_norm);
}


/// Returns the minimum value for the norm of the gradient vector at which a warning message is written to the screen.

const double& ConjugateGradient::get_warning_gradient_norm(void) const
{
   return(warning_gradient_norm);
}


/// Returns the training rate value at which a warning message is written to the screen during line minimization.

const double& ConjugateGradient::get_warning_training_rate(void) const
{
   return(warning_training_rate);
}


/// Returns the value for the norm of the parameters vector at which an error is raised and training stops.

const double& ConjugateGradient::get_error_parameters_norm(void) const
{
   return(error_parameters_norm);
}


/// Returns the value for the norm of the gradient vector at which an error is raised and training stops.

const double& ConjugateGradient::get_error_gradient_norm(void) const
{
   return(error_gradient_norm);
}


/// Returns the training rate value at which an error is raised and training stops during line minimization.

const double& ConjugateGradient::get_error_training_rate(void) const
{
   return(error_training_rate);
}


/// Returns the minimum norm of the parameters increment vector, used as a stopping criterion.

const double& ConjugateGradient::get_minimum_parameters_increment_norm(void) const
{
   return(minimum_parameters_increment_norm);
}


/// Returns the minimum performance improvement between two successive epochs, used as a stopping criterion.

const double& ConjugateGradient::get_minimum_performance_increase(void) const
{
   return(minimum_performance_increase);
}


/// Returns the goal value for the performance, used as a stopping criterion.

const double& ConjugateGradient::get_performance_goal(void) const
{
   return(performance_goal);
}


/// Returns the goal value for the norm of the performance function gradient, used as a stopping criterion.

const double& ConjugateGradient::get_gradient_norm_goal(void) const
{
   return(gradient_norm_goal);
}


/// Returns the maximum number of generalization evaluation decreases allowed before training stops; this acts as an early stopping criterion.

const unsigned int& ConjugateGradient::get_maximum_generalization_evaluation_decreases(void) const
{
   return(maximum_generalization_evaluation_decreases);
}


/// Returns the maximum number of training epochs.

const unsigned int& ConjugateGradient::get_maximum_epochs_number(void) const
{
   return(maximum_epochs_number);
}


/// Returns the maximum training time, in seconds.

const double& ConjugateGradient::get_maximum_time(void) const
{
   return(maximum_time);
}


/// Returns true if the history of the parameters vector is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_parameters_history(void) const
{
   return(reserve_parameters_history);
}


/// Returns true if the history of the parameters norm is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_parameters_norm_history(void) const
{
   return(reserve_parameters_norm_history);
}


/// Returns true if the history of the evaluation values is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_evaluation_history(void) const
{
   return(reserve_evaluation_history);
}


/// Returns true if the history of the gradient vector is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_gradient_history(void) const
{
   return(reserve_gradient_history);
}


/// Returns true if the history of the gradient norm is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_gradient_norm_history(void) const
{
   return(reserve_gradient_norm_history);
}


/// Returns true if the history of the training directions is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_training_direction_history(void) const
{
   return(reserve_training_direction_history);
}


/// Returns true if the history of the training rates is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_training_rate_history(void) const
{
   return(reserve_training_rate_history);
}


/// Returns true if the history of the elapsed times is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_elapsed_time_history(void) const
{
   return(reserve_elapsed_time_history);
}


/// Returns true if the history of the generalization evaluation values is to be reserved during training, and false otherwise.

const bool& ConjugateGradient::get_reserve_generalization_evaluation_history(void) const
{
   return(reserve_generalization_evaluation_history);
}


/// Returns the number of epochs between two consecutive displays of training progress information.

const unsigned int& ConjugateGradient::get_display_period(void) const
{
   return(display_period);
}


/// Sets a new conjugate gradient training direction method.
/// @param new_training_direction_method Training direction method (PR or FR).

void ConjugateGradient::set_training_direction_method
(const ConjugateGradient::TrainingDirectionMethod& new_training_direction_method)
{
   training_direction_method = new_training_direction_method;
}


/// Sets a new conjugate gradient training direction method from a string.
/// @param new_training_direction_method_name String with the training direction method name ("PR" or "FR").

void ConjugateGradient::set_training_direction_method(const std::string& new_training_direction_method_name)
{
   if(new_training_direction_method_name == "PR")
   {
      training_direction_method = PR;
   }
   else if(new_training_direction_method_name == "FR")
   {
      training_direction_method = FR;
   }
   else
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: ConjugateGradient class.\n"
             << "void set_training_direction_method(const std::string&) method.\n"
             << "Unknown training direction method: " << new_training_direction_method_name << ".\n";

      throw std::logic_error(buffer.str().c_str());
   }
}


/// Makes the training history of all variables to be reserved or not in memory.
/// @param new_reserve_all_training_history True if all training history variables are to be reserved, false otherwise.

void ConjugateGradient::set_reserve_all_training_history(const bool& new_reserve_all_training_history)
{
   // Neural network parameters

   reserve_parameters_history = new_reserve_all_training_history;
   reserve_parameters_norm_history = new_reserve_all_training_history;

   // Performance functional

   reserve_evaluation_history = new_reserve_all_training_history;
   reserve_gradient_history = new_reserve_all_training_history;
   reserve_gradient_norm_history = new_reserve_all_training_history;

   reserve_generalization_evaluation_history = new_reserve_all_training_history;

   // Training algorithm

   reserve_training_direction_history = new_reserve_all_training_history;
   reserve_training_rate_history = new_reserve_all_training_history;
   reserve_elapsed_time_history = new_reserve_all_training_history;
}


/// Sets the members of the conjugate gradient object to their default values:
/// training parameters, stopping criteria, training history flags, display settings
/// and the training direction method.

void ConjugateGradient::set_default(void)
{
   // Training parameters

   warning_parameters_norm = 1.0e6;
   warning_gradient_norm = 1.0e6;
   warning_training_rate = 1.0e6;

   error_parameters_norm = 1.0e9;
   error_gradient_norm = 1.0e9;
   error_training_rate = 1.0e9;

   // Stopping criteria

   minimum_parameters_increment_norm = 0.0;

   minimum_performance_increase = 0.0;
   performance_goal = -1.0e99;
   gradient_norm_goal = 0.0;
   maximum_generalization_evaluation_decreases = 1000000;

   maximum_epochs_number = 1000;
   maximum_time = 1000.0;

   // Training history

   reserve_parameters_history = false;
   reserve_parameters_norm_history = false;

   reserve_evaluation_history = true;
   reserve_gradient_history = false;
   reserve_gradient_norm_history = false;
   reserve_generalization_evaluation_history = false;

   reserve_training_direction_history = false;
   reserve_training_rate_history = false;
   reserve_elapsed_time_history = false;

   // Utilities

   display = true;
   display_period = 100;

   // Training operators

   training_direction_method = PR;
}


/// Sets a new value for the norm of the parameters vector at which a warning message is written to the screen.
/// @param new_warning_parameters_norm Warning norm of parameters vector value.

void ConjugateGradient::set_warning_parameters_norm(const double& new_warning_parameters_norm)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_warning_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_warning_parameters_norm(const double&) method.\n"
             << "Warning parameters norm must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_parameters_norm = new_warning_parameters_norm;
}


/// Sets a new value for the norm of the gradient vector at which a warning message is written to the screen.
/// @param new_warning_gradient_norm Warning norm of gradient vector value.

void ConjugateGradient::set_warning_gradient_norm(const double& new_warning_gradient_norm)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_warning_gradient_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_warning_gradient_norm(const double&) method.\n"
             << "Warning gradient norm must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_gradient_norm = new_warning_gradient_norm;
}


/// Sets a new training rate value at which a warning message is written to the screen during line minimization.
/// @param new_warning_training_rate Warning training rate value.

void ConjugateGradient::set_warning_training_rate(const double& new_warning_training_rate)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_warning_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_warning_training_rate(const double&) method.\n"
             << "Warning training rate must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_training_rate = new_warning_training_rate;
}


/// Sets a new value for the norm of the parameters vector at which an error is raised and training stops.
/// @param new_error_parameters_norm Error norm of parameters vector value.

void ConjugateGradient::set_error_parameters_norm(const double& new_error_parameters_norm)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_error_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_error_parameters_norm(const double&) method.\n"
             << "Error parameters norm must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_parameters_norm = new_error_parameters_norm;
}


/// Sets a new value for the norm of the gradient vector at which an error is raised and training stops.
/// @param new_error_gradient_norm Error norm of gradient vector value.

void ConjugateGradient::set_error_gradient_norm(const double& new_error_gradient_norm)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_error_gradient_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_error_gradient_norm(const double&) method.\n"
             << "Error gradient norm must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_gradient_norm = new_error_gradient_norm;
}


/// Sets a new training rate value at which an error is raised and training stops during line minimization.
/// @param new_error_training_rate Error training rate value.

void ConjugateGradient::set_error_training_rate(const double& new_error_training_rate)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_error_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_error_training_rate(const double&) method.\n"
             << "Error training rate must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_training_rate = new_error_training_rate;
}


/// Sets a new minimum norm of the parameters increment vector, used as a stopping criterion.
/// @param new_minimum_parameters_increment_norm Minimum norm of the parameters increment vector.

void ConjugateGradient::set_minimum_parameters_increment_norm(const double& new_minimum_parameters_increment_norm)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_minimum_parameters_increment_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_minimum_parameters_increment_norm(const double&) method.\n"
             << "Minimum parameters increment norm must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   minimum_parameters_increment_norm = new_minimum_parameters_increment_norm;
}


/// Sets a new minimum performance improvement between two successive epochs, used as a stopping criterion.
/// @param new_minimum_performance_increase Minimum improvement in the performance between two epochs.

void ConjugateGradient::set_minimum_performance_increase(const double& new_minimum_performance_increase)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_minimum_performance_increase < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_minimum_performance_increase(const double&) method.\n"
             << "Minimum performance improvement must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   minimum_performance_increase = new_minimum_performance_increase;
}


/// Sets a new goal value for the performance, used as a stopping criterion.
/// @param new_performance_goal Goal value for the performance.

void ConjugateGradient::set_performance_goal(const double& new_performance_goal)
{
   performance_goal = new_performance_goal;
}


/// Sets a new goal value for the norm of the performance function gradient, used as a stopping criterion.
/// @param new_gradient_norm_goal Goal value for the norm of the performance function gradient.

void ConjugateGradient::set_gradient_norm_goal(const double& new_gradient_norm_goal)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_gradient_norm_goal < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_gradient_norm_goal(const double&) method.\n"
             << "Gradient norm goal must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   gradient_norm_goal = new_gradient_norm_goal;
}


/// Sets a new maximum number of generalization evaluation decreases, used as an early stopping criterion.
/// Note that the argument is unsigned, so it cannot be negative and no debug check is needed here.
/// @param new_maximum_generalization_evaluation_decreases Maximum number of epochs in which the generalization evaluation decreases.

void ConjugateGradient::set_maximum_generalization_evaluation_decreases(const unsigned int& new_maximum_generalization_evaluation_decreases)
{
   maximum_generalization_evaluation_decreases = new_maximum_generalization_evaluation_decreases;
}


/// Sets a new maximum number of training epochs.
/// Note that the argument is unsigned, so it cannot be negative and no debug check is needed here.
/// @param new_maximum_epochs_number Maximum number of training epochs.

void ConjugateGradient::set_maximum_epochs_number(const unsigned int& new_maximum_epochs_number)
{
   maximum_epochs_number = new_maximum_epochs_number;
}


/// Sets a new maximum training time.
/// @param new_maximum_time Maximum training time, in seconds.

void ConjugateGradient::set_maximum_time(const double& new_maximum_time)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_maximum_time < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_maximum_time(const double&) method.\n"
             << "Maximum time must be greater than or equal to 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   maximum_time = new_maximum_time;
}


/// Sets whether the history of the parameters vector is to be reserved in memory during training.
/// @param new_reserve_parameters_history True if the parameters history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_parameters_history(const bool& new_reserve_parameters_history)
{
   reserve_parameters_history = new_reserve_parameters_history;
}


/// Sets whether the history of the parameters norm is to be reserved in memory during training.
/// @param new_reserve_parameters_norm_history True if the parameters norm history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_parameters_norm_history(const bool& new_reserve_parameters_norm_history)
{
   reserve_parameters_norm_history = new_reserve_parameters_norm_history;
}


/// Sets whether the history of the evaluation values is to be reserved in memory during training.
/// @param new_reserve_evaluation_history True if the evaluation history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_evaluation_history(const bool& new_reserve_evaluation_history)
{
   reserve_evaluation_history = new_reserve_evaluation_history;
}


/// Sets whether the history of the gradient vector is to be reserved in memory during training.
/// @param new_reserve_gradient_history True if the gradient history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_gradient_history(const bool& new_reserve_gradient_history)
{
   reserve_gradient_history = new_reserve_gradient_history;
}


/// Sets whether the history of the gradient norm is to be reserved in memory during training.
/// @param new_reserve_gradient_norm_history True if the gradient norm history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_gradient_norm_history(const bool& new_reserve_gradient_norm_history)
{
   reserve_gradient_norm_history = new_reserve_gradient_norm_history;
}


/// Sets whether the history of the training directions is to be reserved in memory during training.
/// @param new_reserve_training_direction_history True if the training direction history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_training_direction_history(const bool& new_reserve_training_direction_history)
{
   reserve_training_direction_history = new_reserve_training_direction_history;
}


/// Sets whether the history of the training rates is to be reserved in memory during training.
/// @param new_reserve_training_rate_history True if the training rate history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_training_rate_history(const bool& new_reserve_training_rate_history)
{
   reserve_training_rate_history = new_reserve_training_rate_history;
}


/// Sets whether the history of the elapsed times is to be reserved in memory during training.
/// @param new_reserve_elapsed_time_history True if the elapsed time history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_elapsed_time_history(const bool& new_reserve_elapsed_time_history)
{
   reserve_elapsed_time_history = new_reserve_elapsed_time_history;
}


/// Sets whether the history of the generalization evaluation values is to be reserved in memory during training.
/// @param new_reserve_generalization_evaluation_history True if the generalization evaluation history is to be reserved, false otherwise.

void ConjugateGradient::set_reserve_generalization_evaluation_history(const bool& new_reserve_generalization_evaluation_history)
{
   reserve_generalization_evaluation_history = new_reserve_generalization_evaluation_history;
}


/// Sets a new number of epochs between two consecutive displays of training progress information.
/// @param new_display_period Number of epochs between displays; it must be greater than 0.

void ConjugateGradient::set_display_period(const unsigned int& new_display_period)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(new_display_period == 0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: TrainingAlgorithm class.\n"
             << "void set_display_period(const unsigned int&) method.\n"
             << "Display period must be greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   display_period = new_display_period;
}


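/// Returns the Fletcher-Reeves parameter used to compute the training direction:
///
///    beta_FR = (g . g) / (g_old . g_old)
///
/// where g is the current gradient and g_old the previous one.
/// The returned value is bounded to the interval [0,1].
/// @param old_gradient Previous performance function gradient.
/// @param gradient Current performance function gradient.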
double ConjugateGradient::calculate_FR_parameter(const Vector<double>& old_gradient, const Vector<double>& gradient) const
{
   double FR_parameter = 0.0;

   const double numerator = gradient.dot(gradient);
   const double denominator = old_gradient.dot(old_gradient);

   // Prevent a possible division by zero

   if(denominator == 0.0)
   {
      FR_parameter = 0.0;
   }
   else
   {
      FR_parameter = numerator/denominator;
   }

   // Bound the parameter between 0 and 1

   if(FR_parameter < 0.0)
   {
      FR_parameter = 0.0;
   }

   if(FR_parameter > 1.0)
   {
      FR_parameter = 1.0;
   }

   return(FR_parameter);
}


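/// Returns the Polak-Ribiere parameter used to compute the training direction:
///
///    beta_PR = ((g - g_old) . g) / (g_old . g_old)
///
/// where g is the current gradient and g_old the previous one.
/// The returned value is bounded to the interval [0,1].
/// @param old_gradient Previous performance function gradient.
/// @param gradient Current performance function gradient.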
double ConjugateGradient::calculate_PR_parameter(const Vector<double>& old_gradient, const Vector<double>& gradient) const
{
   double PR_parameter = 0.0;

   const double numerator = (gradient-old_gradient).dot(gradient);
   const double denominator = old_gradient.dot(old_gradient);

   // Prevent a possible division by zero

   if(denominator == 0.0)
   {
      PR_parameter = 0.0;
   }
   else
   {
      PR_parameter = numerator/denominator;
   }

   // Bound the parameter between 0 and 1

   if(PR_parameter < 0.0)
   {
      PR_parameter = 0.0;
   }

   if(PR_parameter > 1.0)
   {
      PR_parameter = 1.0;
   }

   return(PR_parameter);
}


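/// Returns the Polak-Ribiere training direction,
/// d = -g/||g|| + beta_PR * d_old, normalized to unit length.
/// @param old_gradient Previous performance function gradient.
/// @param gradient Current performance function gradient.
/// @param old_training_direction Previous training direction vector.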
Vector<double> ConjugateGradient::calculate_PR_training_direction
(const Vector<double>& old_gradient, const Vector<double>& gradient, const Vector<double>& old_training_direction) const
{
   const double PR_parameter = calculate_PR_parameter(old_gradient, gradient);

   const Vector<double> gradient_descent_term = calculate_gradient_descent_training_direction(gradient);
   const Vector<double> conjugate_direction_term = old_training_direction*PR_parameter;

   const Vector<double> PR_training_direction = gradient_descent_term + conjugate_direction_term;

   const double PR_training_direction_norm = PR_training_direction.calculate_norm();

   return(PR_training_direction/PR_training_direction_norm);
}


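/// Returns the Fletcher-Reeves training direction,
/// d = -g/||g|| + beta_FR * d_old, normalized to unit length.
/// @param old_gradient Previous performance function gradient.
/// @param gradient Current performance function gradient.
/// @param old_training_direction Previous training direction vector.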
Vector<double> ConjugateGradient::calculate_FR_training_direction
(const Vector<double>& old_gradient, const Vector<double>& gradient, const Vector<double>& old_training_direction) const
{
   const double FR_parameter = calculate_FR_parameter(old_gradient, gradient);

   const Vector<double> gradient_descent_term = calculate_gradient_descent_training_direction(gradient);
   const Vector<double> conjugate_direction_term = old_training_direction*FR_parameter;

   const Vector<double> FR_training_direction = gradient_descent_term + conjugate_direction_term;

   const double FR_training_direction_norm = FR_training_direction.calculate_norm();

   return(FR_training_direction/FR_training_direction_norm);
}


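/// Returns the conjugate gradient training direction computed with the current
/// training direction method (Fletcher-Reeves or Polak-Ribiere).
/// @param old_gradient Previous performance function gradient.
/// @param gradient Current performance function gradient.
/// @param old_training_direction Previous training direction vector.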
Vector<double> ConjugateGradient::calculate_training_direction
(const Vector<double>& old_gradient, const Vector<double>& gradient, const Vector<double>& old_training_direction) const
{
   switch(training_direction_method)
   {
      case FR:
      {
         return(calculate_FR_training_direction(old_gradient, gradient, old_training_direction));
      }
      break;

      case PR:
      {
         return(calculate_PR_training_direction(old_gradient, gradient, old_training_direction));
      }
      break;

      default:
      {
         std::ostringstream buffer;

         buffer << "OpenNN Exception: ConjugateGradient class.\n"
                << "Vector<double> calculate_training_direction(const Vector<double>&, const Vector<double>&, const Vector<double>&) const method.\n"
                << "Unknown training direction method: " << training_direction_method << ".\n";

         throw std::logic_error(buffer.str().c_str());
      }
      break;
   }
}


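/// Returns the gradient descent training direction, which is the normalized
/// negative gradient: -g/||g||.
/// @param gradient Performance function gradient.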
Vector<double> ConjugateGradient::calculate_gradient_descent_training_direction(const Vector<double>& gradient) const
{
   const double gradient_norm = gradient.calculate_norm();

   return(gradient*(-1.0)/gradient_norm);
}


/// Resizes all the training history variables to a new size.
/// @param new_size Size of the training history variables.

void ConjugateGradient::ConjugateGradientResults::resize_training_history(const unsigned int& new_size)
{
   parameters_history.resize(new_size);
   parameters_norm_history.resize(new_size);

   evaluation_history.resize(new_size);
   generalization_evaluation_history.resize(new_size);
   gradient_history.resize(new_size);
   gradient_norm_history.resize(new_size);

   training_direction_history.resize(new_size);
   training_rate_history.resize(new_size);
   elapsed_time_history.resize(new_size);
}


/// Returns a string representation of the results structure, listing each
/// training history variable that has been reserved.

std::string ConjugateGradient::ConjugateGradientResults::to_string(void) const
{
   std::ostringstream buffer;

   // Parameters history

   if(!parameters_history.empty())
   {
      if(!parameters_history[0].empty())
      {
         buffer << "% Parameters history:\n"
                << parameters_history << "\n";
      }
   }

   // Parameters norm history

   if(!parameters_norm_history.empty())
   {
      buffer << "% Parameters norm history:\n"
             << parameters_norm_history << "\n";
   }

   // Evaluation history

   if(!evaluation_history.empty())
   {
      buffer << "% Evaluation history:\n"
             << evaluation_history << "\n";
   }

   // Generalization evaluation history

   if(!generalization_evaluation_history.empty())
   {
      buffer << "% Generalization evaluation history:\n"
             << generalization_evaluation_history << "\n";
   }

   // Gradient history

   if(!gradient_history.empty())
   {
      if(!gradient_history[0].empty())
      {
         buffer << "% Gradient history:\n"
                << gradient_history << "\n";
      }
   }

   // Gradient norm history

   if(!gradient_norm_history.empty())
   {
      buffer << "% Gradient norm history:\n"
             << gradient_norm_history << "\n";
   }

   // Training direction history

   if(!training_direction_history.empty())
   {
      if(!training_direction_history[0].empty())
      {
         buffer << "% Training direction history:\n"
                << training_direction_history << "\n";
      }
   }

   // Training rate history

   if(!training_rate_history.empty())
   {
      buffer << "% Training rate history:\n"
             << training_rate_history << "\n";
   }

   // Elapsed time history

   if(!elapsed_time_history.empty())
   {
      buffer << "% Elapsed time history:\n"
             << elapsed_time_history << "\n";
   }

   return(buffer.str());
}


/// Trains a neural network with an associated performance functional according
/// to the conjugate gradient algorithm and the current training parameters.
/// @todo This method is under development and currently throws an exception.

ConjugateGradient::ConjugateGradientResults* ConjugateGradient::perform_training(void)
{
   std::ostringstream buffer;

   buffer << "OpenNN Exception: ConjugateGradient class.\n"
          << "ConjugateGradientResults* perform_training(void) method.\n"
          << "This method is under development.\n";

   throw std::logic_error(buffer.str().c_str());
}


/// Returns a string with the name of the training algorithm type: "CONJUGATE_GRADIENT".

std::string ConjugateGradient::write_training_algorithm_type(void) const
{
   return("CONJUGATE_GRADIENT");
}


/// Serializes the conjugate gradient object into a TinyXML element.
/// @return Pointer to a new TinyXML element containing the members of this object.

TiXmlElement* ConjugateGradient::to_XML(void) const
{
   std::ostringstream buffer;

   // Conjugate gradient root element

   TiXmlElement* conjugate_gradient_element = new TiXmlElement("ConjugateGradient");
   conjugate_gradient_element->SetAttribute("Version", 4);

   // Training direction method
   {
      TiXmlElement* training_direction_method_element = new TiXmlElement("TrainingDirectionMethod");
      conjugate_gradient_element->LinkEndChild(training_direction_method_element);

      TiXmlText* training_direction_method_text = new TiXmlText(write_training_direction_method().c_str());
      training_direction_method_element->LinkEndChild(training_direction_method_text);
   }

   // Training rate algorithm
   {
      TiXmlElement* training_rate_algorithm_element = training_rate_algorithm.to_XML();
      conjugate_gradient_element->LinkEndChild(training_rate_algorithm_element);
   }

   // Warning parameters norm
   {
      TiXmlElement* warning_parameters_norm_element = new TiXmlElement("WarningParametersNorm");
      conjugate_gradient_element->LinkEndChild(warning_parameters_norm_element);

      buffer.str("");
      buffer << warning_parameters_norm;

      TiXmlText* warning_parameters_norm_text = new TiXmlText(buffer.str().c_str());
      warning_parameters_norm_element->LinkEndChild(warning_parameters_norm_text);
   }

   // Warning gradient norm
   {
      TiXmlElement* warning_gradient_norm_element = new TiXmlElement("WarningGradientNorm");
      conjugate_gradient_element->LinkEndChild(warning_gradient_norm_element);

      buffer.str("");
      buffer << warning_gradient_norm;

      TiXmlText* warning_gradient_norm_text = new TiXmlText(buffer.str().c_str());
      warning_gradient_norm_element->LinkEndChild(warning_gradient_norm_text);
   }

   // Warning training rate
   {
      TiXmlElement* warning_training_rate_element = new TiXmlElement("WarningTrainingRate");
      conjugate_gradient_element->LinkEndChild(warning_training_rate_element);

      buffer.str("");
      buffer << warning_training_rate;

      TiXmlText* warning_training_rate_text = new TiXmlText(buffer.str().c_str());
      warning_training_rate_element->LinkEndChild(warning_training_rate_text);
   }

   // Error parameters norm
   {
      TiXmlElement* error_parameters_norm_element = new TiXmlElement("ErrorParametersNorm");
      conjugate_gradient_element->LinkEndChild(error_parameters_norm_element);

      buffer.str("");
      buffer << error_parameters_norm;

      TiXmlText* error_parameters_norm_text = new TiXmlText(buffer.str().c_str());
      error_parameters_norm_element->LinkEndChild(error_parameters_norm_text);
   }

   // Error gradient norm
   {
      TiXmlElement* error_gradient_norm_element = new TiXmlElement("ErrorGradientNorm");
      conjugate_gradient_element->LinkEndChild(error_gradient_norm_element);

      buffer.str("");
      buffer << error_gradient_norm;

      TiXmlText* error_gradient_norm_text = new TiXmlText(buffer.str().c_str());
      error_gradient_norm_element->LinkEndChild(error_gradient_norm_text);
   }

   // Error training rate
   {
      TiXmlElement* error_training_rate_element = new TiXmlElement("ErrorTrainingRate");
      conjugate_gradient_element->LinkEndChild(error_training_rate_element);

      buffer.str("");
      buffer << error_training_rate;

      TiXmlText* error_training_rate_text = new TiXmlText(buffer.str().c_str());
      error_training_rate_element->LinkEndChild(error_training_rate_text);
   }

   // Minimum parameters increment norm
   // Note: the element name must match the one read back in from_XML below.
   {
      TiXmlElement* minimum_parameters_increment_norm_element = new TiXmlElement("MinimumParametersIncrementNorm");
      conjugate_gradient_element->LinkEndChild(minimum_parameters_increment_norm_element);

      buffer.str("");
      buffer << minimum_parameters_increment_norm;

      TiXmlText* minimum_parameters_increment_norm_text = new TiXmlText(buffer.str().c_str());
      minimum_parameters_increment_norm_element->LinkEndChild(minimum_parameters_increment_norm_text);
   }

   // Minimum performance increase
   {
      TiXmlElement* minimum_performance_increase_element = new TiXmlElement("MinimumPerformanceIncrease");
      conjugate_gradient_element->LinkEndChild(minimum_performance_increase_element);

      buffer.str("");
      buffer << minimum_performance_increase;

      TiXmlText* minimum_performance_increase_text = new TiXmlText(buffer.str().c_str());
      minimum_performance_increase_element->LinkEndChild(minimum_performance_increase_text);
   }

   // Performance goal
   {
      TiXmlElement* performance_goal_element = new TiXmlElement("PerformanceGoal");
      conjugate_gradient_element->LinkEndChild(performance_goal_element);

      buffer.str("");
      buffer << performance_goal;

      TiXmlText* performance_goal_text = new TiXmlText(buffer.str().c_str());
      performance_goal_element->LinkEndChild(performance_goal_text);
   }

   // Gradient norm goal
   {
      TiXmlElement* gradient_norm_goal_element = new TiXmlElement("GradientNormGoal");
      conjugate_gradient_element->LinkEndChild(gradient_norm_goal_element);

      buffer.str("");
      buffer << gradient_norm_goal;

      TiXmlText* gradient_norm_goal_text = new TiXmlText(buffer.str().c_str());
      gradient_norm_goal_element->LinkEndChild(gradient_norm_goal_text);
   }

   // Maximum generalization evaluation decreases
   {
      TiXmlElement* maximum_generalization_evaluation_decreases_element = new TiXmlElement("MaximumGeneralizationEvaluationDecreases");
      conjugate_gradient_element->LinkEndChild(maximum_generalization_evaluation_decreases_element);

      buffer.str("");
      buffer << maximum_generalization_evaluation_decreases;

      TiXmlText* maximum_generalization_evaluation_decreases_text = new TiXmlText(buffer.str().c_str());
      maximum_generalization_evaluation_decreases_element->LinkEndChild(maximum_generalization_evaluation_decreases_text);
   }

   // Maximum epochs number
   {
      TiXmlElement* maximum_epochs_number_element = new TiXmlElement("MaximumEpochsNumber");
      conjugate_gradient_element->LinkEndChild(maximum_epochs_number_element);

      buffer.str("");
      buffer << maximum_epochs_number;

      TiXmlText* maximum_epochs_number_text = new TiXmlText(buffer.str().c_str());
      maximum_epochs_number_element->LinkEndChild(maximum_epochs_number_text);
   }

   // Maximum time
   {
      TiXmlElement* maximum_time_element = new TiXmlElement("MaximumTime");
      conjugate_gradient_element->LinkEndChild(maximum_time_element);

      buffer.str("");
      buffer << maximum_time;

      TiXmlText* maximum_time_text = new TiXmlText(buffer.str().c_str());
      maximum_time_element->LinkEndChild(maximum_time_text);
   }

   // Reserve parameters history
   {
      TiXmlElement* reserve_parameters_history_element = new TiXmlElement("ReserveParametersHistory");
      conjugate_gradient_element->LinkEndChild(reserve_parameters_history_element);

      buffer.str("");
      buffer << reserve_parameters_history;

      TiXmlText* reserve_parameters_history_text = new TiXmlText(buffer.str().c_str());
      reserve_parameters_history_element->LinkEndChild(reserve_parameters_history_text);
   }

   // Reserve parameters norm history
   {
      TiXmlElement* reserve_parameters_norm_history_element = new TiXmlElement("ReserveParametersNormHistory");
      conjugate_gradient_element->LinkEndChild(reserve_parameters_norm_history_element);

      buffer.str("");
      buffer << reserve_parameters_norm_history;

      TiXmlText* reserve_parameters_norm_history_text = new TiXmlText(buffer.str().c_str());
      reserve_parameters_norm_history_element->LinkEndChild(reserve_parameters_norm_history_text);
   }

   // Reserve evaluation history
   {
      TiXmlElement* reserve_evaluation_history_element = new TiXmlElement("ReservePerformanceHistory");
      conjugate_gradient_element->LinkEndChild(reserve_evaluation_history_element);

      buffer.str("");
      buffer << reserve_evaluation_history;

      TiXmlText* reserve_evaluation_history_text = new TiXmlText(buffer.str().c_str());
      reserve_evaluation_history_element->LinkEndChild(reserve_evaluation_history_text);
   }

   // Reserve gradient history
   {
      TiXmlElement* reserve_gradient_history_element = new TiXmlElement("ReserveGradientHistory");
      conjugate_gradient_element->LinkEndChild(reserve_gradient_history_element);

      buffer.str("");
      buffer << reserve_gradient_history;

      TiXmlText* reserve_gradient_history_text = new TiXmlText(buffer.str().c_str());
      reserve_gradient_history_element->LinkEndChild(reserve_gradient_history_text);
   }

   // Reserve gradient norm history
   {
      TiXmlElement* reserve_gradient_norm_history_element = new TiXmlElement("ReserveGradientNormHistory");
      conjugate_gradient_element->LinkEndChild(reserve_gradient_norm_history_element);

      buffer.str("");
      buffer << reserve_gradient_norm_history;

      TiXmlText* reserve_gradient_norm_history_text = new TiXmlText(buffer.str().c_str());
      reserve_gradient_norm_history_element->LinkEndChild(reserve_gradient_norm_history_text);
   }

   // Reserve training direction history
   {
      TiXmlElement* reserve_training_direction_history_element = new TiXmlElement("ReserveTrainingDirectionHistory");
      conjugate_gradient_element->LinkEndChild(reserve_training_direction_history_element);

      buffer.str("");
      buffer << reserve_training_direction_history;

      TiXmlText* reserve_training_direction_history_text = new TiXmlText(buffer.str().c_str());
      reserve_training_direction_history_element->LinkEndChild(reserve_training_direction_history_text);
   }

   // Reserve training rate history
   {
      TiXmlElement* reserve_training_rate_history_element = new TiXmlElement("ReserveTrainingRateHistory");
      conjugate_gradient_element->LinkEndChild(reserve_training_rate_history_element);

      buffer.str("");
      buffer << reserve_training_rate_history;

      TiXmlText* reserve_training_rate_history_text = new TiXmlText(buffer.str().c_str());
      reserve_training_rate_history_element->LinkEndChild(reserve_training_rate_history_text);
   }

   // Reserve elapsed time history
   {
      TiXmlElement* reserve_elapsed_time_history_element = new TiXmlElement("ReserveElapsedTimeHistory");
      conjugate_gradient_element->LinkEndChild(reserve_elapsed_time_history_element);

      buffer.str("");
      buffer << reserve_elapsed_time_history;

      TiXmlText* reserve_elapsed_time_history_text = new TiXmlText(buffer.str().c_str());
      reserve_elapsed_time_history_element->LinkEndChild(reserve_elapsed_time_history_text);
   }

   // Reserve generalization evaluation history
   {
      TiXmlElement* reserve_generalization_evaluation_history_element = new TiXmlElement("ReserveGeneralizationPerformanceHistory");
      conjugate_gradient_element->LinkEndChild(reserve_generalization_evaluation_history_element);

      buffer.str("");
      buffer << reserve_generalization_evaluation_history;

      TiXmlText* reserve_generalization_evaluation_history_text = new TiXmlText(buffer.str().c_str());
      reserve_generalization_evaluation_history_element->LinkEndChild(reserve_generalization_evaluation_history_text);
   }

   // Display period
   {
      TiXmlElement* display_period_element = new TiXmlElement("DisplayPeriod");
      conjugate_gradient_element->LinkEndChild(display_period_element);

      buffer.str("");
      buffer << display_period;

      TiXmlText* display_period_text = new TiXmlText(buffer.str().c_str());
      display_period_element->LinkEndChild(display_period_text);
   }

   // Display
   {
      TiXmlElement* display_element = new TiXmlElement("Display");
      conjugate_gradient_element->LinkEndChild(display_element);

      buffer.str("");
      buffer << display;

      TiXmlText* display_text = new TiXmlText(buffer.str().c_str());
      display_element->LinkEndChild(display_text);
   }

   return(conjugate_gradient_element);
}


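// A sketch of the XML fragment consumed below (values are illustrative; the
// full set of child elements matches those written by to_XML() above):
//
//    <ConjugateGradient Version="4">
//       <TrainingDirectionMethod>PR</TrainingDirectionMethod>
//       <MaximumEpochsNumber>1000</MaximumEpochsNumber>
//       <MaximumTime>1000</MaximumTime>
//       <Display>1</Display>
//       ...
//    </ConjugateGradient>

/// Deserializes a conjugate gradient object from a TinyXML element.
/// @param conjugate_gradient_element Pointer to a TinyXML element containing the member data.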
void ConjugateGradient::from_XML(TiXmlElement* conjugate_gradient_element)
{
   if(conjugate_gradient_element)
   {
      // Training direction method
      {
         TiXmlElement* training_direction_method_element = conjugate_gradient_element->FirstChildElement("TrainingDirectionMethod");

         if(training_direction_method_element)
         {
            const std::string new_training_direction_method = training_direction_method_element->GetText();

            try
            {
               set_training_direction_method(new_training_direction_method);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Training rate algorithm
      {
         TiXmlElement* training_rate_algorithm_element = conjugate_gradient_element->FirstChildElement("TrainingRateAlgorithm");

         if(training_rate_algorithm_element)
         {
            try
            {
               training_rate_algorithm.from_XML(training_rate_algorithm_element);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Warning parameters norm
      {
         TiXmlElement* warning_parameters_norm_element = conjugate_gradient_element->FirstChildElement("WarningParametersNorm");

         if(warning_parameters_norm_element)
         {
            const double new_warning_parameters_norm = atof(warning_parameters_norm_element->GetText());

            try
            {
               set_warning_parameters_norm(new_warning_parameters_norm);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Warning gradient norm
      {
         TiXmlElement* warning_gradient_norm_element = conjugate_gradient_element->FirstChildElement("WarningGradientNorm");

         if(warning_gradient_norm_element)
         {
            const double new_warning_gradient_norm = atof(warning_gradient_norm_element->GetText());

            try
            {
               set_warning_gradient_norm(new_warning_gradient_norm);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Warning training rate
      {
         TiXmlElement* warning_training_rate_element = conjugate_gradient_element->FirstChildElement("WarningTrainingRate");

         if(warning_training_rate_element)
         {
            const double new_warning_training_rate = atof(warning_training_rate_element->GetText());

            try
            {
               set_warning_training_rate(new_warning_training_rate);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Error parameters norm
      {
         TiXmlElement* error_parameters_norm_element = conjugate_gradient_element->FirstChildElement("ErrorParametersNorm");

         if(error_parameters_norm_element)
         {
            const double new_error_parameters_norm = atof(error_parameters_norm_element->GetText());

            try
            {
               set_error_parameters_norm(new_error_parameters_norm);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Error gradient norm
      {
         TiXmlElement* error_gradient_norm_element = conjugate_gradient_element->FirstChildElement("ErrorGradientNorm");

         if(error_gradient_norm_element)
         {
            const double new_error_gradient_norm = atof(error_gradient_norm_element->GetText());

            try
            {
               set_error_gradient_norm(new_error_gradient_norm);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Error training rate
      {
         TiXmlElement* error_training_rate_element = conjugate_gradient_element->FirstChildElement("ErrorTrainingRate");

         if(error_training_rate_element)
         {
            const double new_error_training_rate = atof(error_training_rate_element->GetText());

            try
            {
               set_error_training_rate(new_error_training_rate);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Minimum parameters increment norm
      {
         TiXmlElement* minimum_parameters_increment_norm_element = conjugate_gradient_element->FirstChildElement("MinimumParametersIncrementNorm");

         if(minimum_parameters_increment_norm_element)
         {
            const double new_minimum_parameters_increment_norm = atof(minimum_parameters_increment_norm_element->GetText());

            try
            {
               set_minimum_parameters_increment_norm(new_minimum_parameters_increment_norm);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Minimum performance increase
      {
         TiXmlElement* minimum_performance_increase_element = conjugate_gradient_element->FirstChildElement("MinimumPerformanceIncrease");

         if(minimum_performance_increase_element)
         {
            const double new_minimum_performance_increase = atof(minimum_performance_increase_element->GetText());

            try
            {
               set_minimum_performance_increase(new_minimum_performance_increase);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Performance goal
      {
         TiXmlElement* performance_goal_element = conjugate_gradient_element->FirstChildElement("PerformanceGoal");

         if(performance_goal_element)
         {
            const double new_performance_goal = atof(performance_goal_element->GetText());

            try
            {
               set_performance_goal(new_performance_goal);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Gradient norm goal
      {
         TiXmlElement* gradient_norm_goal_element = conjugate_gradient_element->FirstChildElement("GradientNormGoal");

         if(gradient_norm_goal_element)
         {
            const double new_gradient_norm_goal = atof(gradient_norm_goal_element->GetText());

            try
            {
               set_gradient_norm_goal(new_gradient_norm_goal);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Maximum generalization evaluation decreases
      {
         TiXmlElement* maximum_generalization_evaluation_decreases_element = conjugate_gradient_element->FirstChildElement("MaximumGeneralizationEvaluationDecreases");

         if(maximum_generalization_evaluation_decreases_element)
         {
            const unsigned int new_maximum_generalization_evaluation_decreases = atoi(maximum_generalization_evaluation_decreases_element->GetText());

            try
            {
               set_maximum_generalization_evaluation_decreases(new_maximum_generalization_evaluation_decreases);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Maximum epochs number
      {
         TiXmlElement* maximum_epochs_number_element = conjugate_gradient_element->FirstChildElement("MaximumEpochsNumber");

         if(maximum_epochs_number_element)
         {
            const unsigned int new_maximum_epochs_number = atoi(maximum_epochs_number_element->GetText());

            try
            {
               set_maximum_epochs_number(new_maximum_epochs_number);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Maximum time
      {
         TiXmlElement* maximum_time_element = conjugate_gradient_element->FirstChildElement("MaximumTime");

         if(maximum_time_element)
         {
            const double new_maximum_time = atof(maximum_time_element->GetText());

            try
            {
               set_maximum_time(new_maximum_time);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve parameters history
      {
         TiXmlElement* reserve_parameters_history_element = conjugate_gradient_element->FirstChildElement("ReserveParametersHistory");

         if(reserve_parameters_history_element)
         {
            const std::string new_reserve_parameters_history = reserve_parameters_history_element->GetText();

            try
            {
               set_reserve_parameters_history(new_reserve_parameters_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve parameters norm history
      {
         TiXmlElement* reserve_parameters_norm_history_element = conjugate_gradient_element->FirstChildElement("ReserveParametersNormHistory");

         if(reserve_parameters_norm_history_element)
         {
            const std::string new_reserve_parameters_norm_history = reserve_parameters_norm_history_element->GetText();

            try
            {
               set_reserve_parameters_norm_history(new_reserve_parameters_norm_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve evaluation history
      {
         TiXmlElement* reserve_evaluation_history_element = conjugate_gradient_element->FirstChildElement("ReservePerformanceHistory");

         if(reserve_evaluation_history_element)
         {
            const std::string new_reserve_evaluation_history = reserve_evaluation_history_element->GetText();

            try
            {
               set_reserve_evaluation_history(new_reserve_evaluation_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve gradient history
      {
         TiXmlElement* reserve_gradient_history_element = conjugate_gradient_element->FirstChildElement("ReserveGradientHistory");

         if(reserve_gradient_history_element)
         {
            const std::string new_reserve_gradient_history = reserve_gradient_history_element->GetText();

            try
            {
               set_reserve_gradient_history(new_reserve_gradient_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve gradient norm history
      {
         TiXmlElement* reserve_gradient_norm_history_element = conjugate_gradient_element->FirstChildElement("ReserveGradientNormHistory");

         if(reserve_gradient_norm_history_element)
         {
            const std::string new_reserve_gradient_norm_history = reserve_gradient_norm_history_element->GetText();

            try
            {
               set_reserve_gradient_norm_history(new_reserve_gradient_norm_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve training direction history
      {
         TiXmlElement* reserve_training_direction_history_element = conjugate_gradient_element->FirstChildElement("ReserveTrainingDirectionHistory");

         if(reserve_training_direction_history_element)
         {
            const std::string new_reserve_training_direction_history = reserve_training_direction_history_element->GetText();

            try
            {
               set_reserve_training_direction_history(new_reserve_training_direction_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve training rate history
      {
         TiXmlElement* reserve_training_rate_history_element = conjugate_gradient_element->FirstChildElement("ReserveTrainingRateHistory");

         if(reserve_training_rate_history_element)
         {
            const std::string new_reserve_training_rate_history = reserve_training_rate_history_element->GetText();

            try
            {
               set_reserve_training_rate_history(new_reserve_training_rate_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve elapsed time history
      {
         TiXmlElement* reserve_elapsed_time_history_element = conjugate_gradient_element->FirstChildElement("ReserveElapsedTimeHistory");

         if(reserve_elapsed_time_history_element)
         {
            const std::string new_reserve_elapsed_time_history = reserve_elapsed_time_history_element->GetText();

            try
            {
               set_reserve_elapsed_time_history(new_reserve_elapsed_time_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Reserve generalization evaluation history
      {
         TiXmlElement* reserve_generalization_evaluation_history_element = conjugate_gradient_element->FirstChildElement("ReserveGeneralizationPerformanceHistory");

         if(reserve_generalization_evaluation_history_element)
         {
            const std::string new_reserve_generalization_evaluation_history = reserve_generalization_evaluation_history_element->GetText();

            try
            {
               set_reserve_generalization_evaluation_history(new_reserve_generalization_evaluation_history != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Display period
      {
         TiXmlElement* display_period_element = conjugate_gradient_element->FirstChildElement("DisplayPeriod");

         if(display_period_element)
         {
            const unsigned int new_display_period = atoi(display_period_element->GetText());

            try
            {
               set_display_period(new_display_period);
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }

      // Display
      {
         TiXmlElement* display_element = conjugate_gradient_element->FirstChildElement("Display");

         if(display_element)
         {
            const std::string new_display = display_element->GetText();

            try
            {
               set_display(new_display != "0");
            }
            catch(std::exception& e)
            {
               std::cout << e.what() << std::endl;
            }
         }
      }
   }
}

}