// OpenNN: Open Neural Networks Library
// NewtonMethod training algorithm implementation.

#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <stdlib.h>
#include <math.h>
#include <time.h>

#include "newton_method.h"

#include "../../parsers/tinyxml/tinyxml.h"

namespace OpenNN
{

/// Default constructor.
/// It creates a Newton method training algorithm object not associated with any performance functional,
/// and initializes the class members to their default values.

NewtonMethod::NewtonMethod(void)
 : TrainingAlgorithm()
{
   set_default();
}


/// Performance functional constructor.
/// It creates a Newton method training algorithm associated with a given performance functional,
/// and initializes the rest of the class members to their default values.
/// @param new_performance_functional_pointer Pointer to a performance functional object.

NewtonMethod::NewtonMethod(PerformanceFunctional* new_performance_functional_pointer)
 : TrainingAlgorithm(new_performance_functional_pointer)
{
   training_rate_algorithm.set_performance_functional_pointer(new_performance_functional_pointer);

   set_default();
}


/// XML constructor.
/// It creates a Newton method training algorithm and loads its members from a TinyXML element.
/// @param Newton_method_element Pointer to a TinyXML element with the Newton method data.

NewtonMethod::NewtonMethod(TiXmlElement* Newton_method_element)
 : TrainingAlgorithm(Newton_method_element)
{
   set_default();

   from_XML(Newton_method_element);
}


/// Destructor.

NewtonMethod::~NewtonMethod(void)
{
}


/// Returns a constant reference to the training rate algorithm object inside the Newton method object.

const TrainingRateAlgorithm& NewtonMethod::get_training_rate_algorithm(void) const
{
   return(training_rate_algorithm);
}


/// Returns a pointer to the training rate algorithm object inside the Newton method object.

TrainingRateAlgorithm* NewtonMethod::get_training_rate_algorithm_pointer(void)
{
   return(&training_rate_algorithm);
}


/// Returns the warning threshold for the norm of the parameters vector.

const double& NewtonMethod::get_warning_parameters_norm(void) const
{
   return(warning_parameters_norm);
}


/// Returns the warning threshold for the norm of the gradient vector.

const double& NewtonMethod::get_warning_gradient_norm(void) const
{
   return(warning_gradient_norm);
}


/// Returns the warning threshold for the training rate.

const double& NewtonMethod::get_warning_training_rate(void) const
{
   return(warning_training_rate);
}


/// Returns the error threshold for the norm of the parameters vector.

const double& NewtonMethod::get_error_parameters_norm(void) const
{
   return(error_parameters_norm);
}


/// Returns the error threshold for the norm of the gradient vector.

const double& NewtonMethod::get_error_gradient_norm(void) const
{
   return(error_gradient_norm);
}


/// Returns the error threshold for the training rate.

const double& NewtonMethod::get_error_training_rate(void) const
{
   return(error_training_rate);
}


/// Returns the minimum norm of the parameters increment vector, used as a stopping criterion.

const double& NewtonMethod::get_minimum_parameters_increment_norm(void) const
{
   return(minimum_parameters_increment_norm);
}


/// Returns the minimum performance increase between two successive epochs, used as a stopping criterion.

const double& NewtonMethod::get_minimum_performance_increase(void) const
{
   return(minimum_performance_increase);
}


/// Returns the goal value for the performance, used as a stopping criterion.

const double& NewtonMethod::get_performance_goal(void) const
{
   return(performance_goal);
}


/// Returns the goal value for the norm of the performance function gradient, used as a stopping criterion.

const double& NewtonMethod::get_gradient_norm_goal(void) const
{
   return(gradient_norm_goal);
}


/// Returns the maximum number of generalization evaluation decreases allowed during training.

const unsigned int& NewtonMethod::get_maximum_generalization_evaluation_decreases(void) const
{
   return(maximum_generalization_evaluation_decreases);
}


/// Returns the maximum number of training epochs.

const unsigned int& NewtonMethod::get_maximum_epochs_number(void) const
{
   return(maximum_epochs_number);
}


/// Returns the maximum training time, in seconds.

const double& NewtonMethod::get_maximum_time(void) const
{
   return(maximum_time);
}


/// Returns true if the parameters history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_parameters_history(void) const
{
   return(reserve_parameters_history);
}


/// Returns true if the parameters norm history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_parameters_norm_history(void) const
{
   return(reserve_parameters_norm_history);
}


/// Returns true if the evaluation history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_evaluation_history(void) const
{
   return(reserve_evaluation_history);
}


/// Returns true if the gradient history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_gradient_history(void) const
{
   return(reserve_gradient_history);
}


/// Returns true if the gradient norm history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_gradient_norm_history(void) const
{
   return(reserve_gradient_norm_history);
}


/// Returns true if the inverse Hessian history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_inverse_Hessian_history(void) const
{
   return(reserve_inverse_Hessian_history);
}


/// Returns true if the training direction history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_training_direction_history(void) const
{
   return(reserve_training_direction_history);
}


/// Returns true if the training rate history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_training_rate_history(void) const
{
   return(reserve_training_rate_history);
}


/// Returns true if the elapsed time history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_elapsed_time_history(void) const
{
   return(reserve_elapsed_time_history);
}


/// Returns true if the generalization evaluation history is to be reserved during training, and false otherwise.

const bool& NewtonMethod::get_reserve_generalization_evaluation_history(void) const
{
   return(reserve_generalization_evaluation_history);
}


/// Returns the number of epochs between two consecutive displays of training progress.

const unsigned int& NewtonMethod::get_display_period(void) const
{
   return(display_period);
}

/// Sets the members of the Newton method object to their default values.

void NewtonMethod::set_default(void)
{
   // Warning and error thresholds

   warning_parameters_norm = 1.0e6;
   warning_gradient_norm = 1.0e6;
   warning_training_rate = 1.0e6;

   error_parameters_norm = 1.0e9;
   error_gradient_norm = 1.0e9;
   error_training_rate = 1.0e9;

   // Stopping criteria

   minimum_parameters_increment_norm = 0.0;

   minimum_performance_increase = 0.0;
   performance_goal = -1.0e99;
   gradient_norm_goal = 0.0;
   maximum_generalization_evaluation_decreases = 1000000;

   maximum_epochs_number = 1000;
   maximum_time = 1000.0;

   // Training history

   reserve_parameters_history = false;
   reserve_parameters_norm_history = false;

   reserve_evaluation_history = true;
   reserve_gradient_history = false;
   reserve_gradient_norm_history = false;
   reserve_inverse_Hessian_history = false;
   reserve_generalization_evaluation_history = false;

   reserve_training_direction_history = false;
   reserve_training_rate_history = false;
   reserve_elapsed_time_history = false;

   // User interface

   display = true;
   display_period = 100;
}

/// Sets the warning threshold for the norm of the parameters vector.
/// @param new_warning_parameters_norm New warning threshold. It must be equal to or greater than 0.

void NewtonMethod::set_warning_parameters_norm(const double& new_warning_parameters_norm)
{
   #ifdef _DEBUG

   if(new_warning_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_warning_parameters_norm(const double&) method.\n"
             << "Warning parameters norm must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_parameters_norm = new_warning_parameters_norm;
}


/// Sets the warning threshold for the norm of the gradient vector.
/// @param new_warning_gradient_norm New warning threshold. It must be equal to or greater than 0.

void NewtonMethod::set_warning_gradient_norm(const double& new_warning_gradient_norm)
{
   #ifdef _DEBUG

   if(new_warning_gradient_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_warning_gradient_norm(const double&) method.\n"
             << "Warning gradient norm must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_gradient_norm = new_warning_gradient_norm;
}


/// Sets the warning threshold for the training rate.
/// @param new_warning_training_rate New warning threshold. It must be equal to or greater than 0.

void NewtonMethod::set_warning_training_rate(const double& new_warning_training_rate)
{
   #ifdef _DEBUG

   if(new_warning_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_warning_training_rate(const double&) method.\n"
             << "Warning training rate must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   warning_training_rate = new_warning_training_rate;
}


/// Sets the error threshold for the norm of the parameters vector.
/// @param new_error_parameters_norm New error threshold. It must be equal to or greater than 0.

void NewtonMethod::set_error_parameters_norm(const double& new_error_parameters_norm)
{
   #ifdef _DEBUG

   if(new_error_parameters_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_error_parameters_norm(const double&) method.\n"
             << "Error parameters norm must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_parameters_norm = new_error_parameters_norm;
}


/// Sets the error threshold for the norm of the gradient vector.
/// @param new_error_gradient_norm New error threshold. It must be equal to or greater than 0.

void NewtonMethod::set_error_gradient_norm(const double& new_error_gradient_norm)
{
   #ifdef _DEBUG

   if(new_error_gradient_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_error_gradient_norm(const double&) method.\n"
             << "Error gradient norm must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_gradient_norm = new_error_gradient_norm;
}


/// Sets the error threshold for the training rate.
/// @param new_error_training_rate New error threshold. It must be equal to or greater than 0.

void NewtonMethod::set_error_training_rate(const double& new_error_training_rate)
{
   #ifdef _DEBUG

   if(new_error_training_rate < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_error_training_rate(const double&) method.\n"
             << "Error training rate must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   error_training_rate = new_error_training_rate;
}

/// Sets a new minimum norm of the parameters increment vector, used as a stopping criterion.
/// @param new_minimum_parameters_increment_norm Minimum norm of the parameters increment. It must be equal to or greater than 0.

void NewtonMethod::set_minimum_parameters_increment_norm(const double& new_minimum_parameters_increment_norm)
{
   #ifdef _DEBUG

   if(new_minimum_parameters_increment_norm < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_minimum_parameters_increment_norm(const double&) method.\n"
             << "Minimum parameters increment norm must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   minimum_parameters_increment_norm = new_minimum_parameters_increment_norm;
}


/// Sets a new minimum performance increase between two successive epochs, used as a stopping criterion.
/// @param new_minimum_performance_increase Minimum performance increase. It must be equal to or greater than 0.

void NewtonMethod::set_minimum_performance_increase(const double& new_minimum_performance_increase)
{
   #ifdef _DEBUG

   if(new_minimum_performance_increase < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_minimum_performance_increase(const double&) method.\n"
             << "Minimum performance increase must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   minimum_performance_increase = new_minimum_performance_increase;
}


/// Sets a new goal value for the performance, used as a stopping criterion.
/// @param new_performance_goal Goal value for the performance.

void NewtonMethod::set_performance_goal(const double& new_performance_goal)
{
   performance_goal = new_performance_goal;
}


/// Sets a new goal value for the norm of the performance function gradient, used as a stopping criterion.
/// @param new_gradient_norm_goal Goal value for the gradient norm. It must be equal to or greater than 0.

void NewtonMethod::set_gradient_norm_goal(const double& new_gradient_norm_goal)
{
   #ifdef _DEBUG

   if(new_gradient_norm_goal < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_gradient_norm_goal(const double&) method.\n"
             << "Gradient norm goal must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   gradient_norm_goal = new_gradient_norm_goal;
}

/// Sets a new maximum number of generalization evaluation decreases, used as an early-stopping criterion.
/// @param new_maximum_generalization_evaluation_decreases Maximum number of epochs in which the generalization evaluation is allowed to decrease.

void NewtonMethod::set_maximum_generalization_evaluation_decreases(const unsigned int& new_maximum_generalization_evaluation_decreases)
{
   // The argument is unsigned, so no sign check is needed here.

   maximum_generalization_evaluation_decreases = new_maximum_generalization_evaluation_decreases;
}


/// Sets a new maximum number of training epochs.
/// @param new_maximum_epochs_number Maximum number of epochs.

void NewtonMethod::set_maximum_epochs_number(const unsigned int& new_maximum_epochs_number)
{
   // The argument is unsigned, so no sign check is needed here.

   maximum_epochs_number = new_maximum_epochs_number;
}


/// Sets a new maximum training time.
/// @param new_maximum_time Maximum training time, in seconds. It must be equal to or greater than 0.

void NewtonMethod::set_maximum_time(const double& new_maximum_time)
{
   #ifdef _DEBUG

   if(new_maximum_time < 0.0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_maximum_time(const double&) method.\n"
             << "Maximum time must be equal to or greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   maximum_time = new_maximum_time;
}

/// Sets whether the parameters history is to be reserved in memory during training.
/// @param new_reserve_parameters_history True if the parameters history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_parameters_history(const bool& new_reserve_parameters_history)
{
   reserve_parameters_history = new_reserve_parameters_history;
}


/// Sets whether the parameters norm history is to be reserved in memory during training.
/// @param new_reserve_parameters_norm_history True if the parameters norm history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_parameters_norm_history(const bool& new_reserve_parameters_norm_history)
{
   reserve_parameters_norm_history = new_reserve_parameters_norm_history;
}


/// Sets whether the evaluation history is to be reserved in memory during training.
/// @param new_reserve_evaluation_history True if the evaluation history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_evaluation_history(const bool& new_reserve_evaluation_history)
{
   reserve_evaluation_history = new_reserve_evaluation_history;
}


/// Sets whether the gradient history is to be reserved in memory during training.
/// @param new_reserve_gradient_history True if the gradient history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_gradient_history(const bool& new_reserve_gradient_history)
{
   reserve_gradient_history = new_reserve_gradient_history;
}


/// Sets whether the gradient norm history is to be reserved in memory during training.
/// @param new_reserve_gradient_norm_history True if the gradient norm history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_gradient_norm_history(const bool& new_reserve_gradient_norm_history)
{
   reserve_gradient_norm_history = new_reserve_gradient_norm_history;
}


/// Sets whether the inverse Hessian history is to be reserved in memory during training.
/// @param new_reserve_inverse_Hessian_history True if the inverse Hessian history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_inverse_Hessian_history(const bool& new_reserve_inverse_Hessian_history)
{
   reserve_inverse_Hessian_history = new_reserve_inverse_Hessian_history;
}


/// Sets whether the training direction history is to be reserved in memory during training.
/// @param new_reserve_training_direction_history True if the training direction history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_training_direction_history(const bool& new_reserve_training_direction_history)
{
   reserve_training_direction_history = new_reserve_training_direction_history;
}


/// Sets whether the training rate history is to be reserved in memory during training.
/// @param new_reserve_training_rate_history True if the training rate history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_training_rate_history(const bool& new_reserve_training_rate_history)
{
   reserve_training_rate_history = new_reserve_training_rate_history;
}


/// Sets whether the elapsed time history is to be reserved in memory during training.
/// @param new_reserve_elapsed_time_history True if the elapsed time history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_elapsed_time_history(const bool& new_reserve_elapsed_time_history)
{
   reserve_elapsed_time_history = new_reserve_elapsed_time_history;
}


/// Sets whether the generalization evaluation history is to be reserved in memory during training.
/// @param new_reserve_generalization_evaluation_history True if the generalization evaluation history is to be reserved, false otherwise.

void NewtonMethod::set_reserve_generalization_evaluation_history(const bool& new_reserve_generalization_evaluation_history)
{
   reserve_generalization_evaluation_history = new_reserve_generalization_evaluation_history;
}


/// Sets a new number of epochs between two consecutive displays of training progress.
/// @param new_display_period Display period, in epochs. It must be greater than 0.

void NewtonMethod::set_display_period(const unsigned int& new_display_period)
{
   #ifdef _DEBUG

   if(new_display_period == 0)
   {
      std::ostringstream buffer;

      buffer << "OpenNN Exception: NewtonMethod class.\n"
             << "void set_display_period(const unsigned int&) method.\n"
             << "Display period must be greater than 0.\n";

      throw std::logic_error(buffer.str().c_str());
   }

   #endif

   display_period = new_display_period;
}

/// Returns the normalized gradient descent training direction, -g/||g||.
/// @param gradient Gradient vector of the performance function.

Vector<double> NewtonMethod::calculate_gradient_descent_training_direction(const Vector<double>& gradient) const
{
   double gradient_norm = gradient.calculate_norm();

   return(gradient*(-1.0/gradient_norm));
}


/// Returns the normalized Newton training direction, -(H^-1*g)/||H^-1*g||.
/// @param gradient Gradient vector of the performance function.
/// @param inverse_Hessian Inverse of the Hessian matrix of the performance function.

Vector<double> NewtonMethod::calculate_training_direction
(const Vector<double>& gradient, const Matrix<double>& inverse_Hessian) const
{
   Vector<double> training_direction = inverse_Hessian.dot(gradient)*(-1.0);

   double training_direction_norm = training_direction.calculate_norm();

   return(training_direction/training_direction_norm);
}
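
// Note (illustrative, not part of the original implementation): with gradient g and
// inverse Hessian H^{-1}, the two routines above return unit-norm descent directions
//
//    gradient descent:  d = -g / ||g||
//    Newton's method:   d = -(H^{-1} g) / ||H^{-1} g||
//
// The step actually taken during training would then be parameters <- parameters + eta*d,
// where the training rate eta is selected by the training rate algorithm.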

/// Resizes all the training history vectors to a new size.
/// @param new_size Size of the training history.

void NewtonMethod::NewtonMethodResults::resize_training_history(const unsigned int& new_size)
{
   parameters_history.resize(new_size);
   parameters_norm_history.resize(new_size);

   evaluation_history.resize(new_size);
   generalization_evaluation_history.resize(new_size);
   gradient_history.resize(new_size);
   gradient_norm_history.resize(new_size);
   inverse_Hessian_history.resize(new_size);

   training_direction_history.resize(new_size);
   training_rate_history.resize(new_size);
   elapsed_time_history.resize(new_size);
}


/// Returns a string representation of the training results, including only the histories which are not empty.

std::string NewtonMethod::NewtonMethodResults::to_string(void) const
{
   std::ostringstream buffer;

   // Parameters history

   if(!parameters_history.empty())
   {
      if(!parameters_history[0].empty())
      {
         buffer << "% Parameters history:\n"
                << parameters_history << "\n";
      }
   }

   // Parameters norm history

   if(!parameters_norm_history.empty())
   {
      buffer << "% Parameters norm history:\n"
             << parameters_norm_history << "\n";
   }

   // Evaluation history

   if(!evaluation_history.empty())
   {
      buffer << "% Evaluation history:\n"
             << evaluation_history << "\n";
   }

   // Generalization evaluation history

   if(!generalization_evaluation_history.empty())
   {
      buffer << "% Generalization evaluation history:\n"
             << generalization_evaluation_history << "\n";
   }

   // Gradient history

   if(!gradient_history.empty())
   {
      if(!gradient_history[0].empty())
      {
         buffer << "% Gradient history:\n"
                << gradient_history << "\n";
      }
   }

   // Gradient norm history

   if(!gradient_norm_history.empty())
   {
      buffer << "% Gradient norm history:\n"
             << gradient_norm_history << "\n";
   }

   // Inverse Hessian history

   if(!inverse_Hessian_history.empty())
   {
      if(!inverse_Hessian_history[0].empty())
      {
         buffer << "% Inverse Hessian history:\n"
                << inverse_Hessian_history << "\n";
      }
   }

   // Training direction history

   if(!training_direction_history.empty())
   {
      if(!training_direction_history[0].empty())
      {
         buffer << "% Training direction history:\n"
                << training_direction_history << "\n";
      }
   }

   // Training rate history

   if(!training_rate_history.empty())
   {
      buffer << "% Training rate history:\n"
             << training_rate_history << "\n";
   }

   // Elapsed time history

   if(!elapsed_time_history.empty())
   {
      buffer << "% Elapsed time history:\n"
             << elapsed_time_history << "\n";
   }

   return(buffer.str());
}

/// This method is intended to train the neural network associated with the performance functional
/// according to the Newton method. It is still under development and currently always throws a
/// std::logic_error exception.

NewtonMethod::NewtonMethodResults* NewtonMethod::perform_training(void)
{
   std::ostringstream buffer;

   buffer << "OpenNN Exception: NewtonMethod class.\n"
          << "NewtonMethodResults* perform_training(void) method.\n"
          << "This method is under development.\n";

   throw std::logic_error(buffer.str().c_str());
}
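
// Sketch of the intended Newton training loop, kept as a comment because perform_training()
// is still under development. The performance_functional_pointer member and the
// PerformanceFunctional member functions used below (calculate_gradient, calculate_inverse_Hessian)
// are assumptions for illustration only and may not match the actual interfaces.
//
//    NewtonMethodResults* results = new NewtonMethodResults;
//
//    for(unsigned int epoch = 0; epoch <= maximum_epochs_number; epoch++)
//    {
//       const Vector<double> gradient = performance_functional_pointer->calculate_gradient();
//       const Matrix<double> inverse_Hessian = performance_functional_pointer->calculate_inverse_Hessian();
//
//       if(gradient.calculate_norm() <= gradient_norm_goal)
//       {
//          break;
//       }
//
//       const Vector<double> training_direction = calculate_training_direction(gradient, inverse_Hessian);
//
//       // A line search along training_direction would give the training rate, and the
//       // parameters would be updated as parameters += training_direction*training_rate.
//    }
//
//    return(results);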

/// Returns a string with the name of the training algorithm type, "NEWTON_METHOD".

std::string NewtonMethod::write_training_algorithm_type(void) const
{
   return("NEWTON_METHOD");
}

/// Serializes the Newton method object into a TinyXML element, including the training rate algorithm,
/// the stopping criteria, the training history flags and the display settings.

TiXmlElement* NewtonMethod::to_XML(void) const
{
   std::ostringstream buffer;

   TiXmlElement* Newton_method_element = new TiXmlElement("NewtonMethod");
   Newton_method_element->SetAttribute("Version", 4);

   // Training rate algorithm

   TiXmlElement* training_rate_algorithm_element = training_rate_algorithm.to_XML();
   Newton_method_element->LinkEndChild(training_rate_algorithm_element);

   // Warning parameters norm

   TiXmlElement* warning_parameters_norm_element = new TiXmlElement("WarningParametersNorm");
   Newton_method_element->LinkEndChild(warning_parameters_norm_element);

   buffer.str("");
   buffer << warning_parameters_norm;

   TiXmlText* warning_parameters_norm_text = new TiXmlText(buffer.str().c_str());
   warning_parameters_norm_element->LinkEndChild(warning_parameters_norm_text);

   // Warning gradient norm

   TiXmlElement* warning_gradient_norm_element = new TiXmlElement("WarningGradientNorm");
   Newton_method_element->LinkEndChild(warning_gradient_norm_element);

   buffer.str("");
   buffer << warning_gradient_norm;

   TiXmlText* warning_gradient_norm_text = new TiXmlText(buffer.str().c_str());
   warning_gradient_norm_element->LinkEndChild(warning_gradient_norm_text);

   // Warning training rate

   TiXmlElement* warning_training_rate_element = new TiXmlElement("WarningTrainingRate");
   Newton_method_element->LinkEndChild(warning_training_rate_element);

   buffer.str("");
   buffer << warning_training_rate;

   TiXmlText* warning_training_rate_text = new TiXmlText(buffer.str().c_str());
   warning_training_rate_element->LinkEndChild(warning_training_rate_text);

   // Error parameters norm

   TiXmlElement* error_parameters_norm_element = new TiXmlElement("ErrorParametersNorm");
   Newton_method_element->LinkEndChild(error_parameters_norm_element);

   buffer.str("");
   buffer << error_parameters_norm;

   TiXmlText* error_parameters_norm_text = new TiXmlText(buffer.str().c_str());
   error_parameters_norm_element->LinkEndChild(error_parameters_norm_text);

   // Error gradient norm

   TiXmlElement* error_gradient_norm_element = new TiXmlElement("ErrorGradientNorm");
   Newton_method_element->LinkEndChild(error_gradient_norm_element);

   buffer.str("");
   buffer << error_gradient_norm;

   TiXmlText* error_gradient_norm_text = new TiXmlText(buffer.str().c_str());
   error_gradient_norm_element->LinkEndChild(error_gradient_norm_text);

   // Error training rate

   TiXmlElement* error_training_rate_element = new TiXmlElement("ErrorTrainingRate");
   Newton_method_element->LinkEndChild(error_training_rate_element);

   buffer.str("");
   buffer << error_training_rate;

   TiXmlText* error_training_rate_text = new TiXmlText(buffer.str().c_str());
   error_training_rate_element->LinkEndChild(error_training_rate_text);

   // Minimum parameters increment norm

   TiXmlElement* minimum_parameters_increment_norm_element = new TiXmlElement("MinimumParametersIncrementNorm");
   Newton_method_element->LinkEndChild(minimum_parameters_increment_norm_element);

   buffer.str("");
   buffer << minimum_parameters_increment_norm;

   TiXmlText* minimum_parameters_increment_norm_text = new TiXmlText(buffer.str().c_str());
   minimum_parameters_increment_norm_element->LinkEndChild(minimum_parameters_increment_norm_text);

   // Minimum performance increase

   TiXmlElement* minimum_performance_increase_element = new TiXmlElement("MinimumPerformanceIncrease");
   Newton_method_element->LinkEndChild(minimum_performance_increase_element);

   buffer.str("");
   buffer << minimum_performance_increase;

   TiXmlText* minimum_performance_increase_text = new TiXmlText(buffer.str().c_str());
   minimum_performance_increase_element->LinkEndChild(minimum_performance_increase_text);

   // Performance goal

   TiXmlElement* performance_goal_element = new TiXmlElement("PerformanceGoal");
   Newton_method_element->LinkEndChild(performance_goal_element);

   buffer.str("");
   buffer << performance_goal;

   TiXmlText* performance_goal_text = new TiXmlText(buffer.str().c_str());
   performance_goal_element->LinkEndChild(performance_goal_text);

   // Gradient norm goal

   TiXmlElement* gradient_norm_goal_element = new TiXmlElement("GradientNormGoal");
   Newton_method_element->LinkEndChild(gradient_norm_goal_element);

   buffer.str("");
   buffer << gradient_norm_goal;

   TiXmlText* gradient_norm_goal_text = new TiXmlText(buffer.str().c_str());
   gradient_norm_goal_element->LinkEndChild(gradient_norm_goal_text);

   // Maximum generalization evaluation decreases

   TiXmlElement* maximum_generalization_evaluation_decreases_element = new TiXmlElement("MaximumGeneralizationEvaluationDecreases");
   Newton_method_element->LinkEndChild(maximum_generalization_evaluation_decreases_element);

   buffer.str("");
   buffer << maximum_generalization_evaluation_decreases;

   TiXmlText* maximum_generalization_evaluation_decreases_text = new TiXmlText(buffer.str().c_str());
   maximum_generalization_evaluation_decreases_element->LinkEndChild(maximum_generalization_evaluation_decreases_text);

   // Maximum epochs number

   TiXmlElement* maximum_epochs_number_element = new TiXmlElement("MaximumEpochsNumber");
   Newton_method_element->LinkEndChild(maximum_epochs_number_element);

   buffer.str("");
   buffer << maximum_epochs_number;

   TiXmlText* maximum_epochs_number_text = new TiXmlText(buffer.str().c_str());
   maximum_epochs_number_element->LinkEndChild(maximum_epochs_number_text);

   // Maximum time

   TiXmlElement* maximum_time_element = new TiXmlElement("MaximumTime");
   Newton_method_element->LinkEndChild(maximum_time_element);

   buffer.str("");
   buffer << maximum_time;

   TiXmlText* maximum_time_text = new TiXmlText(buffer.str().c_str());
   maximum_time_element->LinkEndChild(maximum_time_text);

   // Reserve parameters history

   TiXmlElement* reserve_parameters_history_element = new TiXmlElement("ReserveParametersHistory");
   Newton_method_element->LinkEndChild(reserve_parameters_history_element);

   buffer.str("");
   buffer << reserve_parameters_history;

   TiXmlText* reserve_parameters_history_text = new TiXmlText(buffer.str().c_str());
   reserve_parameters_history_element->LinkEndChild(reserve_parameters_history_text);

   // Reserve parameters norm history

   TiXmlElement* reserve_parameters_norm_history_element = new TiXmlElement("ReserveParametersNormHistory");
   Newton_method_element->LinkEndChild(reserve_parameters_norm_history_element);

   buffer.str("");
   buffer << reserve_parameters_norm_history;

   TiXmlText* reserve_parameters_norm_history_text = new TiXmlText(buffer.str().c_str());
   reserve_parameters_norm_history_element->LinkEndChild(reserve_parameters_norm_history_text);

   // Reserve evaluation history

   TiXmlElement* reserve_evaluation_history_element = new TiXmlElement("ReservePerformanceHistory");
   Newton_method_element->LinkEndChild(reserve_evaluation_history_element);

   buffer.str("");
   buffer << reserve_evaluation_history;

   TiXmlText* reserve_evaluation_history_text = new TiXmlText(buffer.str().c_str());
   reserve_evaluation_history_element->LinkEndChild(reserve_evaluation_history_text);

   // Reserve gradient history

   TiXmlElement* reserve_gradient_history_element = new TiXmlElement("ReserveGradientHistory");
   Newton_method_element->LinkEndChild(reserve_gradient_history_element);

   buffer.str("");
   buffer << reserve_gradient_history;

   TiXmlText* reserve_gradient_history_text = new TiXmlText(buffer.str().c_str());
   reserve_gradient_history_element->LinkEndChild(reserve_gradient_history_text);

   // Reserve gradient norm history

   TiXmlElement* reserve_gradient_norm_history_element = new TiXmlElement("ReserveGradientNormHistory");
   Newton_method_element->LinkEndChild(reserve_gradient_norm_history_element);

   buffer.str("");
   buffer << reserve_gradient_norm_history;

   TiXmlText* reserve_gradient_norm_history_text = new TiXmlText(buffer.str().c_str());
   reserve_gradient_norm_history_element->LinkEndChild(reserve_gradient_norm_history_text);

   // Reserve inverse Hessian history

   TiXmlElement* reserve_inverse_Hessian_history_element = new TiXmlElement("ReserveInverseHessianHistory");
   Newton_method_element->LinkEndChild(reserve_inverse_Hessian_history_element);

   buffer.str("");
   buffer << reserve_inverse_Hessian_history;

   TiXmlText* reserve_inverse_Hessian_history_text = new TiXmlText(buffer.str().c_str());
   reserve_inverse_Hessian_history_element->LinkEndChild(reserve_inverse_Hessian_history_text);

   // Reserve training direction history

   TiXmlElement* reserve_training_direction_history_element = new TiXmlElement("ReserveTrainingDirectionHistory");
   Newton_method_element->LinkEndChild(reserve_training_direction_history_element);

   buffer.str("");
   buffer << reserve_training_direction_history;

   TiXmlText* reserve_training_direction_history_text = new TiXmlText(buffer.str().c_str());
   reserve_training_direction_history_element->LinkEndChild(reserve_training_direction_history_text);

   // Reserve training rate history

   TiXmlElement* reserve_training_rate_history_element = new TiXmlElement("ReserveTrainingRateHistory");
   Newton_method_element->LinkEndChild(reserve_training_rate_history_element);

   buffer.str("");
   buffer << reserve_training_rate_history;

   TiXmlText* reserve_training_rate_history_text = new TiXmlText(buffer.str().c_str());
   reserve_training_rate_history_element->LinkEndChild(reserve_training_rate_history_text);

   // Reserve elapsed time history

   TiXmlElement* reserve_elapsed_time_history_element = new TiXmlElement("ReserveElapsedTimeHistory");
   Newton_method_element->LinkEndChild(reserve_elapsed_time_history_element);

   buffer.str("");
   buffer << reserve_elapsed_time_history;

   TiXmlText* reserve_elapsed_time_history_text = new TiXmlText(buffer.str().c_str());
   reserve_elapsed_time_history_element->LinkEndChild(reserve_elapsed_time_history_text);

   // Reserve generalization evaluation history

   TiXmlElement* reserve_generalization_evaluation_history_element = new TiXmlElement("ReserveGeneralizationPerformanceHistory");
   Newton_method_element->LinkEndChild(reserve_generalization_evaluation_history_element);

   buffer.str("");
   buffer << reserve_generalization_evaluation_history;

   TiXmlText* reserve_generalization_evaluation_history_text = new TiXmlText(buffer.str().c_str());
   reserve_generalization_evaluation_history_element->LinkEndChild(reserve_generalization_evaluation_history_text);

   // Display period

   TiXmlElement* display_period_element = new TiXmlElement("DisplayPeriod");
   Newton_method_element->LinkEndChild(display_period_element);

   buffer.str("");
   buffer << display_period;

   TiXmlText* display_period_text = new TiXmlText(buffer.str().c_str());
   display_period_element->LinkEndChild(display_period_text);

   // Display

   TiXmlElement* display_element = new TiXmlElement("Display");
   Newton_method_element->LinkEndChild(display_element);

   buffer.str("");
   buffer << display;

   TiXmlText* display_text = new TiXmlText(buffer.str().c_str());
   display_element->LinkEndChild(display_text);

   return(Newton_method_element);
}
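
// Illustrative round trip (not part of the library): serializing the training settings of a
// NewtonMethod object nm and restoring them on another object. The returned element is not
// linked to any document, so the caller deletes it when done.
//
//    TiXmlElement* element = nm.to_XML();
//
//    NewtonMethod restored_nm;
//    restored_nm.from_XML(element);
//
//    delete element;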

/// Loads the members of the Newton method object from a TinyXML element.
/// @param newton_method_element Pointer to a TinyXML element with the Newton method data.

void NewtonMethod::from_XML(TiXmlElement* newton_method_element)
{
   // Training rate algorithm

   TiXmlElement* training_rate_algorithm_element = newton_method_element->FirstChildElement("TrainingRateAlgorithm");

   if(training_rate_algorithm_element)
   {
      try
      {
         training_rate_algorithm.from_XML(training_rate_algorithm_element);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Warning parameters norm

   TiXmlElement* warning_parameters_norm_element = newton_method_element->FirstChildElement("WarningParametersNorm");

   if(warning_parameters_norm_element)
   {
      double new_warning_parameters_norm = atof(warning_parameters_norm_element->GetText());

      try
      {
         set_warning_parameters_norm(new_warning_parameters_norm);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Warning gradient norm

   TiXmlElement* warning_gradient_norm_element = newton_method_element->FirstChildElement("WarningGradientNorm");

   if(warning_gradient_norm_element)
   {
      double new_warning_gradient_norm = atof(warning_gradient_norm_element->GetText());

      try
      {
         set_warning_gradient_norm(new_warning_gradient_norm);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Warning training rate

   TiXmlElement* warning_training_rate_element = newton_method_element->FirstChildElement("WarningTrainingRate");

   if(warning_training_rate_element)
   {
      double new_warning_training_rate = atof(warning_training_rate_element->GetText());

      try
      {
         set_warning_training_rate(new_warning_training_rate);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Error parameters norm

   TiXmlElement* error_parameters_norm_element = newton_method_element->FirstChildElement("ErrorParametersNorm");

   if(error_parameters_norm_element)
   {
      double new_error_parameters_norm = atof(error_parameters_norm_element->GetText());

      try
      {
         set_error_parameters_norm(new_error_parameters_norm);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Error gradient norm

   TiXmlElement* error_gradient_norm_element = newton_method_element->FirstChildElement("ErrorGradientNorm");

   if(error_gradient_norm_element)
   {
      double new_error_gradient_norm = atof(error_gradient_norm_element->GetText());

      try
      {
         set_error_gradient_norm(new_error_gradient_norm);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Error training rate

   TiXmlElement* error_training_rate_element = newton_method_element->FirstChildElement("ErrorTrainingRate");

   if(error_training_rate_element)
   {
      double new_error_training_rate = atof(error_training_rate_element->GetText());

      try
      {
         set_error_training_rate(new_error_training_rate);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Minimum parameters increment norm

   TiXmlElement* minimum_parameters_increment_norm_element = newton_method_element->FirstChildElement("MinimumParametersIncrementNorm");

   if(minimum_parameters_increment_norm_element)
   {
      double new_minimum_parameters_increment_norm = atof(minimum_parameters_increment_norm_element->GetText());

      try
      {
         set_minimum_parameters_increment_norm(new_minimum_parameters_increment_norm);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Minimum performance increase

   TiXmlElement* minimum_performance_increase_element = newton_method_element->FirstChildElement("MinimumPerformanceIncrease");

   if(minimum_performance_increase_element)
   {
      double new_minimum_performance_increase = atof(minimum_performance_increase_element->GetText());

      try
      {
         set_minimum_performance_increase(new_minimum_performance_increase);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Performance goal

   TiXmlElement* performance_goal_element = newton_method_element->FirstChildElement("PerformanceGoal");

   if(performance_goal_element)
   {
      double new_performance_goal = atof(performance_goal_element->GetText());

      try
      {
         set_performance_goal(new_performance_goal);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Gradient norm goal

   TiXmlElement* gradient_norm_goal_element = newton_method_element->FirstChildElement("GradientNormGoal");

   if(gradient_norm_goal_element)
   {
      double new_gradient_norm_goal = atof(gradient_norm_goal_element->GetText());

      try
      {
         set_gradient_norm_goal(new_gradient_norm_goal);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Maximum generalization evaluation decreases

   TiXmlElement* maximum_generalization_evaluation_decreases_element = newton_method_element->FirstChildElement("MaximumGeneralizationEvaluationDecreases");

   if(maximum_generalization_evaluation_decreases_element)
   {
      unsigned int new_maximum_generalization_evaluation_decreases = atoi(maximum_generalization_evaluation_decreases_element->GetText());

      try
      {
         set_maximum_generalization_evaluation_decreases(new_maximum_generalization_evaluation_decreases);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Maximum epochs number

   TiXmlElement* maximum_epochs_number_element = newton_method_element->FirstChildElement("MaximumEpochsNumber");

   if(maximum_epochs_number_element)
   {
      unsigned int new_maximum_epochs_number = atoi(maximum_epochs_number_element->GetText());

      try
      {
         set_maximum_epochs_number(new_maximum_epochs_number);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Maximum time

   TiXmlElement* maximum_time_element = newton_method_element->FirstChildElement("MaximumTime");

   if(maximum_time_element)
   {
      double new_maximum_time = atof(maximum_time_element->GetText());

      try
      {
         set_maximum_time(new_maximum_time);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve parameters history

   TiXmlElement* reserve_parameters_history_element = newton_method_element->FirstChildElement("ReserveParametersHistory");

   if(reserve_parameters_history_element)
   {
      std::string new_reserve_parameters_history = reserve_parameters_history_element->GetText();

      try
      {
         set_reserve_parameters_history(new_reserve_parameters_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve parameters norm history

   TiXmlElement* reserve_parameters_norm_history_element = newton_method_element->FirstChildElement("ReserveParametersNormHistory");

   if(reserve_parameters_norm_history_element)
   {
      std::string new_reserve_parameters_norm_history = reserve_parameters_norm_history_element->GetText();

      try
      {
         set_reserve_parameters_norm_history(new_reserve_parameters_norm_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve evaluation history

   TiXmlElement* reserve_evaluation_history_element = newton_method_element->FirstChildElement("ReservePerformanceHistory");

   if(reserve_evaluation_history_element)
   {
      std::string new_reserve_evaluation_history = reserve_evaluation_history_element->GetText();

      try
      {
         set_reserve_evaluation_history(new_reserve_evaluation_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve gradient history

   TiXmlElement* reserve_gradient_history_element = newton_method_element->FirstChildElement("ReserveGradientHistory");

   if(reserve_gradient_history_element)
   {
      std::string new_reserve_gradient_history = reserve_gradient_history_element->GetText();

      try
      {
         set_reserve_gradient_history(new_reserve_gradient_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve gradient norm history

   TiXmlElement* reserve_gradient_norm_history_element = newton_method_element->FirstChildElement("ReserveGradientNormHistory");

   if(reserve_gradient_norm_history_element)
   {
      std::string new_reserve_gradient_norm_history = reserve_gradient_norm_history_element->GetText();

      try
      {
         set_reserve_gradient_norm_history(new_reserve_gradient_norm_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve inverse Hessian history

   TiXmlElement* reserve_inverse_Hessian_history_element = newton_method_element->FirstChildElement("ReserveInverseHessianHistory");

   if(reserve_inverse_Hessian_history_element)
   {
      std::string new_reserve_inverse_Hessian_history = reserve_inverse_Hessian_history_element->GetText();

      try
      {
         set_reserve_inverse_Hessian_history(new_reserve_inverse_Hessian_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve training direction history

   TiXmlElement* reserve_training_direction_history_element = newton_method_element->FirstChildElement("ReserveTrainingDirectionHistory");

   if(reserve_training_direction_history_element)
   {
      std::string new_reserve_training_direction_history = reserve_training_direction_history_element->GetText();

      try
      {
         set_reserve_training_direction_history(new_reserve_training_direction_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve training rate history

   TiXmlElement* reserve_training_rate_history_element = newton_method_element->FirstChildElement("ReserveTrainingRateHistory");

   if(reserve_training_rate_history_element)
   {
      std::string new_reserve_training_rate_history = reserve_training_rate_history_element->GetText();

      try
      {
         set_reserve_training_rate_history(new_reserve_training_rate_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve elapsed time history

   TiXmlElement* reserve_elapsed_time_history_element = newton_method_element->FirstChildElement("ReserveElapsedTimeHistory");

   if(reserve_elapsed_time_history_element)
   {
      std::string new_reserve_elapsed_time_history = reserve_elapsed_time_history_element->GetText();

      try
      {
         set_reserve_elapsed_time_history(new_reserve_elapsed_time_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Reserve generalization evaluation history

   TiXmlElement* reserve_generalization_evaluation_history_element = newton_method_element->FirstChildElement("ReserveGeneralizationPerformanceHistory");

   if(reserve_generalization_evaluation_history_element)
   {
      std::string new_reserve_generalization_evaluation_history = reserve_generalization_evaluation_history_element->GetText();

      try
      {
         set_reserve_generalization_evaluation_history(new_reserve_generalization_evaluation_history != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Display period

   TiXmlElement* display_period_element = newton_method_element->FirstChildElement("DisplayPeriod");

   if(display_period_element)
   {
      unsigned int new_display_period = atoi(display_period_element->GetText());

      try
      {
         set_display_period(new_display_period);
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }

   // Display

   TiXmlElement* display_element = newton_method_element->FirstChildElement("Display");

   if(display_element)
   {
      std::string new_display = display_element->GetText();

      try
      {
         set_display(new_display != "0");
      }
      catch(std::exception& e)
      {
         std::cout << e.what() << std::endl;
      }
   }
}

/// Sets whether all the training history variables are to be reserved in memory during training.
/// @param new_reserve_all_training_history True if all training history variables are to be reserved, false otherwise.

void NewtonMethod::set_reserve_all_training_history(const bool& new_reserve_all_training_history)
{
   reserve_parameters_history = new_reserve_all_training_history;
   reserve_parameters_norm_history = new_reserve_all_training_history;

   reserve_evaluation_history = new_reserve_all_training_history;
   reserve_gradient_history = new_reserve_all_training_history;
   reserve_gradient_norm_history = new_reserve_all_training_history;
   reserve_inverse_Hessian_history = new_reserve_all_training_history;

   reserve_training_direction_history = new_reserve_all_training_history;
   reserve_training_rate_history = new_reserve_all_training_history;
   reserve_elapsed_time_history = new_reserve_all_training_history;

   reserve_generalization_evaluation_history = new_reserve_all_training_history;
}

} // namespace OpenNN
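
// Usage sketch (illustrative only, not part of the library): configuring a NewtonMethod object
// before training. The construction of the PerformanceFunctional is assumed to happen elsewhere
// in the application.
//
//    OpenNN::PerformanceFunctional performance_functional;   // assumed to be set up elsewhere
//
//    OpenNN::NewtonMethod newton_method(&performance_functional);
//
//    newton_method.set_maximum_epochs_number(500);
//    newton_method.set_maximum_time(60.0);                    // seconds
//    newton_method.set_gradient_norm_goal(1.0e-3);
//    newton_method.set_reserve_evaluation_history(true);
//
//    // newton_method.perform_training();                     // currently under development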