#include <cmath>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <string>
#include <sstream>

#include "perceptron_layer.h"

namespace OpenNN
{

// Default constructor. It creates an empty perceptron layer.

PerceptronLayer::PerceptronLayer(void)
{
    set();
}


// Architecture constructor. It creates a layer with a given number of perceptrons, each having a given number of inputs.

PerceptronLayer::PerceptronLayer(const unsigned int& new_inputs_number, const unsigned int& new_perceptrons_number)
{
    set(new_inputs_number, new_perceptrons_number);
}


// Copy constructor.

PerceptronLayer::PerceptronLayer(const PerceptronLayer& other_perceptron_layer)
{
    set(other_perceptron_layer);
}


// Destructor.

PerceptronLayer::~PerceptronLayer(void)
{
}

// Assignment operator.

PerceptronLayer& PerceptronLayer::operator = (const PerceptronLayer& other_perceptron_layer)
{
    if(this != &other_perceptron_layer)
    {
        perceptrons = other_perceptron_layer.perceptrons;

        display = other_perceptron_layer.display;
    }

    return(*this);
}


// Equal to operator. It returns true if both layers have the same perceptrons and display flag, and false otherwise.

bool PerceptronLayer::operator == (const PerceptronLayer& other_perceptron_layer) const
{
    if(perceptrons == other_perceptron_layer.perceptrons
    && display == other_perceptron_layer.display)
    {
        return(true);
    }
    else
    {
        return(false);
    }
}

// Returns true if the layer contains no perceptrons.

bool PerceptronLayer::is_empty(void) const
{
    if(perceptrons.empty())
    {
        return(true);
    }
    else
    {
        return(false);
    }
}


// Returns a constant reference to the vector of perceptrons in the layer.

const Vector<Perceptron>& PerceptronLayer::get_perceptrons(void) const
{
    return(perceptrons);
}


// Returns the number of inputs to the layer, or 0 if the layer is empty.

unsigned int PerceptronLayer::count_inputs_number(void) const
{
    if(is_empty())
    {
        return(0);
    }
    else
    {
        return(perceptrons[0].count_inputs_number());
    }
}

// Returns the number of perceptrons in the layer.

unsigned int PerceptronLayer::count_perceptrons_number(void) const
{
    const unsigned int perceptrons_number = perceptrons.size();

    return(perceptrons_number);
}


// Returns a constant reference to the perceptron with the given index.

const Perceptron& PerceptronLayer::get_perceptron(const unsigned int& index) const
{
    // Check that the index is within bounds (debug builds only).

    #ifdef _DEBUG

    const unsigned int perceptrons_number = count_perceptrons_number();

    if(index >= perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "const Perceptron& get_perceptron(const unsigned int&) const method.\n"
               << "Index of perceptron must be less than layer size.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    return(perceptrons[index]);
}

// Returns the total number of parameters (biases and synaptic weights) in the layer.

unsigned int PerceptronLayer::count_parameters_number(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    unsigned int parameters_number = 0;

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        parameters_number += perceptrons[i].count_parameters_number();
    }

    return(parameters_number);
}


// Returns a vector with the cumulative number of parameters up to and including each perceptron.

Vector<unsigned int> PerceptronLayer::count_cumulative_parameters_number(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector<unsigned int> cumulative_parameters_number(perceptrons_number);

    if(perceptrons_number > 0)
    {
        cumulative_parameters_number[0] = perceptrons[0].count_parameters_number();

        for(unsigned int i = 1; i < perceptrons_number; i++)
        {
            cumulative_parameters_number[i] = cumulative_parameters_number[i-1] + perceptrons[i].count_parameters_number();
        }
    }

    return(cumulative_parameters_number);
}

// Returns a vector with the bias of every perceptron in the layer.

Vector<double> PerceptronLayer::arrange_biases(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector<double> biases(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        biases[i] = perceptrons[i].get_bias();
    }

    return(biases);
}


// Returns the synaptic weights of the layer as a matrix with one row per perceptron and one column per input.

Matrix<double> PerceptronLayer::arrange_synaptic_weights(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    const unsigned int inputs_number = count_inputs_number();

    Matrix<double> synaptic_weights(perceptrons_number, inputs_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        for(unsigned int j = 0; j < inputs_number; j++)
        {
            synaptic_weights[i][j] = perceptrons[i].get_synaptic_weight(j);
        }
    }

    return(synaptic_weights);
}

// Returns a single vector with all the parameters of the layer, concatenated perceptron by perceptron.

Vector<double> PerceptronLayer::arrange_parameters(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    if(perceptrons_number == 0)
    {
        Vector<double> parameters;

        return(parameters);
    }
    else
    {
        const unsigned int parameters_number = count_parameters_number();

        Vector<double> parameters(parameters_number);

        const unsigned int perceptron_parameters_number = perceptrons[0].count_parameters_number();

        Vector<double> perceptron_parameters(perceptron_parameters_number);

        unsigned int position = 0;

        for(unsigned int i = 0; i < perceptrons_number; i++)
        {
            perceptron_parameters = perceptrons[i].arrange_parameters();
            parameters.tuck_in(position, perceptron_parameters);
            position += perceptron_parameters_number;
        }

        return(parameters);
    }
}
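
// Note on parameter ordering: arrange_parameters() and set_parameters() use the same layout,
// concatenating the parameters of each perceptron in turn. Assuming each Perceptron arranges
// its bias first and then its synaptic weights, which is the layout relied upon by
// calculate_combination_parameters_Jacobian() below, a layer with 2 inputs and 2 perceptrons
// is stored as
//
//     [ b_0, w_00, w_01, b_1, w_10, w_11 ]
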
// Returns the activation function of the layer, which is taken from its first perceptron.
// An exception is thrown if the layer is empty.

const Perceptron::ActivationFunction& PerceptronLayer::get_activation_function(void) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    if(perceptrons_number > 0)
    {
        return(perceptrons[0].get_activation_function());
    }
    else
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "const Perceptron::ActivationFunction& get_activation_function(void) const method.\n"
               << "PerceptronLayer is empty.\n";

        throw std::logic_error(buffer.str());
    }
}

// Returns a string with the name of the layer activation function.

std::string PerceptronLayer::write_activation_function_name(void) const
{
    const Perceptron::ActivationFunction activation_function = get_activation_function();

    switch(activation_function)
    {
        case Perceptron::Logistic:
        {
            return("Logistic");
        }
        break;

        case Perceptron::HyperbolicTangent:
        {
            return("HyperbolicTangent");
        }
        break;

        case Perceptron::Threshold:
        {
            return("Threshold");
        }
        break;

        case Perceptron::SymmetricThreshold:
        {
            return("SymmetricThreshold");
        }
        break;

        case Perceptron::Linear:
        {
            return("Linear");
        }
        break;

        default:
        {
            std::ostringstream buffer;

            buffer << "OpenNN Exception: PerceptronLayer class.\n"
                   << "std::string write_activation_function_name(void) const method.\n"
                   << "Unknown layer activation function.\n";

            throw std::logic_error(buffer.str());
        }
        break;
    }
}


// Returns the display flag, which controls whether messages from this class are shown on the screen.

const bool& PerceptronLayer::get_display(void) const
{
    return(display);
}

// Sets an empty layer and the default member values.

void PerceptronLayer::set(void)
{
    perceptrons.set();

    set_default();
}


// Sets a new layer from a given vector of perceptrons and the default member values.

void PerceptronLayer::set(const Vector<Perceptron>& new_perceptrons)
{
    perceptrons = new_perceptrons;

    set_default();
}


// Sets a layer with a given number of perceptrons, each with a given number of inputs, and the default member values.

void PerceptronLayer::set(const unsigned int& new_inputs_number, const unsigned int& new_perceptrons_number)
{
    perceptrons.set(new_perceptrons_number);

    for(unsigned int i = 0; i < new_perceptrons_number; i++)
    {
        perceptrons[i].set_inputs_number(new_inputs_number);
    }

    set_default();
}


// Sets the members of this layer from those of another perceptron layer.

void PerceptronLayer::set(const PerceptronLayer& other_perceptron_layer)
{
    perceptrons = other_perceptron_layer.perceptrons;

    display = other_perceptron_layer.display;
}


// Sets a new vector of perceptrons.

void PerceptronLayer::set_perceptrons(const Vector<Perceptron>& new_perceptrons)
{
    perceptrons = new_perceptrons;
}


// Sets a single perceptron at a given index.

void PerceptronLayer::set_perceptron(const unsigned int& i, const Perceptron& new_perceptron)
{
    perceptrons[i] = new_perceptron;
}


// Sets the default member values (display flag set to true).

void PerceptronLayer::set_default(void)
{
    display = true;
}

// Sets a new number of inputs for every perceptron in the layer.

void PerceptronLayer::set_inputs_number(const unsigned int& new_inputs_number)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_inputs_number(new_inputs_number);
    }
}


// Sets a new number of perceptrons, preserving the number of inputs and the activation function of the layer.

void PerceptronLayer::set_perceptrons_number(const unsigned int& new_perceptrons_number)
{
    const unsigned int perceptrons_number = count_perceptrons_number();
    const unsigned int inputs_number = count_inputs_number();

    if(perceptrons_number > 0)
    {
        // Copy the activation function by value before resizing, since resizing invalidates
        // references into the old perceptrons.

        const Perceptron::ActivationFunction activation_function = get_activation_function();
        perceptrons.set(new_perceptrons_number);
        set_activation_function(activation_function);
    }
    else
    {
        perceptrons.set(new_perceptrons_number);
    }

    set_inputs_number(inputs_number);
}

// Sets the biases of all perceptrons in the layer from a vector whose size must equal the number of perceptrons.

void PerceptronLayer::set_biases(const Vector<double>& new_biases)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int new_biases_size = new_biases.size();

    if(new_biases_size != perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void set_biases(const Vector<double>&) method.\n"
               << "Size must be equal to number of perceptrons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_bias(new_biases[i]);
    }
}

// Sets the synaptic weights of the layer from a matrix with one row per perceptron and one column per input.

void PerceptronLayer::set_synaptic_weights(const Matrix<double>& new_synaptic_weights)
{
    const unsigned int inputs_number = count_inputs_number();
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int rows_number = new_synaptic_weights.get_rows_number();
    const unsigned int columns_number = new_synaptic_weights.get_columns_number();

    std::ostringstream buffer;

    if(rows_number != perceptrons_number)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void set_synaptic_weights(const Matrix<double>&) method.\n"
               << "Number of rows must be equal to size of layer.\n";

        throw std::logic_error(buffer.str());
    }
    else if(columns_number != inputs_number)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void set_synaptic_weights(const Matrix<double>&) method.\n"
               << "Number of columns must be equal to number of inputs.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        for(unsigned int j = 0; j < inputs_number; j++)
        {
            perceptrons[i].set_synaptic_weight(j, new_synaptic_weights[i][j]);
        }
    }
}

// Sets all the parameters of the layer from a single vector, using the same ordering as arrange_parameters().

void PerceptronLayer::set_parameters(const Vector<double>& new_parameters)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int parameters_number = count_parameters_number();

    const unsigned int new_parameters_size = new_parameters.size();

    if(new_parameters_size != parameters_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void set_parameters(const Vector<double>&) method.\n"
               << "Size of new parameters vector must be equal to number of parameters.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    if(perceptrons_number != 0)
    {
        const unsigned int perceptron_parameters_number = perceptrons[0].count_parameters_number();

        Vector<double> perceptron_parameters(perceptron_parameters_number);

        unsigned int position = 0;

        for(unsigned int i = 0; i < perceptrons_number; i++)
        {
            perceptron_parameters = new_parameters.take_out(position, perceptron_parameters_number);
            perceptrons[i].set_parameters(perceptron_parameters);
            position += perceptron_parameters_number;
        }
    }
}

// Sets the same activation function for every perceptron in the layer.

void PerceptronLayer::set_activation_function(const Perceptron::ActivationFunction& new_activation_function)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_activation_function(new_activation_function);
    }
}


// Sets the same activation function for every perceptron in the layer from a string with the activation function name.

void PerceptronLayer::set_activation_function(const std::string& new_activation_function)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].set_activation_function(new_activation_function);
    }
}


// Sets a new display flag.

void PerceptronLayer::set_display(const bool& new_display)
{
    display = new_display;
}

// Appends a new input to every perceptron in the layer.

void PerceptronLayer::grow_input(void)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].grow_input();
    }
}


// Appends a new perceptron with the same number of inputs as the layer and parameters initialized to zero.

void PerceptronLayer::grow_perceptron(void)
{
    const unsigned int inputs_number = count_inputs_number();

    Perceptron perceptron(inputs_number);

    perceptron.initialize_parameters(0.0);

    perceptrons.push_back(perceptron);
}


// Removes the input with the given index from every perceptron in the layer.

void PerceptronLayer::prune_input(const unsigned int& index)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].prune_input(index);
    }
}

// Removes the perceptron with the given index from the layer.
// The index is zero-based, consistent with get_perceptron() and set_perceptron().

void PerceptronLayer::prune_perceptron(const unsigned int& index)
{
    perceptrons.erase(perceptrons.begin() + index);
}

// Sets a random architecture in the layer: between 1 and 10 inputs and between 1 and 10 perceptrons.

void PerceptronLayer::initialize_random(void)
{
    const unsigned int inputs_number = rand()%10 + 1;
    const unsigned int perceptrons_number = rand()%10 + 1;

    set(inputs_number, perceptrons_number);

    set_display(true);
}


// Initializes the bias of every perceptron in the layer with a given value.

void PerceptronLayer::initialize_biases(const double& value)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].initialize_bias(value);
    }
}


// Initializes the synaptic weights of every perceptron in the layer with a given value.

void PerceptronLayer::initialize_synaptic_weights(const double& value)
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        perceptrons[i].initialize_synaptic_weights(value);
    }
}


// Initializes all the parameters in the layer (biases and synaptic weights) with a given value.

void PerceptronLayer::initialize_parameters(const double& value)
{
    const unsigned int parameters_number = count_parameters_number();

    const Vector<double> parameters(parameters_number, value);

    set_parameters(parameters);
}

void PerceptronLayer::initialize_parameters_uniform(void)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_uniform();

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_uniform(const double& minimum, const double& maximum)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_uniform(minimum, maximum);

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_uniform(const Vector<double>& minimum, const Vector<double>& maximum)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_uniform(minimum, maximum);

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_uniform(const Vector< Vector<double> >& minimum_maximum)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_uniform(minimum_maximum[0], minimum_maximum[1]);

    set_parameters(parameters);
}

void PerceptronLayer::initialize_parameters_normal(void)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_normal();

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_normal(const double& mean, const double& standard_deviation)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_normal(mean, standard_deviation);

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_normal(const Vector<double>& mean, const Vector<double>& standard_deviation)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_normal(mean, standard_deviation);

    set_parameters(parameters);
}


void PerceptronLayer::initialize_parameters_normal(const Vector< Vector<double> >& mean_standard_deviation)
{
    const unsigned int parameters_number = count_parameters_number();

    Vector<double> parameters(parameters_number);

    parameters.initialize_normal(mean_standard_deviation[0], mean_standard_deviation[1]);

    set_parameters(parameters);
}

// Returns the norm of the vector of layer parameters.

double PerceptronLayer::calculate_parameters_norm(void) const
{
    return(arrange_parameters().calculate_norm());
}

// Returns the combination (bias plus weighted sum of inputs) of every perceptron in the layer for a given vector of inputs.

Vector<double> PerceptronLayer::calculate_combination(const Vector<double>& inputs) const
{
    #ifdef _DEBUG

    const unsigned int inputs_size = inputs.size();

    const unsigned int inputs_number = count_inputs_number();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_combination(const Vector<double>&) const method.\n"
               << "Size of inputs to layer must be equal to number of layer inputs.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector<double> combination(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        combination[i] = perceptrons[i].calculate_combination(inputs);
    }

    return(combination);
}

// Returns the Jacobian of the layer combination function with respect to the inputs,
// which is simply the matrix of synaptic weights.

Matrix<double> PerceptronLayer::calculate_combination_Jacobian(const Vector<double>&) const
{
    return(arrange_synaptic_weights());
}


// Returns the Hessian form of the layer combination function with respect to the inputs.
// Since the combination is linear in the inputs, this is a vector of zero matrices, one per perceptron.

Vector< Matrix<double> > PerceptronLayer::calculate_combination_Hessian_form(const Vector<double>&) const
{
    const unsigned int inputs_number = count_inputs_number();
    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector< Matrix<double> > combination_Hessian_form(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        combination_Hessian_form[i].set(inputs_number, inputs_number, 0.0);
    }

    return(combination_Hessian_form);
}

01115
01116
01117
01118
01122
01123 Vector<double> PerceptronLayer::calculate_combination_parameters(const Vector<double>& inputs, const Vector<double>& parameters) const
01124 {
01125
01126
01127 #ifdef _DEBUG
01128
01129 const unsigned int inputs_size = inputs.size();
01130 const unsigned int inputs_number = count_inputs_number();
01131
01132 if(inputs_size != inputs_number)
01133 {
01134 std::ostringstream buffer;
01135
01136 buffer << "OpenNN Exception: PerceptronLayer class.\n"
01137 << "Vector<double> calculate_combination_parameters(i, const Vector<double>&, const Vector<double>&) const method.\n"
01138 << "Size of layer inputs (" << inputs_size << ") must be equal to number of layer inputs (" << inputs_number << ").\n";
01139
01140 throw std::logic_error(buffer.str());
01141 }
01142
01143 const unsigned int parameters_size = parameters.size();
01144
01145 const unsigned int parameters_number = count_parameters_number();
01146
01147 if(parameters_size != parameters_number)
01148 {
01149 std::ostringstream buffer;
01150
01151 buffer << "OpenNN Exception: PerceptronLayer class.\n"
01152 << "Vector<double> calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
01153 << "Size of layer parameters (" << parameters_size << ") must be equal to number of lasyer parameters (" << parameters_number << ").\n";
01154
01155 throw std::logic_error(buffer.str());
01156 }
01157
01158 #endif
01159
01160 PerceptronLayer copy(*this);
01161
01162 copy.set_parameters(parameters);
01163
01164 return(copy.calculate_combination(inputs));
01165 }
// Returns the Jacobian of the layer combination function with respect to the layer parameters, for a given set of inputs.

Matrix<double> PerceptronLayer::calculate_combination_parameters_Jacobian(const Vector<double>& inputs, const Vector<double>&) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();
    const unsigned int parameters_number = count_parameters_number();
    const unsigned int inputs_number = count_inputs_number();

    Matrix<double> combination_parameters_Jacobian(perceptrons_number, parameters_number, 0.0);

    unsigned int column_index;

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        // Derivative with respect to the bias of perceptron i

        column_index = (1 + inputs_number)*i;
        combination_parameters_Jacobian[i][column_index] = 1.0;

        // Derivatives with respect to the synaptic weights of perceptron i

        for(unsigned int j = 0; j < inputs_number; j++)
        {
            column_index = 1 + (1 + inputs_number)*i + j;
            combination_parameters_Jacobian[i][column_index] = inputs[j];
        }
    }

    return(combination_parameters_Jacobian);
}
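
// Illustrative example: for a layer with 2 inputs and 2 perceptrons, with inputs
// x = (x_0, x_1), the Jacobian built above is the 2 x 6 matrix
//
//     [ 1  x_0  x_1  0  0    0   ]
//     [ 0  0    0    1  x_0  x_1 ]
//
// Row i holds the derivatives of the combination of perceptron i with respect to its own
// bias and synaptic weights; all other entries are zero.
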
// Returns the Hessian form of the layer combination function with respect to the layer parameters.
// Since the combination is linear in the parameters, this is a vector of zero matrices, one per perceptron.

Vector< Matrix<double> > PerceptronLayer::calculate_combination_parameters_Hessian_form(const Vector<double>&, const Vector<double>&) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector< Matrix<double> > combination_parameters_Hessian_form(perceptrons_number);

    const unsigned int parameters_number = count_parameters_number();

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        combination_parameters_Hessian_form[i].set(parameters_number, parameters_number, 0.0);
    }

    return(combination_parameters_Hessian_form);
}

// Returns the activations of the layer perceptrons for a given vector of combinations.

Vector<double> PerceptronLayer::calculate_activation(const Vector<double>& combination) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int combination_size = combination.size();

    if(combination_size != perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_activation(const Vector<double>&) const method.\n"
               << "Size of combination must be equal to number of neurons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    Vector<double> activation(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        activation[i] = perceptrons[i].calculate_activation(combination[i]);
    }

    return(activation);
}

// Returns the activation derivatives of the layer perceptrons for a given vector of combinations.

Vector<double> PerceptronLayer::calculate_activation_derivative(const Vector<double>& combination) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int combination_size = combination.size();

    if(combination_size != perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_activation_derivative(const Vector<double>&) const method.\n"
               << "Size of combination must be equal to number of neurons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    Vector<double> activation_derivative(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        activation_derivative[i] = perceptrons[i].calculate_activation_derivative(combination[i]);
    }

    return(activation_derivative);
}


// Returns the activation second derivatives of the layer perceptrons for a given vector of combinations.

Vector<double> PerceptronLayer::calculate_activation_second_derivative(const Vector<double>& combination) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int combination_size = combination.size();

    if(combination_size != perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_activation_second_derivative(const Vector<double>&) const method.\n"
               << "Size of combination must be equal to number of neurons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    Vector<double> activation_second_derivative(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        activation_second_derivative[i] = perceptrons[i].calculate_activation_second_derivative(combination[i]);
    }

    return(activation_second_derivative);
}

// Arranges a vector of activation derivatives into the corresponding diagonal Jacobian matrix.

Matrix<double> PerceptronLayer::arrange_activation_Jacobian(const Vector<double>& activation_derivative) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    Matrix<double> activation_Jacobian(perceptrons_number, perceptrons_number, 0.0);

    activation_Jacobian.set_diagonal(activation_derivative);

    return(activation_Jacobian);
}


// Arranges a vector of activation second derivatives into the corresponding Hessian form:
// for each perceptron i, a matrix which is zero except for the element (i, i).

Vector< Matrix<double> > PerceptronLayer::arrange_activation_Hessian_form(const Vector<double>& activation_second_derivative) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    Vector< Matrix<double> > activation_Hessian_form(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        activation_Hessian_form[i].set(perceptrons_number, perceptrons_number, 0.0);
        activation_Hessian_form[i][i][i] = activation_second_derivative[i];
    }

    return(activation_Hessian_form);
}

// Returns the outputs of the layer for a given vector of inputs: the activations of the perceptron combinations.

Vector<double> PerceptronLayer::calculate_outputs(const Vector<double>& inputs) const
{
    #ifdef _DEBUG

    const unsigned int inputs_size = inputs.size();

    const unsigned int inputs_number = count_inputs_number();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_outputs(const Vector<double>&) const method.\n"
               << "Size of inputs must be equal to number of inputs to layer.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    return(calculate_activation(calculate_combination(inputs)));
}
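
// In matrix notation the forward pass above is outputs = activation(biases + synaptic_weights * inputs),
// with the biases from arrange_biases() and the weight matrix from arrange_synaptic_weights().
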
// Returns the Jacobian of the layer outputs with respect to the inputs for a given vector of inputs.

Matrix<double> PerceptronLayer::calculate_Jacobian(const Vector<double>& inputs) const
{
    #ifdef _DEBUG

    const unsigned int inputs_number = count_inputs_number();

    const unsigned int inputs_size = inputs.size();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Matrix<double> calculate_Jacobian(const Vector<double>&) const method.\n"
               << "Size of inputs must be equal to number of inputs to layer.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    const Vector<double> combinations = calculate_combination(inputs);

    const Vector<double> activation_derivative = calculate_activation_derivative(combinations);

    const Matrix<double> synaptic_weights = arrange_synaptic_weights();

    return(activation_derivative*synaptic_weights);
}

01462
01463
01464
01465
01468
01469 Vector< Matrix<double> > PerceptronLayer::calculate_Hessian_form(const Vector<double>& inputs) const
01470 {
01471 const unsigned int perceptrons_number = count_perceptrons_number();
01472
01473 const Matrix<double> synaptic_weights = arrange_synaptic_weights();
01474
01475 const Vector<double> combination = calculate_combination(inputs);
01476
01477 const Vector<double> activation_second_derivative = calculate_activation_second_derivative(combination);
01478
01479 Vector< Matrix<double> > activation_Hessian_form(perceptrons_number);
01480
01481 Vector< Matrix<double> > Hessian_form(perceptrons_number);
01482
01483 for(unsigned int i = 0; i < perceptrons_number; i++)
01484 {
01485 activation_Hessian_form[i].set(perceptrons_number, perceptrons_number, 0.0);
01486 activation_Hessian_form[i][i][i] = activation_second_derivative[i];
01487
01488 Hessian_form[i] = synaptic_weights.calculate_transpose().dot(activation_Hessian_form[i]).dot(synaptic_weights);
01489 }
01490
01491 return(Hessian_form);
01492 }
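
// For each output i, the Hessian computed above is W' * A_i * W, where W is the synaptic weight
// matrix and A_i is zero except for the second derivative of activation i at position (i, i).
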
// Returns the outputs of the layer for a given set of inputs and a given set of layer parameters.

Vector<double> PerceptronLayer::calculate_parameters_output(const Vector<double>& inputs, const Vector<double>& parameters) const
{
    #ifdef _DEBUG

    const unsigned int inputs_size = inputs.size();

    const unsigned int inputs_number = count_inputs_number();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_parameters_output(const Vector<double>&, const Vector<double>&) const method.\n"
               << "Size of layer inputs (" << inputs_size << ") must be equal to number of layer inputs (" << inputs_number << ").\n";

        throw std::logic_error(buffer.str());
    }

    const unsigned int parameters_size = parameters.size();

    const unsigned int parameters_number = count_parameters_number();

    if(parameters_size != parameters_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector<double> calculate_parameters_output(const Vector<double>&, const Vector<double>&) const method.\n"
               << "Size of parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    PerceptronLayer copy(*this);

    copy.set_parameters(parameters);

    return(copy.calculate_outputs(inputs));
}

// Returns the Jacobian of the layer outputs with respect to the layer parameters, for a given set of inputs and parameters.

Matrix<double> PerceptronLayer::calculate_parameters_Jacobian(const Vector<double>& inputs, const Vector<double>& parameters) const
{
    #ifdef _DEBUG

    const unsigned int inputs_number = count_inputs_number();
    const unsigned int inputs_size = inputs.size();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Matrix<double> calculate_parameters_Jacobian(const Vector<double>&, const Vector<double>&) const method.\n"
               << "Size of inputs must be equal to number of inputs.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    const Vector<double> combination_parameters = calculate_combination_parameters(inputs, parameters);

    const Matrix<double> combination_parameters_Jacobian = calculate_combination_parameters_Jacobian(inputs, parameters);

    const Vector<double> activation_derivative = calculate_activation_derivative(combination_parameters);

    const Matrix<double> activation_Jacobian = arrange_activation_Jacobian(activation_derivative);

    return(activation_Jacobian.dot(combination_parameters_Jacobian));
}

// Returns the Hessian form of the layer outputs with respect to the layer parameters, for a given set of inputs and parameters.

Vector< Matrix<double> > PerceptronLayer::calculate_parameters_Hessian_form(const Vector<double>& inputs, const Vector<double>& parameters) const
{
    #ifdef _DEBUG

    const unsigned int inputs_number = count_inputs_number();
    const unsigned int inputs_size = inputs.size();

    if(inputs_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "Vector< Matrix<double> > calculate_parameters_Hessian_form(const Vector<double>&, const Vector<double>&) const method.\n"
               << "Size of inputs must be equal to number of inputs to layer.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    const unsigned int perceptrons_number = count_perceptrons_number();

    const Vector<double> combination = calculate_combination(inputs);

    const Matrix<double> combination_parameters_Jacobian = calculate_combination_parameters_Jacobian(inputs, parameters);

    const Vector<double> activation_second_derivative = calculate_activation_second_derivative(combination);

    const Vector< Matrix<double> > activation_Hessian_form = arrange_activation_Hessian_form(activation_second_derivative);

    Vector< Matrix<double> > parameters_Hessian_form(perceptrons_number);

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        parameters_Hessian_form[i] = combination_parameters_Jacobian.calculate_transpose().dot(activation_Hessian_form[i]).dot(combination_parameters_Jacobian);
    }

    return(parameters_Hessian_form);
}

// Returns a string with the mathematical expression of every perceptron in the layer,
// written in terms of the given input and output variable names.

std::string PerceptronLayer::write_expression(const Vector<std::string>& inputs_name, const Vector<std::string>& outputs_name) const
{
    const unsigned int perceptrons_number = count_perceptrons_number();

    #ifdef _DEBUG

    const unsigned int inputs_number = count_inputs_number();
    const unsigned int inputs_name_size = inputs_name.size();

    if(inputs_name_size != inputs_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
               << "Size of inputs name must be equal to number of layer inputs.\n";

        throw std::logic_error(buffer.str());
    }

    const unsigned int outputs_name_size = outputs_name.size();

    if(outputs_name_size != perceptrons_number)
    {
        std::ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "std::string write_expression(const Vector<std::string>&, const Vector<std::string>&) const method.\n"
               << "Size of outputs name must be equal to number of perceptrons.\n";

        throw std::logic_error(buffer.str());
    }

    #endif

    std::ostringstream buffer;

    for(unsigned int i = 0; i < perceptrons_number; i++)
    {
        buffer << perceptrons[i].write_expression(inputs_name, outputs_name[i]);
    }

    return(buffer.str());
}

}
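
// Minimal usage sketch (illustrative only; the variable names and values are arbitrary, and
// only methods defined in this file are used):
//
//     OpenNN::PerceptronLayer layer(2, 3);                 // 2 inputs, 3 perceptrons
//     layer.initialize_parameters(0.1);                    // all biases and weights set to 0.1
//
//     OpenNN::Vector<double> inputs(2, 0.5);               // hypothetical input vector
//     const OpenNN::Vector<double> outputs = layer.calculate_outputs(inputs);
//     const OpenNN::Matrix<double> Jacobian = layer.calculate_Jacobian(inputs);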