00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
#include <cmath>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <sstream>
#include <stdexcept>
00022
00023
00024
00025 #include "perceptron.h"
00026 #include "../utilities/vector.h"
00027 #include "../utilities/matrix.h"
00028
00029 namespace OpenNN
00030 {
00031
00036
00037 Perceptron::Perceptron(void)
00038 {
00039 set();
00040 }
00041
00042
00049
00050 Perceptron::Perceptron(const unsigned int& new_inputs_number)
00051 {
00052 set(new_inputs_number);
00053 }
00054
00055
00061
00062 Perceptron::Perceptron(const unsigned int& new_inputs_number, const double& new_parameters_value)
00063 {
00064 set(new_inputs_number, new_parameters_value);
00065 }
00066
00067
00071
00072 Perceptron::Perceptron(const Perceptron& other_perceptron)
00073 {
00074 set(other_perceptron);
00075 }
00076
00077
00080
00081 Perceptron::~Perceptron(void)
00082 {
00083 }
00084
00085
00086
00087
00091
00092 Perceptron& Perceptron::operator=(const Perceptron& other_perceptron)
00093 {
00094 if(this != &other_perceptron)
00095 {
00096 bias = other_perceptron.bias;
00097
00098 synaptic_weights = other_perceptron.synaptic_weights;
00099
00100 activation_function = other_perceptron.activation_function;
00101
00102 display = other_perceptron.display;
00103 }
00104
00105 return(*this);
00106 }
00107
00108
00109
00110
00111
00112
00117
00118 bool Perceptron::operator == (const Perceptron& other_perceptron) const
00119 {
00120 if(bias == other_perceptron.bias
00121 && synaptic_weights == other_perceptron.synaptic_weights
00122 && activation_function == other_perceptron.activation_function
00123 && display == other_perceptron.display)
00124 {
00125 return(true);
00126 }
00127 else
00128 {
00129 return(false);
00130 }
00131 }
00132
00133
00134
00135
00136
00137
00139
00140 const Perceptron::ActivationFunction& Perceptron::get_activation_function(void) const
00141 {
00142 return(activation_function);
00143 }
00144
00145
00146
00147
00149
00150 std::string Perceptron::write_activation_function(void) const
00151 {
00152 switch(activation_function)
00153 {
00154 case Perceptron::Logistic:
00155 {
00156 return("Logistic");
00157 }
00158 break;
00159
00160 case Perceptron::HyperbolicTangent:
00161 {
00162 return("HyperbolicTangent");
00163 }
00164 break;
00165
00166 case Perceptron::Threshold:
00167 {
00168 return("Threshold");
00169 }
00170 break;
00171
00172 case Perceptron::SymmetricThreshold:
00173 {
00174 return("SymmetricThreshold");
00175 }
00176 break;
00177
00178 case Perceptron::Linear:
00179 {
00180 return("Linear");
00181 }
00182 break;
00183
00184 default:
00185 {
00186 std::ostringstream buffer;
00187
00188 buffer << "OpenNN Exception: Perceptron class.\n"
00189 << "std::string get_activation_function(void) const method.\n"
00190 << "Unknown activation function.\n";
00191
00192 throw std::logic_error(buffer.str());
00193 }
00194 break;
00195 }
00196 }
00197
00198
00199
00200
00202
00203 unsigned int Perceptron::count_inputs_number(void) const
00204 {
00205 return(synaptic_weights.size());
00206 }
00207
00208
00209
00210
00212
00213 const double& Perceptron::get_bias(void) const
00214 {
00215 return(bias);
00216 }
00217
00218
00219
00220
00222
00223 const Vector<double>& Perceptron::arrange_synaptic_weights(void) const
00224 {
00225 return(synaptic_weights);
00226 }
00227
00228
00229
00230
00233
00234 const double& Perceptron::get_synaptic_weight(const unsigned int& synaptic_weight_index) const
00235 {
00236
00237
00238 #ifdef _DEBUG
00239
00240 const unsigned int inputs_number = count_inputs_number();
00241
00242 if(synaptic_weight_index >= inputs_number)
00243 {
00244 std::ostringstream buffer;
00245
00246 buffer << "OpenNN Exception: Perceptron class.\n"
00247 << "double get_synaptic_weight(const unsigned int&) const method.\n"
00248 << "Index of synaptic weight must be less than number of inputs.\n";
00249
00250 throw std::logic_error(buffer.str());
00251 }
00252
00253 #endif
00254
00255
00256
00257 return(synaptic_weights[synaptic_weight_index]);
00258 }
00259
00260
00261
00262
00265
00266 const bool& Perceptron::get_display(void) const
00267 {
00268 return(display);
00269 }
00270
00271
00272
00273
00275
00276 void Perceptron::set(void)
00277 {
00278 initialize_bias_normal(0.0, 0.2);
00279
00280 synaptic_weights.set();
00281
00282 activation_function = HyperbolicTangent;
00283
00284 display = true;
00285 }
00286
00287
00288
00289
00293
00294 void Perceptron::set(const unsigned int& new_inputs_number)
00295 {
00296
00297
00298 activation_function = HyperbolicTangent;
00299
00300 initialize_bias_normal(0.0, 0.2);
00301
00302 synaptic_weights.set(new_inputs_number);
00303 initialize_synaptic_weights_normal(0.0, 0.2);
00304
00305 display = true;
00306 }
00307
00308
00309
00310
00314
00315 void Perceptron::set(const unsigned int& new_inputs_number, const double& new_parameters_value)
00316 {
00317 bias = new_parameters_value;
00318 synaptic_weights.set(new_inputs_number, new_parameters_value);
00319
00320 activation_function = HyperbolicTangent;
00321
00322 display = true;
00323 }
00324
00325
00326
00327
00330
00331 void Perceptron::set(const Perceptron& other_perceptron)
00332 {
00333 bias = other_perceptron.bias;
00334
00335 synaptic_weights = other_perceptron.synaptic_weights;
00336
00337 activation_function = other_perceptron.activation_function;
00338
00339 display = other_perceptron.display;
00340 }
00341
00342
00343
00344
00347
00348 void Perceptron::set_activation_function(const Perceptron::ActivationFunction& new_activation_function)
00349 {
00350 activation_function = new_activation_function;
00351 }
00352
00353
00354
00355
00359
00360 void Perceptron::set_activation_function(const std::string& new_activation_function_name)
00361 {
00362 if(new_activation_function_name == "Logistic")
00363 {
00364 activation_function = Logistic;
00365 }
00366 else if(new_activation_function_name == "HyperbolicTangent")
00367 {
00368 activation_function = HyperbolicTangent;
00369 }
00370 else if(new_activation_function_name == "Threshold")
00371 {
00372 activation_function = Threshold;
00373 }
00374 else if(new_activation_function_name == "SymmetricThreshold")
00375 {
00376 activation_function = SymmetricThreshold;
00377 }
00378 else if(new_activation_function_name == "Linear")
00379 {
00380 activation_function = Linear;
00381 }
00382 else
00383 {
00384 std::ostringstream buffer;
00385
00386 buffer << "OpenNN Exception: Perceptron class.\n"
00387 << "void set_activation_function(const std::string&) method.\n"
00388 << "Unknown activation function: " << new_activation_function_name << ".\n";
00389
00390 throw std::logic_error(buffer.str());
00391 }
00392 }
00393
00394
00395
00396
00399
00400 void Perceptron::set_bias(const double& new_bias)
00401 {
00402 bias = new_bias;
00403 }
00404
00405
00406
00407
00410
00411 void Perceptron::set_synaptic_weights(const Vector<double>& new_synaptic_weights)
00412 {
00413
00414
00415 #ifdef _DEBUG
00416
00417 const unsigned int inputs_number = count_inputs_number();
00418
00419 if(new_synaptic_weights.size() != inputs_number)
00420 {
00421 std::ostringstream buffer;
00422
00423 buffer << "OpenNN Exception: Perceptron class.\n"
00424 << "void set_synaptic_weights(const Vector<double>&) method.\n"
00425 << "Size of synaptic weights vector must be equal to number of inputs.\n";
00426
00427 throw std::logic_error(buffer.str());
00428 }
00429
00430 #endif
00431
00432
00433
00434 synaptic_weights = new_synaptic_weights;
00435 }
00436
00437
00438
00439
00443
00444 void Perceptron::set_synaptic_weight(const unsigned int& synaptic_weight_index, const double& new_synaptic_weight)
00445 {
00446
00447
00448 #ifdef _DEBUG
00449
00450 const unsigned int inputs_number = count_inputs_number();
00451
00452 if(synaptic_weight_index >= inputs_number)
00453 {
00454 std::ostringstream buffer;
00455
00456 buffer << "OpenNN Exception: Perceptron class.\n"
00457 << "void set_synaptic_weight(const unsigned int&, const double&) method.\n"
00458 << "Index of synaptic weight must be less than number of inputs.\n";
00459
00460 throw std::logic_error(buffer.str());
00461 }
00462
00463 #endif
00464
00465
00466
00467 synaptic_weights[synaptic_weight_index] = new_synaptic_weight;
00468 }
00469
00470
00471
00472
00477
00478 void Perceptron::set_display(const bool& new_display)
00479 {
00480 display = new_display;
00481 }
00482
00483
00484
00485
00490
00491 void Perceptron::set_inputs_number(const unsigned int& new_inputs_number)
00492 {
00493 initialize_bias_normal(0.0,1.0);
00494
00495 synaptic_weights.set(new_inputs_number);
00496 initialize_synaptic_weights_normal(0.0,1.0);
00497 }
00498
00499
00500
00501
00503
00504 unsigned int Perceptron::count_parameters_number(void) const
00505 {
00506 const unsigned int inputs_number = count_inputs_number();
00507
00508 return(1 + inputs_number);
00509 }
00510
00511
00512
00513
00515
00516 Vector<double> Perceptron::arrange_parameters(void) const
00517 {
00518 const unsigned int parameters_number = count_parameters_number();
00519
00520 Vector<double> parameters(parameters_number);
00521
00522 parameters[0] = bias;
00523
00524 const unsigned int inputs_number = count_inputs_number();
00525
00526 for(unsigned int i = 0; i < inputs_number; i++)
00527 {
00528 parameters[(unsigned int)1+i] = synaptic_weights[i];
00529 }
00530
00531 return(parameters);
00532 }
00533
00534
00535
00536
00539
00540 void Perceptron::set_parameters(const Vector<double>& new_parameters)
00541 {
00542 const unsigned int inputs_number = count_inputs_number();
00543
00544
00545
00546 #ifdef _DEBUG
00547
00548 const unsigned int size = new_parameters.size();
00549
00550 if(size != 1+inputs_number)
00551 {
00552 std::ostringstream buffer;
00553
00554 buffer << "OpenNN Exception: Perceptron class.\n"
00555 << "void set_parameters(const Vector<double>&) method.\n"
00556 << "Size must be equal to one plus number of inputs.\n";
00557
00558 throw std::logic_error(buffer.str());
00559 }
00560
00561 #endif
00562
00563 bias = new_parameters[0];
00564
00565 for(unsigned int i = 0; i < inputs_number; i++)
00566 {
00567 synaptic_weights[i] = new_parameters[i+1];
00568 }
00569 }
00570
00571
00572
00573
00576
00577 void Perceptron::initialize_bias(const double& value)
00578 {
00579 bias = value;
00580 }
00581
00582
00583
00584
00588
00589 void Perceptron::initialize_bias_uniform(const double& minimum, const double& maximum)
00590 {
00591
00592
00593 #ifdef _DEBUG
00594
00595 if(minimum > maximum)
00596 {
00597 std::ostringstream buffer;
00598
00599 buffer << "OpenNN Exception: Perceptron class.\n"
00600 << "initialize_bias_uniform(const double&, const double&) method.\n"
00601 << "Minimum value must be less than maximum value.\n";
00602
00603 throw std::logic_error(buffer.str());
00604 }
00605
00606 #endif
00607
00608 bias = calculate_random_uniform(minimum, maximum);
00609 }
00610
00611
00612
00613
00616
00617 void Perceptron::initialize_synaptic_weights(const double& value)
00618 {
00619 synaptic_weights.initialize(value);
00620 }
00621
00622
00623
00624
00628
00629 void Perceptron::initialize_synaptic_weights_uniform(const double& minimum, const double& maximum)
00630 {
00631 synaptic_weights.initialize_uniform(minimum, maximum);
00632 }
00633
00634
00635
00636
00640
00641 void Perceptron::initialize_bias_normal(const double& mean, const double& standard_deviation)
00642 {
00643
00644
00645 #ifdef _DEBUG
00646
00647 if(standard_deviation < 0.0)
00648 {
00649 std::ostringstream buffer;
00650
00651 buffer << "OpenNN Exception: Perceptron class.\n"
00652 << "initialize_bias_normal(const double&, const double&) method.\n"
00653 << "Standard deviation must be equal or greater than zero.\n";
00654
00655 throw std::logic_error(buffer.str());
00656 }
00657
00658 #endif
00659
00660 bias = calculate_random_normal(mean, standard_deviation);
00661 }
00662
00663
00664
00665
00669
00670 void Perceptron::initialize_synaptic_weights_normal(const double& mean, const double& standard_deviation)
00671 {
00672 synaptic_weights.initialize_normal(mean, standard_deviation);
00673 }
00674
00675
00676
00677
00680
00681 void Perceptron::initialize_parameters(const double& value)
00682 {
00683 bias = value;
00684 synaptic_weights.initialize(value);
00685 }
00686
00687
00688
00689
00693
00694 double Perceptron::calculate_combination(const Vector<double>& inputs) const
00695 {
00696
00697
00698 #ifdef _DEBUG
00699
00700 const unsigned int inputs_number = count_inputs_number();
00701
00702 if(inputs_number == 0)
00703 {
00704 std::ostringstream buffer;
00705
00706 buffer << "OpenNN Exception: Perceptron class.\n"
00707 << "calculate_combination(const Vector<double>&) method.\n"
00708 << "Number of inputs must be greater than zero.\n";
00709
00710 throw std::logic_error(buffer.str());
00711 }
00712
00713 const unsigned int inputs_size = inputs.size();
00714
00715 if(inputs_size != inputs_number)
00716 {
00717 std::ostringstream buffer;
00718
00719 buffer << "OpenNN Exception: Perceptron class.\n"
00720 << "double calculate_combination(const Vector<double>&) method.\n"
00721 << "Size of inputs (" << inputs_size << ") must be equal to number of inputs (" << inputs_number << ").\n";
00722
00723 throw std::logic_error(buffer.str());
00724 }
00725
00726 #endif
00727
00728
00729
00730 return(bias + synaptic_weights.dot(inputs));
00731 }
00732
00733
00734
00735
00739
00740 double Perceptron::calculate_combination_parameters(const Vector<double>& inputs, const Vector<double>& parameters) const
00741 {
00742
00743
00744 #ifdef _DEBUG
00745
00746 std::ostringstream buffer;
00747
00748 const unsigned int inputs_size = inputs.size();
00749 const unsigned int inputs_number = count_inputs_number();
00750
00751 if(inputs_size != inputs_number)
00752 {
00753 buffer << "OpenNN Exception: Perceptron class.\n"
00754 << "double calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
00755 << "Size of inputs must be equal to number of inputs.\n";
00756
00757 throw std::logic_error(buffer.str());
00758 }
00759
00760 const unsigned int parameters_size = parameters.size();
00761
00762 const unsigned int parameters_number = count_parameters_number();
00763
00764 if(parameters_size != parameters_number)
00765 {
00766 buffer << "OpenNN Exception: Perceptron class.\n"
00767 << "double calculate_combination_parameters(const Vector<double>&, const Vector<double>&) const method.\n"
00768 << "Size of potential parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";
00769
00770 throw std::logic_error(buffer.str());
00771 }
00772
00773 #endif
00774
00775
00776
00777 Perceptron copy(*this);
00778
00779
00780
00781 copy.set_parameters(parameters);
00782
00783
00784
00785 return(copy.calculate_combination(inputs));
00786 }
00787
00788
00789
00790
00794
00795 double Perceptron::calculate_activation(const double& combination) const
00796 {
00797 switch(activation_function)
00798 {
00799 case Perceptron::Logistic:
00800 {
00801 return(1.0/(1.0 + exp(-combination)));
00802 }
00803 break;
00804
00805 case Perceptron::HyperbolicTangent:
00806 {
00807 return(1.0-2.0/(exp(2.0*combination)+1.0));
00808 }
00809 break;
00810
00811 case Perceptron::Threshold:
00812 {
00813 if(combination < 0)
00814 {
00815 return(0.0);
00816 }
00817 else
00818 {
00819 return(1.0);
00820 }
00821 }
00822 break;
00823
00824 case Perceptron::SymmetricThreshold:
00825 {
00826 if(combination < 0)
00827 {
00828 return(-1.0);
00829 }
00830 else
00831 {
00832 return(1.0);
00833 }
00834 }
00835 break;
00836
00837 case Perceptron::Linear:
00838 {
00839 return(combination);
00840 }
00841 break;
00842
00843 default:
00844 {
00845 std::ostringstream buffer;
00846
00847 buffer << "OpenNN Exception: Perceptron class.\n"
00848 << "double calculate_activation(const double&) const method.\n"
00849 << "Unknown activation function.\n";
00850
00851 throw std::logic_error(buffer.str());
00852 }
00853 break;
00854 }
00855 }
00856
00857
00858
00859
00863
00864 double Perceptron::calculate_activation_derivative(const double& combination) const
00865 {
00866 switch(activation_function)
00867 {
00868 case Perceptron::Logistic:
00869 {
00870 const double exponent = exp(-combination);
00871
00872 return(exponent/((1.0+exponent)*(1.0+exponent)));
00873 }
00874 break;
00875
00876 case Perceptron::HyperbolicTangent:
00877 {
00878 return(1.0 - pow(tanh(combination), 2));
00879 }
00880 break;
00881
00882 case Perceptron::Threshold:
00883 {
00884 if(combination != 0.0)
00885 {
00886 return(0.0);
00887 }
00888 else
00889 {
00890 std::ostringstream buffer;
00891
00892 buffer << "OpenNN Exception: Perceptron class.\n"
00893 << "double calculate_activation_derivative(const double&) const method.\n"
00894 << "Threshold activation function is not derivable.\n";
00895
00896 throw std::logic_error(buffer.str());
00897 }
00898 }
00899 break;
00900
00901 case Perceptron::SymmetricThreshold:
00902 {
00903 if(combination != 0.0)
00904 {
00905 return(0.0);
00906 }
00907 else
00908 {
00909 std::ostringstream buffer;
00910
00911 buffer << "OpenNN Exception: Perceptron class.\n"
00912 << "double calculate_activation_derivative(const double&) const method.\n"
00913 << "Symmetric threshold activation function is not derivable.\n";
00914
00915 throw std::logic_error(buffer.str());
00916 }
00917 }
00918 break;
00919
00920 case Perceptron::Linear:
00921 {
00922 return(1.0);
00923 }
00924 break;
00925
00926 default:
00927 {
00928 std::ostringstream buffer;
00929
00930 buffer << "OpenNN Exception: Perceptron class.\n"
00931 << "double calculate_activation_derivative(const double&) const method.\n"
00932 << "Unknown activation function.\n";
00933
00934 throw std::logic_error(buffer.str());
00935 }
00936 break;
00937 }
00938 }
00939
00940
00941
00942
00946
00947 double Perceptron::calculate_activation_second_derivative(const double& combination) const
00948 {
00949 switch(activation_function)
00950 {
00951 case Perceptron::Logistic:
00952 {
00953 const double exponent = exp(combination);
00954
00955 return(-exponent*(exponent-1.0)/((exponent+1.0)*(exponent+1.0)*(exponent+1.0)));
00956 }
00957 break;
00958
00959 case Perceptron::HyperbolicTangent:
00960 {
00961 return(-2.0*tanh(combination)*(1.0 - pow(tanh(combination),2)));
00962 }
00963 break;
00964
00965 case Perceptron::Threshold:
00966 {
00967 if(combination != 0.0)
00968 {
00969 return(0.0);
00970 }
00971 else
00972 {
00973 std::ostringstream buffer;
00974
00975 buffer << "OpenNN Exception: Perceptron class.\n"
00976 << "double calculate_activation_second_derivative(const double&) const method.\n"
00977 << "Threshold activation function is not derivable.\n";
00978
00979 throw std::logic_error(buffer.str());
00980 }
00981 }
00982 break;
00983
00984 case Perceptron::SymmetricThreshold:
00985 {
00986 if(combination != 0.0)
00987 {
00988 return(0.0);
00989 }
00990 else
00991 {
00992 std::ostringstream buffer;
00993
00994 buffer << "OpenNN Exception: Perceptron class.\n"
00995 << "double calculate_activation_second_derivative(const double&) const method.\n"
00996 << "Symmetric threshold activation function is not derivable.\n";
00997
00998 throw std::logic_error(buffer.str());
00999 }
01000 }
01001 break;
01002
01003 case Perceptron::Linear:
01004 {
01005 return(0.0);
01006 }
01007 break;
01008
01009 default:
01010 {
01011 std::ostringstream buffer;
01012
01013 buffer << "OpenNN Exception: Perceptron class.\n"
01014 << "double calculate_activation_second_derivative(const double&) const method.\n"
01015 << "Unknown activation function.\n";
01016
01017 throw std::logic_error(buffer.str());
01018 }
01019 break;
01020 }
01021 }
01022
01023
01024
01025
01029
01030 double Perceptron::calculate_outputs(const Vector<double>& inputs) const
01031 {
01032
01033
01034 #ifdef _DEBUG
01035
01036 const unsigned int size = inputs.size();
01037 const unsigned int inputs_number = count_inputs_number();
01038
01039 if(size != inputs_number)
01040 {
01041 std::ostringstream buffer;
01042
01043 buffer << "OpenNN Exception: Perceptron class.\n"
01044 << "double calculate_outputs(const Vector<double>&) const method.\n"
01045 << "Size must be equal to number of inputs.\n";
01046
01047 throw std::logic_error(buffer.str());
01048 }
01049
01050 #endif
01051
01052
01053
01054 return(calculate_activation(calculate_combination(inputs)));
01055 }
01056
01057
01058
01059
01064
01065 double Perceptron::calculate_parameters_output(const Vector<double>& inputs, const Vector<double>& parameters) const
01066 {
01067
01068
01069 #ifdef _DEBUG
01070
01071 const unsigned int inputs_size = inputs.size();
01072 const unsigned int inputs_number = count_inputs_number();
01073
01074 if(inputs_size != inputs_number)
01075 {
01076 std::ostringstream buffer;
01077
01078 buffer << "OpenNN Exception: Perceptron class.\n"
01079 << "double calculate_parameters_output(const Vector<double>&, const Vector<double>&) const method.\n"
01080 << "Size of inputs must be equal to number of inputs.\n";
01081
01082 throw std::logic_error(buffer.str());
01083 }
01084
01085 const unsigned int parameters_size = parameters.size();
01086
01087 const unsigned int parameters_number = count_parameters_number();
01088
01089 if(parameters_size != parameters_number)
01090 {
01091 std::ostringstream buffer;
01092
01093 buffer << "OpenNN Exception: Perceptron class.\n"
01094 << "double calculate_parameters_output(const Vector<double>&, const Vector<double>&) const method.\n"
01095 << "Size of potential parameters (" << parameters_size << ") must be equal to number of parameters (" << parameters_number << ").\n";
01096
01097 throw std::logic_error(buffer.str());
01098 }
01099
01100 #endif
01101
01102 Perceptron copy(*this);
01103
01104 copy.set_parameters(parameters);
01105
01106 return(copy.calculate_outputs(inputs));
01107 }
01108
01109
01110
01111
01114
01115 Vector<double> Perceptron::calculate_gradient(const Vector<double>& inputs) const
01116 {
01117
01118
01119 #ifdef _DEBUG
01120
01121 const unsigned int size = inputs.size();
01122 const unsigned int inputs_number = count_inputs_number();
01123
01124 if(size != inputs_number)
01125 {
01126 std::ostringstream buffer;
01127
01128 buffer << "OpenNN Exception: Perceptron class.\n"
01129 << "Vector<double> calculate_gradient(const Vector<double>&) const method.\n"
01130 << "Size must be equal to number of inputs.\n";
01131
01132 throw std::logic_error(buffer.str());
01133 }
01134
01135 #endif
01136
01137
01138
01139 const double combination = calculate_combination(inputs);
01140
01141 const double activation_derivative = calculate_activation_derivative(combination);
01142
01143 return(synaptic_weights*activation_derivative);
01144 }
01145
01146
01147
01148
01151
01152 Vector<double> Perceptron::calculate_parameters_gradient(const Vector<double>& inputs) const
01153 {
01154 const unsigned int inputs_number = count_inputs_number();
01155
01156
01157
01158 #ifdef _DEBUG
01159
01160 const unsigned int size = inputs.size();
01161
01162 if(size != inputs_number)
01163 {
01164 std::ostringstream buffer;
01165
01166 buffer << "OpenNN Exception: Perceptron class.\n"
01167 << "double calculate_parameters_gradient(const Vector<double>&) const method.\n"
01168 << "Size must be equal to number of inputs.\n";
01169
01170 throw std::logic_error(buffer.str());
01171 }
01172
01173 #endif
01174
01175
01176
01177 const double combination = calculate_combination(inputs);
01178
01179 const double activation_derivative = calculate_activation_derivative(combination);
01180
01181 Vector<double> parameters_gradient(1+inputs_number);
01182
01183
01184
01185 parameters_gradient[0] = activation_derivative;
01186
01187
01188
01189 for(unsigned int i = 1; i < 1+inputs_number; i++)
01190 {
01191 parameters_gradient[i] = inputs[i-1]*activation_derivative;
01192 }
01193
01194 return(parameters_gradient);
01195 }
01196
01197
01198
01199
01201
01202 Vector<double> Perceptron::calculate_combination_gradient(const Vector<double>&) const
01203 {
01204 return(synaptic_weights);
01205 }
01206
01207
01208
01209
01212
01213 Vector<double> Perceptron::calculate_combination_parameters_gradient(const Vector<double>& inputs) const
01214 {
01215 const unsigned int inputs_number = count_inputs_number();
01216
01217
01218
01219 #ifdef _DEBUG
01220
01221 const unsigned int size = inputs.size();
01222
01223 if(size != inputs_number)
01224 {
01225 std::ostringstream buffer;
01226
01227 buffer << "OpenNN Exception: Perceptron class.\n"
01228 << "double calculate_combination_parameters_gradient(const Vector<double>&) const method.\n"
01229 << "Size must be equal to number of inputs.\n";
01230
01231 throw std::logic_error(buffer.str());
01232 }
01233
01234 #endif
01235
01236
01237
01238 Vector<double> combination_parameters_gradient(1+inputs_number);
01239
01240
01241
01242 combination_parameters_gradient[0] = 1.0;
01243
01244
01245
01246 for(unsigned int i = 1; i < 1+inputs_number; i++)
01247 {
01248 combination_parameters_gradient[i] = inputs[i-1];
01249 }
01250
01251 return(combination_parameters_gradient);
01252 }
01253
01254
01255
01256
01259
01260 Matrix<double> Perceptron::calculate_Hessian(const Vector<double>& inputs) const
01261 {
01262
01263
01264 #ifdef _DEBUG
01265
01266 const unsigned int inputs_number = count_inputs_number();
01267 const unsigned int inputs_size = inputs.size();
01268
01269 if(inputs_size != inputs_number)
01270 {
01271 std::ostringstream buffer;
01272
01273 buffer << "OpenNN Exception: Perceptron class.\n"
01274 << "Matrix<double> calculate_Hessian(const Vector<double>&, const Vector<double>&) const method.\n"
01275 << "Size of inputs must be equal to number of inputs.\n";
01276
01277 throw std::logic_error(buffer.str());
01278 }
01279
01280 #endif
01281
01282 const double combination = calculate_combination(inputs);
01283 const double activation_second_derivative = calculate_activation_second_derivative(combination);
01284
01285 return(synaptic_weights.direct(synaptic_weights)*activation_second_derivative);
01286 }
01287
01288
01289
01290
01293
01294 Matrix<double> Perceptron::calculate_parameters_Hessian(const Vector<double>& inputs) const
01295 {
01296
01297
01298 #ifdef _DEBUG
01299
01300 const unsigned int inputs_size = inputs.size();
01301 const unsigned int inputs_number = count_inputs_number();
01302
01303 if(inputs_size != inputs_number)
01304 {
01305 std::ostringstream buffer;
01306
01307 buffer << "OpenNN Exception: Perceptron class.\n"
01308 << "Matrix<double> calculate_parameters_Hessian(const Vector<double>&) const method.\n"
01309 << "Size of inputs must be equal to number of inputs.\n";
01310
01311 throw std::logic_error(buffer.str());
01312 }
01313
01314 #endif
01315
01316 const double combination = calculate_combination(inputs);
01317 const double activation_second_derivative = calculate_activation_second_derivative(combination);
01318
01319 const unsigned int parameters_number = count_parameters_number();
01320
01321 Matrix<double> parameters_Hessian(parameters_number, parameters_number);
01322
01323
01324
01325 parameters_Hessian[0][0] = activation_second_derivative;
01326
01327
01328
01329 for(unsigned int i = 1; i < parameters_number; i++)
01330 {
01331 parameters_Hessian[0][i] = activation_second_derivative*inputs[i-1];
01332 }
01333
01334
01335
01336 for(unsigned int i = 1; i < parameters_number; i++)
01337 {
01338 for(unsigned int j = 1; j < parameters_number; j++)
01339 {
01340 parameters_Hessian[i][j] = activation_second_derivative*inputs[i-1]*inputs[j-1];
01341 }
01342 }
01343
01344
01345
01346 for(unsigned int i = 0; i < parameters_number; i++)
01347 {
01348 for(unsigned int j = 0; j < i; j++)
01349 {
01350 parameters_Hessian[i][j] = parameters_Hessian[j][i];
01351 }
01352 }
01353
01354 return(parameters_Hessian);
01355 }
01356
01357
01358
01359
01361
01362 Matrix<double> Perceptron::calculate_combination_Hessian(const Vector<double>&) const
01363 {
01364 const unsigned int inputs_number = count_inputs_number();
01365
01366 const Matrix<double> combination_Hessian(inputs_number, inputs_number, 0.0);
01367
01368 return(combination_Hessian);
01369 }
01370
01371
01372
01373
01375
01376 Matrix<double> Perceptron::calculate_combination_parameters_Hessian(const Vector<double>&) const
01377 {
01378 const unsigned int parameters_number = count_parameters_number();
01379
01380 const Matrix<double> Hessian(parameters_number, parameters_number, 0.0);
01381
01382 return(Hessian);
01383 }
01384
01385
01386
01387
01390
01391 void Perceptron::grow_input(void)
01392 {
01393 synaptic_weights.push_back(0.0);
01394 }
01395
01396
01397
01398
01401
01402 void Perceptron::prune_input(const unsigned int& index)
01403 {
01404 synaptic_weights.erase(synaptic_weights.begin()+index-1);
01405 }
01406
01407
01408
01409
01413
01414 std::string Perceptron::write_expression(const Vector<std::string>& inputs_name, const std::string& output_name) const
01415 {
01416 const unsigned int inputs_number = count_inputs_number();
01417
01418 std::string activation_function_name = write_activation_function();
01419
01420 std::ostringstream buffer;
01421
01422 buffer << output_name << "=" << activation_function_name << "("
01423 << bias;
01424
01425 for(unsigned int i = 0; i < inputs_number; i++)
01426 {
01427 if(synaptic_weights[i] >= 0)
01428 {
01429 buffer << "+";
01430 }
01431
01432 buffer << synaptic_weights[i] << "*" << inputs_name[i];
01433 }
01434
01435 buffer << ");\n";
01436
01437 return(buffer.str());
01438 }
01439
01440
01441
01442
01446
01447 double Perceptron::calculate_random_uniform(const double& minimum, const double& maximum) const
01448 {
01449 const double random = (double)rand()/(RAND_MAX+1.0);
01450
01451 const double random_uniform = minimum + (maximum-minimum)*random;
01452
01453 return(random_uniform);
01454 }
01455
01456
01457
01458
01462
01463 double Perceptron::calculate_random_normal(const double& mean, const double& standard_deviation) const
01464 {
01465 double random_uniform_1;
01466
01467 do
01468 {
01469 random_uniform_1 = (double)rand()/(RAND_MAX+1.0);
01470
01471 }while(random_uniform_1 == 0.0);
01472
01473 const double random_uniform_2 = (double)rand()/(RAND_MAX+1.0);
01474
01475
01476
01477 const double pi = 4.0*atan(1.0);
01478
01479 const double random_normal = mean + sqrt(-2.0*log(random_uniform_1))*sin(2.0*pi*random_uniform_2)*standard_deviation;
01480
01481 return(random_normal);
01482 }
01483
01484 }
01485
01486
01487
01488
01489
01490
01491
01492
01493
01494
01495
01496
01497
01498
01499
01500
01501
01502
01503