/****************************************************************************************************************/
/*                                                                                                              */
/*   OpenNN: Open Neural Networks Library                                                                       */
/*   www.opennn.cimne.com                                                                                       */
/*                                                                                                              */
/*   R A N D O M   S E A R C H   C L A S S   H E A D E R                                                        */
/*                                                                                                              */
/*   Roberto Lopez                                                                                              */
/*   International Center for Numerical Methods in Engineering (CIMNE)                                          */
/*   Technical University of Catalonia (UPC)                                                                    */
/*   Barcelona, Spain                                                                                           */
/*   E-mail: rlopez@cimne.upc.edu                                                                               */
/*                                                                                                              */
/****************************************************************************************************************/

#ifndef __RANDOMSEARCH_H__
#define __RANDOMSEARCH_H__

// OpenNN includes

#include "../performance_functional/performance_functional.h"

#include "training_algorithm.h"

// TinyXml includes

#include "../../parsers/tinyxml/tinyxml.h"

namespace OpenNN
{

/// This concrete class represents a random search training algorithm
/// for a performance functional of a neural network.

class RandomSearch : public TrainingAlgorithm
{

public:

   // DEFAULT CONSTRUCTOR

   explicit RandomSearch(void);


   // PERFORMANCE FUNCTIONAL CONSTRUCTOR

   explicit RandomSearch(PerformanceFunctional*);


   // XML CONSTRUCTOR

   explicit RandomSearch(TiXmlElement*);


   // DESTRUCTOR

   virtual ~RandomSearch(void);

   // STRUCTURES

   /// This structure contains the training results of the random search method.

   struct RandomSearchResults : public TrainingAlgorithm::Results
   {
      // TRAINING HISTORY

      /// History of the neural network parameters over the training epochs.

      Vector< Vector<double> > parameters_history;

      /// History of the parameters norm over the training epochs.

      Vector<double> parameters_norm_history;

      /// History of the performance functional evaluation over the training epochs.

      Vector<double> evaluation_history;

      /// History of the generalization evaluation over the training epochs.

      Vector<double> generalization_evaluation_history;

      /// History of the random training directions over the training epochs.

      Vector< Vector<double> > training_direction_history;

      /// History of the training rate over the training epochs.

      Vector<double> training_rate_history;

      /// History of the elapsed time over the training epochs.

      Vector<double> elapsed_time_history;

      // FINAL VALUES

      /// Final neural network parameters.

      Vector<double> final_parameters;

      /// Final norm of the parameters.

      double final_parameters_norm;

      /// Final performance functional evaluation.

      double final_evaluation;

      /// Final generalization evaluation.

      double final_generalization_evaluation;

      /// Final random training direction.

      Vector<double> final_training_direction;

      /// Final training rate.

      double final_training_rate;

      /// Total elapsed time of the training process.

      double elapsed_time;

      // METHODS

      void resize_training_history(const unsigned int&);

      std::string to_string(void) const;
   };


   // METHODS

   // Get methods

   const double& get_training_rate_reduction_factor(void) const;
   const unsigned int& get_training_rate_reduction_period(void) const;

   const bool& get_reserve_parameters_history(void) const;
   const bool& get_reserve_parameters_norm_history(void) const;

   const bool& get_reserve_evaluation_history(void) const;

   // Training parameters

   const double& get_warning_parameters_norm(void) const;
   const double& get_warning_training_rate(void) const;

   const double& get_error_parameters_norm(void) const;
   const double& get_error_training_rate(void) const;

   // Stopping criteria

   const double& get_minimum_parameters_increment_norm(void) const;

   const double& get_minimum_performance_increase(void) const;
   const double& get_performance_goal(void) const;
   const unsigned int& get_maximum_generalization_evaluation_decreases(void) const;

   const unsigned int& get_maximum_epochs_number(void) const;
   const double& get_maximum_time(void) const;

   // Reserve training history

   const bool& get_reserve_generalization_evaluation_history(void) const;

   const bool& get_reserve_training_direction_history(void) const;
   const bool& get_reserve_training_rate_history(void) const;
   const bool& get_reserve_elapsed_time_history(void) const;

   // Utilities

   const unsigned int& get_display_period(void) const;

   // Set methods

   void set_default(void);

   void set_training_rate_reduction_factor(const double&);
   void set_training_rate_reduction_period(const unsigned int&);

   void set_reserve_parameters_history(const bool&);
   void set_reserve_parameters_norm_history(const bool&);

   void set_reserve_evaluation_history(const bool&);

   // Training parameters

   void set_warning_parameters_norm(const double&);
   void set_warning_training_rate(const double&);

   void set_error_parameters_norm(const double&);
   void set_error_training_rate(const double&);

   // Stopping criteria

   void set_minimum_parameters_increment_norm(const double&);

   void set_minimum_performance_increase(const double&);
   void set_performance_goal(const double&);
   void set_maximum_generalization_evaluation_decreases(const unsigned int&);

   void set_maximum_epochs_number(const unsigned int&);
   void set_maximum_time(const double&);

   // Reserve training history

   void set_reserve_generalization_evaluation_history(const bool&);

   void set_reserve_training_direction_history(const bool&);
   void set_reserve_training_rate_history(const bool&);
   void set_reserve_elapsed_time_history(const bool&);

   /// Makes the training history of all variables to be reserved or not in memory.

   virtual void set_reserve_all_training_history(const bool&);

   // Utilities

   void set_display_period(const unsigned int&);

   // Training methods

   Vector<double> calculate_training_direction(void);

   RandomSearchResults* perform_training(void);

   std::string write_training_algorithm_type(void) const;

   // Serialization methods

   TiXmlElement* to_XML(void) const;
   void from_XML(TiXmlElement*);

private:

   // MEMBERS

   // TRAINING PARAMETERS

   double first_training_rate;

   double training_rate_reduction_factor;

   unsigned int training_rate_reduction_period;

   /// Value for the parameters norm at which a warning message is written to the screen.

   double warning_parameters_norm;

   /// Training rate value at which a warning message is written to the screen.

   double warning_training_rate;

   /// Value for the parameters norm at which the training process is assumed to fail.

   double error_parameters_norm;

   /// Training rate value at which the training process is assumed to fail.

   double error_training_rate;


   // STOPPING CRITERIA

   /// Norm of the parameters increment vector at which training stops.

   double minimum_parameters_increment_norm;

   /// Minimum performance improvement between two successive epochs. It is used as a stopping criterion.

   double minimum_performance_increase;

   /// Goal value for the performance. It is used as a stopping criterion.

   double performance_goal;

   unsigned int maximum_generalization_evaluation_decreases;

   /// Maximum number of epochs to perform training. It is used as a stopping criterion.

   unsigned int maximum_epochs_number;

   /// Maximum training time. It is used as a stopping criterion.

   double maximum_time;


   // TRAINING HISTORY

   bool reserve_parameters_history;
   bool reserve_parameters_norm_history;

   bool reserve_evaluation_history;

   /// True if the generalization evaluation history vector is to be reserved, false otherwise.

   bool reserve_generalization_evaluation_history;

   /// True if the training direction history matrix is to be reserved, false otherwise.

   bool reserve_training_direction_history;

   bool reserve_training_direction_norm_history;

   /// True if the training rate history vector is to be reserved, false otherwise.

   bool reserve_training_rate_history;

   /// True if the elapsed time history vector is to be reserved, false otherwise.

   bool reserve_elapsed_time_history;


   /// Number of epochs between the training showing progress.

   unsigned int display_period;

};

}

#endif


// OpenNN: Open Neural Networks Library.
// Copyright (C) 2005-2012 Roberto Lopez
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
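
// Usage sketch (illustrative only, not part of the header): a minimal example of how this
// training algorithm might be driven from client code. The RandomSearch members called
// below are declared above; the include file name, the performance functional pointer
// and the ownership of the returned results object are assumptions, not part of this header.
//
//    #include <iostream>
//
//    #include "random_search.h"
//
//    // performance_functional_pointer is assumed to point to a PerformanceFunctional
//    // already associated with a neural network and its training data.
//    OpenNN::RandomSearch random_search(performance_functional_pointer);
//
//    random_search.set_maximum_epochs_number(1000);
//    random_search.set_performance_goal(1.0e-3);
//    random_search.set_reserve_evaluation_history(true);
//
//    OpenNN::RandomSearch::RandomSearchResults* results_pointer = random_search.perform_training();
//
//    std::cout << results_pointer->to_string() << std::endl;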