
Source Code for Module generic_fns.minimise

###############################################################################
#                                                                             #
# Copyright (C) 2003-2012 Edward d'Auvergne                                   #
#                                                                             #
# This file is part of the program relax.                                     #
#                                                                             #
# relax is free software; you can redistribute it and/or modify               #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation; either version 2 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# relax is distributed in the hope that it will be useful,                    #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with relax; if not, write to the Free Software                        #
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA   #
#                                                                             #
###############################################################################

# Module docstring.
"""Module for model minimisation/optimisation."""

# Python module imports.
from re import search

# relax module imports.
from generic_fns.mol_res_spin import return_spin, spin_loop
from generic_fns import pipes
from relax_errors import RelaxError
import specific_fns
from status import Status; status = Status()
from user_functions.data import Uf_tables; uf_tables = Uf_tables()
from user_functions.objects import Desc_container
 38   
def calc(verbosity=1):
    """Function for calculating the function value.

    @param verbosity:   The amount of information to print. The higher the value, the greater the verbosity.
    @type verbosity:    int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Specific calculate function setup.
    calculate = specific_fns.setup.get_specific_fn('calculate', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Monte Carlo simulation calculation.
    if hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        # Loop over the simulations.
        for i in xrange(cdp.sim_number):
            # Print out.
            if verbosity:
                print("Simulation " + repr(i+1))

            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Calculation.
            calculate(verbosity=verbosity-1, sim_index=i)

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Minimisation.
    else:
        calculate(verbosity=verbosity)

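The back end above is normally reached via the corresponding user function rather than imported directly, and it relies on the relax runtime injecting the current data pipe as the cdp builtin. A minimal, hypothetical sketch of driving it from within a relax session (assuming a data pipe has already been created and fully populated):

    # Minimal sketch: assumes a populated current data pipe and the relax runtime.
    from generic_fns import minimise

    # Single target function evaluation at the current parameter values; if Monte
    # Carlo simulations are active (cdp.sim_state == 1), all simulations are looped over.
    minimise.calc(verbosity=1)
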
def grid_search(lower=None, upper=None, inc=None, constraints=True, verbosity=1):
    """The grid search function.

    @param lower:           The lower bounds of the grid search. The number of elements must equal the number of parameters in the model.
    @type lower:            array of numbers
    @param upper:           The upper bounds of the grid search. The number of elements must equal the number of parameters in the model.
    @type upper:            array of numbers
    @param inc:             The increments for each dimension of the space for the grid search. The number of elements in the array must equal the number of parameters in the model.
    @type inc:              array of int
    @param constraints:     If True, constraints are applied during the grid search (eliminating parts of the grid). If False, no constraints are used.
    @type constraints:      bool
    @param verbosity:       The amount of information to print. The higher the value, the greater the verbosity.
    @type verbosity:        int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Specific grid search function.
    grid_search = specific_fns.setup.get_specific_fn('grid_search', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Monte Carlo simulation grid search.
    if hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        # Loop over the simulations.
        for i in xrange(cdp.sim_number):
            # Print out.
            if verbosity:
                print("Simulation " + repr(i+1))

            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Optimisation.
            grid_search(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity-1, sim_index=i)

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Grid search.
    else:
        grid_search(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity)

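For illustration only, a hedged sketch of a call for a hypothetical three-parameter model, so each bound and increment array has three elements (the numerical values are placeholders, not recommendations):

    # Hypothetical sketch: three model parameters, so three elements per array.
    from generic_fns import minimise

    minimise.grid_search(
        lower=[0.0, 0.0, 0.0],      # lower bound of each grid dimension
        upper=[1.0, 1.0, 10.0],     # upper bound of each grid dimension
        inc=[11, 11, 11],           # number of increments per dimension
        constraints=True,           # drop grid points violating the constraints
        verbosity=1)
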
def minimise(min_algor=None, line_search=None, hessian_mod=None, hessian_type=None, func_tol=None, grad_tol=None, max_iter=None, constraints=True, scaling=True, verbosity=1, sim_index=None):
    """Minimisation function.

    @keyword min_algor:     The minimisation algorithm to use.
    @type min_algor:        str
    @keyword line_search:   The line search algorithm which will only be used in combination with the line search and conjugate gradient methods. This will default to the Moré and Thuente line search.
    @type line_search:      str or None
    @keyword hessian_mod:   The Hessian modification. This will only be used in the algorithms which use the Hessian, and defaults to the Gill, Murray, and Wright modified Cholesky algorithm.
    @type hessian_mod:      str or None
    @keyword hessian_type:  The Hessian type. This will only be used in a few trust region algorithms, and defaults to BFGS.
    @type hessian_type:     str or None
    @keyword func_tol:      The function tolerance which, when reached, terminates optimisation. Setting this to None turns off the check.
    @type func_tol:         None or float
    @keyword grad_tol:      The gradient tolerance which, when reached, terminates optimisation. Setting this to None turns off the check.
    @type grad_tol:         None or float
    @keyword max_iter:      The maximum number of iterations for the algorithm.
    @type max_iter:         int
    @keyword constraints:   If True, constraints are used during optimisation.
    @type constraints:      bool
    @keyword scaling:       If True, diagonal scaling is enabled during optimisation to allow the problem to be better conditioned.
    @type scaling:          bool
    @keyword verbosity:     The amount of information to print. The higher the value, the greater the verbosity.
    @type verbosity:        int
    @keyword sim_index:     The index of the simulation to optimise. This should be None if normal optimisation is desired.
    @type sim_index:        None or int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Re-package the minimisation algorithm, options, and constraints for the generic_minimise() calls within the specific code.
    if constraints:
        min_options = [min_algor]
        min_algor = 'Method of Multipliers'
    else:
        min_options = []
    if line_search != None:
        min_options.append(line_search)
    if hessian_mod != None:
        min_options.append(hessian_mod)
    if hessian_type != None:
        min_options.append(hessian_type)
    min_options = tuple(min_options)

    # Specific minimisation function.
    minimise = specific_fns.setup.get_specific_fn('minimise', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Single Monte Carlo simulation.
    if sim_index != None:
        minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity, sim_index=sim_index)

    # Monte Carlo simulation minimisation.
    elif hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        for i in xrange(cdp.sim_number):
            # Print out.
            if verbosity:
                print("Simulation " + repr(i+1))

            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Optimisation.
            minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity-1, sim_index=i)

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Standard minimisation.
    else:
        minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity)

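The argument re-packaging above is the key step: with constraints enabled, the requested algorithm is pushed into min_options and the outer algorithm becomes the Method of Multipliers, whereas without constraints only the optional line search, Hessian modification, and Hessian type strings are collected. A hedged sketch (the 'newton' algorithm name and the tolerance values are illustrative, not requirements):

    # Illustrative sketch of a constrained Newton minimisation.
    from generic_fns import minimise

    # Internally this becomes min_algor='Method of Multipliers', min_options=('newton',).
    minimise.minimise(min_algor='newton', func_tol=1e-25, grad_tol=None,
                      max_iter=10000000, constraints=True, scaling=True, verbosity=1)
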
def reset_min_stats(data_pipe=None, spin=None):
    """Function for resetting the minimisation statistics.

    @param data_pipe:   The name of the data pipe to reset the minimisation statistics of. This defaults to the current data pipe.
    @type data_pipe:    str
    @param spin:        The spin data container if spin specific data is to be reset.
    @type spin:         SpinContainer
    """

    # The data pipe.
    if data_pipe == None:
        data_pipe = pipes.cdp_name()

    # Get the data pipe.
    dp = pipes.get_pipe(data_pipe)


    # Global minimisation statistics.
    #################################

    # Chi-squared.
    if hasattr(dp, 'chi2'):
        dp.chi2 = None

    # Iteration count.
    if hasattr(dp, 'iter'):
        dp.iter = None

    # Function count.
    if hasattr(dp, 'f_count'):
        dp.f_count = None

    # Gradient count.
    if hasattr(dp, 'g_count'):
        dp.g_count = None

    # Hessian count.
    if hasattr(dp, 'h_count'):
        dp.h_count = None

    # Warning.
    if hasattr(dp, 'warning'):
        dp.warning = None


    # Sequence specific minimisation statistics.
    ############################################

    # Loop over all spins.
    for spin in spin_loop():
        # Chi-squared.
        if hasattr(spin, 'chi2'):
            spin.chi2 = None

        # Iteration count.
        if hasattr(spin, 'iter'):
            spin.iter = None

        # Function count.
        if hasattr(spin, 'f_count'):
            spin.f_count = None

        # Gradient count.
        if hasattr(spin, 'g_count'):
            spin.g_count = None

        # Hessian count.
        if hasattr(spin, 'h_count'):
            spin.h_count = None

        # Warning.
        if hasattr(spin, 'warning'):
            spin.warning = None

def return_conversion_factor(stat_type):
    """Dummy function for returning 1.0.

    @param stat_type:   The name of the statistic. This is unused!
    @type stat_type:    str
    @return:            A conversion factor of 1.0.
    @rtype:             float
    """

    return 1.0

return_data_name_doc = Desc_container("Minimisation statistic data type string matching patterns")
table = uf_tables.add_table(label="table: min data type patterns", caption="Minimisation statistic data type string matching patterns.")
table.add_headings(["Data type", "Object name", "Patterns"])
table.add_row(["Chi-squared statistic", "'chi2'", "'^[Cc]hi2$' or '^[Cc]hi[-_ ][Ss]quare'"])
table.add_row(["Iteration count", "'iter'", "'^[Ii]ter'"])
table.add_row(["Function call count", "'f_count'", "'^[Ff].*[ -_][Cc]ount'"])
table.add_row(["Gradient call count", "'g_count'", "'^[Gg].*[ -_][Cc]ount'"])
table.add_row(["Hessian call count", "'h_count'", "'^[Hh].*[ -_][Cc]ount'"])
return_data_name_doc.add_table(table.label)

def return_data_name(name):
    """Return a unique identifying string for the minimisation parameter.

    @param name:    The minimisation parameter.
    @type name:     str
    @return:        The unique parameter identifying string.
    @rtype:         str
    """

    # Chi-squared.
    if search('^[Cc]hi2$', name) or search('^[Cc]hi[-_ ][Ss]quare', name):
        return 'chi2'

    # Iteration count.
    if search('^[Ii]ter', name):
        return 'iter'

    # Function call count.
    if search('^[Ff].*[ -_][Cc]ount', name):
        return 'f_count'

    # Gradient call count.
    if search('^[Gg].*[ -_][Cc]ount', name):
        return 'g_count'

    # Hessian call count.
    if search('^[Hh].*[ -_][Cc]ount', name):
        return 'h_count'

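The regular expressions above are deliberately loose, so several spellings map to the same object name. A few illustrative (non-exhaustive) matches:

    from generic_fns.minimise import return_data_name

    return_data_name('chi2')             # -> 'chi2'
    return_data_name('Chi-squared')      # -> 'chi2'
    return_data_name('iteration count')  # -> 'iter'
    return_data_name('f_count')          # -> 'f_count'
    return_data_name('Hessian count')    # -> 'h_count'
    return_data_name('unknown')          # -> None (no pattern matches)
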
def return_grace_string(stat_type):
    """Function for returning the Grace string representing the data type for axis labelling.

    @param stat_type:   The name of the statistic to return the Grace string for.
    @type stat_type:    str
    @return:            The Grace string.
    @rtype:             str
    """

    # Get the object name.
    object_name = return_data_name(stat_type)

    # Chi-squared.
    if object_name == 'chi2':
        grace_string = '\\xc\\S2'

    # Iteration count.
    elif object_name == 'iter':
        grace_string = 'Iteration count'

    # Function call count.
    elif object_name == 'f_count':
        grace_string = 'Function call count'

    # Gradient call count.
    elif object_name == 'g_count':
        grace_string = 'Gradient call count'

    # Hessian call count.
    elif object_name == 'h_count':
        grace_string = 'Hessian call count'

    # Return the Grace string.
    return grace_string

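Since the chi-squared label uses Grace's own escape syntax, the value returned for 'chi2' is raw Grace markup rather than plain text. A small sketch:

    from generic_fns.minimise import return_grace_string

    return_grace_string('chi2')       # -> '\\xc\\S2' (Grace markup for a chi-squared axis label)
    return_grace_string('iteration')  # -> 'Iteration count'
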
def return_units(stat_type):
    """Dummy function which returns None as the stats have no units.

    @param stat_type:   The name of the statistic. This is unused!
    @type stat_type:    str
    @return:            Nothing.
    @rtype:             None
    """

    return None

def return_value(spin=None, stat_type=None, sim=None):
    """Function for returning the minimisation statistic corresponding to 'stat_type'.

    @param spin:        The spin data container if spin specific data is to be returned.
    @type spin:         SpinContainer
    @param stat_type:   The name of the statistic to return the value for.
    @type stat_type:    str
    @param sim:         The index of the simulation to return the value for. If None, then the normal value is returned.
    @type sim:          None or int
    @return:            The value of the statistic, together with None as no errors are associated with the statistics.
    @rtype:             tuple of (float or None, None)
    """

    # Get the object name.
    object_name = return_data_name(stat_type)

    # The statistic type does not exist.
    if not object_name:
        raise RelaxError("The statistic type " + repr(stat_type) + " does not exist.")

    # The simulation object name.
    object_sim = object_name + '_sim'

    # Get the global statistic.
    if spin == None:
        # Get the statistic.
        if sim == None:
            if hasattr(cdp, object_name):
                stat = getattr(cdp, object_name)
            else:
                stat = None

        # Get the simulation statistic.
        else:
            if hasattr(cdp, object_sim):
                stat = getattr(cdp, object_sim)[sim]
            else:
                stat = None

    # Residue specific statistic.
    else:
        # Get the statistic.
        if sim == None:
            if hasattr(spin, object_name):
                stat = getattr(spin, object_name)
            else:
                stat = None

        # Get the simulation statistic.
        else:
            if hasattr(spin, object_sim):
                stat = getattr(spin, object_sim)[sim]
            else:
                stat = None

    # Return the statistic (together with None to indicate that there are no errors associated with the statistic).
    return stat, None

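As the final line shows, the statistic is always returned together with None in the error slot. A hypothetical sketch of pulling a spin-specific chi-squared value (the spin ID ':1@N' is only an example and must exist in the current data pipe):

    from generic_fns.minimise import return_value
    from generic_fns.mol_res_spin import return_spin

    spin = return_spin(':1@N')                                  # hypothetical spin ID
    chi2, error = return_value(spin=spin, stat_type='chi2')     # error is always None
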
set_doc = """
Minimisation statistic set details
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This shouldn't really be executed by a user.
"""

def set(val=None, error=None, param=None, scaling=None, spin_id=None):
    """Set global or spin specific minimisation parameters.

    @keyword val:       The parameter values.
    @type val:          number
    @keyword param:     The parameter names.
    @type param:        str
    @keyword scaling:   Unused.
    @type scaling:      float
    @keyword spin_id:   The spin identification string.
    @type spin_id:      str
    """

    # Get the parameter name.
    param_name = return_data_name(param)

    # Global minimisation stats.
    if spin_id == None:
        # Chi-squared.
        if param_name == 'chi2':
            cdp.chi2 = val

        # Iteration count.
        elif param_name == 'iter':
            cdp.iter = val

        # Function call count.
        elif param_name == 'f_count':
            cdp.f_count = val

        # Gradient call count.
        elif param_name == 'g_count':
            cdp.g_count = val

        # Hessian call count.
        elif param_name == 'h_count':
            cdp.h_count = val

    # Residue specific minimisation.
    else:
        # Get the spin.
        spin = return_spin(spin_id)

        # Chi-squared.
        if param_name == 'chi2':
            spin.chi2 = val

        # Iteration count.
        elif param_name == 'iter':
            spin.iter = val

        # Function call count.
        elif param_name == 'f_count':
            spin.f_count = val

        # Gradient call count.
        elif param_name == 'g_count':
            spin.g_count = val

        # Hessian call count.
        elif param_name == 'h_count':
            spin.h_count = val
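
A short sketch of the two branches above, one global and one spin-specific (the spin ID ':1' is purely illustrative and must match a spin in the current data pipe):

    from generic_fns import minimise

    minimise.set(val=0.0, param='chi2')                 # global: stored on the current data pipe (cdp.chi2)
    minimise.set(val=0.0, param='chi2', spin_id=':1')   # spin specific: stored on the matching spin container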