Package generic_fns :: Module minimise
[hide private]
[frames] | [no frames]

Source Code for Module generic_fns.minimise

  1  ############################################################################### 
  2  #                                                                             # 
  3  # Copyright (C) 2003-2012 Edward d'Auvergne                                   # 
  4  #                                                                             # 
  5  # This file is part of the program relax (http://www.nmr-relax.com).          # 
  6  #                                                                             # 
  7  # This program is free software: you can redistribute it and/or modify        # 
  8  # it under the terms of the GNU General Public License as published by        # 
  9  # the Free Software Foundation, either version 3 of the License, or           # 
 10  # (at your option) any later version.                                         # 
 11  #                                                                             # 
 12  # This program is distributed in the hope that it will be useful,             # 
 13  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
 14  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
 15  # GNU General Public License for more details.                                # 
 16  #                                                                             # 
 17  # You should have received a copy of the GNU General Public License           # 
 18  # along with this program.  If not, see <http://www.gnu.org/licenses/>.       # 
 19  #                                                                             # 
 20  ############################################################################### 
 21   
 22  # Module docstring. 
 23  """Module for model minimisation/optimisation.""" 
 24   
 25  # Python module imports. 
 26  from re import search 
 27   
 28  # relax module imports. 
 29  from generic_fns.mol_res_spin import return_spin, spin_loop 
 30  from generic_fns import pipes 
 31  from relax_errors import RelaxError 
 32  import specific_fns 
 33  from status import Status; status = Status() 
 34  from user_functions.data import Uf_tables; uf_tables = Uf_tables() 
 35  from user_functions.objects import Desc_container 
 36   
 37   
def calc(verbosity=1):
    """Calculate the target function value for the current data pipe.

    @param verbosity:   The amount of information to print.  The higher the value, the greater
                        the verbosity.
    @type verbosity:    int
    """

    # A data pipe must exist before anything can be calculated.
    pipes.test()

    # Fetch the analysis specific calculation and deselection functions.
    calculate = specific_fns.setup.get_specific_fn('calculate', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Standard calculation (no active Monte Carlo simulations).
    if not (hasattr(cdp, 'sim_state') and cdp.sim_state == 1):
        calculate(verbosity=verbosity)
        return

    # Monte Carlo simulations - one calculation per simulation.
    for sim in range(cdp.sim_number):
        # Feedback.
        if verbosity:
            print("Simulation " + repr(sim+1))

        # Register the simulation number with the status object.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = sim
        else:
            status.mc_number = sim

        # The calculation itself.
        calculate(verbosity=verbosity-1, sim_index=sim)

    # Clear the simulation number from the status object.
    if status.current_analysis:
        status.auto_analysis[status.current_analysis].mc_number = None
    else:
        status.mc_number = None
82 83
def grid_search(lower=None, upper=None, inc=None, constraints=True, verbosity=1):
    """Perform a grid search over the model parameter space.

    @param lower:       The lower bounds of the grid search, one value per model parameter.
    @type lower:        array of numbers
    @param upper:       The upper bounds of the grid search, one value per model parameter.
    @type upper:        array of numbers
    @param inc:         The number of increments for each dimension of the grid, one value per
                        model parameter.
    @type inc:          array of int
    @param constraints: If True, constraints are applied during the grid search (eliminating
                        parts of the grid).  If False, no constraints are used.
    @type constraints:  bool
    @param verbosity:   The amount of information to print.  The higher the value, the greater
                        the verbosity.
    @type verbosity:    int
    """

    # A data pipe must exist before the grid search can be run.
    pipes.test()

    # Fetch the analysis specific grid search and deselection functions.
    grid = specific_fns.setup.get_specific_fn('grid_search', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Standard grid search (no active Monte Carlo simulations).
    if not (hasattr(cdp, 'sim_state') and cdp.sim_state == 1):
        grid(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity)
        return

    # Monte Carlo simulations - one grid search per simulation.
    for sim in range(cdp.sim_number):
        # Feedback.
        if verbosity:
            print("Simulation " + repr(sim+1))

        # Register the simulation number with the status object.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = sim
        else:
            status.mc_number = sim

        # The optimisation itself.
        grid(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity-1, sim_index=sim)

    # Clear the simulation number from the status object.
    if status.current_analysis:
        status.auto_analysis[status.current_analysis].mc_number = None
    else:
        status.mc_number = None
141 142
def minimise(min_algor=None, line_search=None, hessian_mod=None, hessian_type=None, func_tol=None, grad_tol=None, max_iter=None, constraints=True, scaling=True, verbosity=1, sim_index=None):
    """Minimisation function.

    @keyword min_algor:     The minimisation algorithm to use.
    @type min_algor:        str
    @keyword line_search:   The line search algorithm which will only be used in combination with the line search and conjugate gradient methods.  This will default to the More and Thuente line search.
    @type line_search:      str or None
    @keyword hessian_mod:   The Hessian modification.  This will only be used in the algorithms which use the Hessian, and defaults to Gill, Murray, and Wright modified Cholesky algorithm.
    @type hessian_mod:      str or None
    @keyword hessian_type:  The Hessian type.  This will only be used in a few trust region algorithms, and defaults to BFGS.
    @type hessian_type:     str or None
    @keyword func_tol:      The function tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type func_tol:         None or float
    @keyword grad_tol:      The gradient tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type grad_tol:         None or float
    @keyword max_iter:      The maximum number of iterations for the algorithm.
    @type max_iter:         int
    @keyword constraints:   If True, constraints are used during optimisation.
    @type constraints:      bool
    @keyword scaling:       If True, diagonal scaling is enabled during optimisation to allow the problem to be better conditioned.
    @type scaling:          bool
    @keyword verbosity:     The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:        int
    @keyword sim_index:     The index of the simulation to optimise.  This should be None if normal optimisation is desired.
    @type sim_index:        None or int
    """

    # A data pipe must exist before optimisation can begin.
    pipes.test()

    # Re-package the minimisation algorithm, options, and constraints for the generic_minimise() calls within the specific code.
    if constraints:
        min_options = [min_algor]
        min_algor = 'Method of Multipliers'
    else:
        min_options = []
    if line_search != None:
        min_options.append(line_search)
    if hessian_mod != None:
        min_options.append(hessian_mod)
    if hessian_type != None:
        min_options.append(hessian_type)
    min_options = tuple(min_options)

    # Fetch the analysis specific minimisation and deselection functions.
    optimise = specific_fns.setup.get_specific_fn('minimise', cdp.pipe_type)
    overfit_deselect = specific_fns.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Keyword arguments shared by every specific minimise() call below.
    kwargs = {'min_algor': min_algor, 'min_options': min_options, 'func_tol': func_tol, 'grad_tol': grad_tol, 'max_iterations': max_iter, 'constraints': constraints, 'scaling': scaling}

    # Single Monte Carlo simulation.
    if sim_index != None:
        optimise(verbosity=verbosity, sim_index=sim_index, **kwargs)

    # All Monte Carlo simulations.
    elif hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        for sim in range(cdp.sim_number):
            # Feedback.
            if verbosity:
                print("Simulation " + repr(sim+1))

            # Register the simulation number with the status object.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = sim
            else:
                status.mc_number = sim

            # The optimisation itself.
            optimise(verbosity=verbosity-1, sim_index=sim, **kwargs)

        # Clear the simulation number from the status object.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Standard minimisation.
    else:
        optimise(verbosity=verbosity, **kwargs)
223 224
def reset_min_stats(data_pipe=None, spin=None):
    """Reset all minimisation statistics to None.

    @param data_pipe:   The name of the data pipe to reset the minimisation statistics of.  This
                        defaults to the current data pipe.
    @type data_pipe:    str
    @param spin:        The spin data container if spin specific data is to be reset.
    @type spin:         SpinContainer
    """

    # Default to the current data pipe.
    if data_pipe == None:
        data_pipe = pipes.cdp_name()

    # The data pipe container.
    dp = pipes.get_pipe(data_pipe)

    # The minimisation statistic object names.
    names = ['chi2', 'iter', 'f_count', 'g_count', 'h_count', 'warning']

    # Reset the global (data pipe level) minimisation statistics.
    for name in names:
        if hasattr(dp, name):
            setattr(dp, name, None)

    # Reset the sequence specific minimisation statistics.
    # NOTE(review): the 'spin' argument is never consulted - this loop resets every
    # spin in the current pipe regardless; confirm this is the intended behaviour.
    for spin_cont in spin_loop():
        for name in names:
            if hasattr(spin_cont, name):
                setattr(spin_cont, name, None)
299 300
def return_conversion_factor(stat_type):
    """Dummy function for returning 1.0.

    @param stat_type:   The name of the statistic.  This is unused!
    @type stat_type:    str
    @return:            A conversion factor of 1.0.
    @rtype:             float
    """

    # The minimisation statistics are never scaled, so the factor is always unity.
    factor = 1.0
    return factor
# Documentation table describing the string patterns accepted by return_data_name().
return_data_name_doc = Desc_container("Minimisation statistic data type string matching patterns")
table = uf_tables.add_table(label="table: min data type patterns", caption="Minimisation statistic data type string matching patterns.")
table.add_headings(["Data type", "Object name", "Patterns"])
for row in [
    ["Chi-squared statistic", "'chi2'", "'^[Cc]hi2$' or '^[Cc]hi[-_ ][Ss]quare'"],
    ["Iteration count", "'iter'", "'^[Ii]ter'"],
    ["Function call count", "'f_count'", "'^[Ff].*[ -_][Cc]ount'"],
    ["Gradient call count", "'g_count'", "'^[Gg].*[ -_][Cc]ount'"],
    ["Hessian call count", "'h_count'", "'^[Hh].*[ -_][Cc]ount'"],
]:
    table.add_row(row)
return_data_name_doc.add_table(table.label)
def return_data_name(name):
    """Return a unique identifying string for the minimisation parameter.

    @param name:    The minimisation parameter.
    @type name:     str
    @return:        The unique parameter identifying string, or None if the name is not
                    recognised.
    @rtype:         str
    """

    # The object names and their recognised patterns, tested in order.
    lookup = [
        ('chi2', ['^[Cc]hi2$', '^[Cc]hi[-_ ][Ss]quare']),
        ('iter', ['^[Ii]ter']),
        ('f_count', ['^[Ff].*[ -_][Cc]ount']),
        ('g_count', ['^[Gg].*[ -_][Cc]ount']),
        ('h_count', ['^[Hh].*[ -_][Cc]ount']),
    ]

    # The first matching pattern wins (None is returned implicitly on no match).
    for object_name, patterns in lookup:
        for pattern in patterns:
            if search(pattern, name):
                return object_name
351 352
def return_grace_string(stat_type):
    """Return the Grace string representing the data type, for axis labelling.

    @param stat_type:   The name of the statistic to return the Grace string for.
    @type stat_type:    str
    @return:            The Grace string.
    @rtype:             str
    """

    # Translate the statistic name into the internal object name.
    object_name = return_data_name(stat_type)

    # The Grace axis label for each object name.
    labels = {
        'chi2': '\\xc\\S2',
        'iter': 'Iteration count',
        'f_count': 'Function call count',
        'g_count': 'Gradient call count',
        'h_count': 'Hessian call count',
    }

    # Pick out the label.  An unknown statistic leaves grace_string unbound,
    # reproducing the original behaviour (an UnboundLocalError on return).
    if object_name in labels:
        grace_string = labels[object_name]

    # Return the Grace string.
    return grace_string
387 388
def return_units(stat_type):
    """Dummy function which returns None as the stats have no units.

    @param stat_type:   The name of the statistic.  This is unused!
    @type stat_type:    str
    @return:            Nothing.
    @rtype:             None
    """

    # The minimisation statistics are dimensionless quantities.
    return
399 400
def return_value(spin=None, stat_type=None, sim=None):
    """Return the minimisation statistic corresponding to 'stat_type'.

    @param spin:        The spin data container if spin specific data is to be returned.
    @type spin:         SpinContainer
    @param stat_type:   The name of the statistic to return the value for.
    @type stat_type:    str
    @param sim:         The index of the simulation to return the value for.  If None, then the
                        normal value is returned.
    @type sim:          None or int
    @return:            The statistic value together with None, as no errors are associated
                        with these statistics.
    @rtype:             tuple
    """

    # Translate the statistic name into the internal object name.
    object_name = return_data_name(stat_type)

    # Unknown statistic types are fatal.
    if not object_name:
        raise RelaxError("The statistic type " + repr(stat_type) + " does not exist.")

    # The container holding the statistic - the current data pipe or the given spin.
    container = cdp if spin == None else spin

    # The attribute name - simulation values live in '<name>_sim' list objects.
    if sim == None:
        attr = object_name
    else:
        attr = object_name + '_sim'

    # Fetch the statistic, defaulting to None when the attribute is absent.
    stat = None
    if hasattr(container, attr):
        value = getattr(container, attr)
        stat = value if sim == None else value[sim]

    # Return the statistic (together with None to indicate that there are no errors associated with the statistic).
    return stat, None
# Documentation string used by the user function front end for the set() function.
set_doc = """
Minimisation statistic set details
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This shouldn't really be executed by a user.
"""
def set(val=None, error=None, param=None, scaling=None, spin_id=None):
    """Set global or spin specific minimisation parameters.

    @keyword val:       The parameter values.
    @type val:          number
    @keyword error:     Unused.
    @type error:        number
    @keyword param:     The parameter names.
    @type param:        str
    @keyword scaling:   Unused.
    @type scaling:      float
    @keyword spin_id:   The spin identification string.
    @type spin_id:      str
    """

    # Translate the parameter name into the internal object name.
    param_name = return_data_name(param)

    # The settable minimisation statistic object names.
    stat_names = ('chi2', 'iter', 'f_count', 'g_count', 'h_count')

    # Global minimisation stats - stored directly on the current data pipe.
    if spin_id == None:
        if param_name in stat_names:
            setattr(cdp, param_name, val)

    # Spin specific minimisation stats.
    else:
        # Fetch the spin container (done unconditionally, as in the original flow).
        spin = return_spin(spin_id)

        # Store the value on the spin.
        if param_name in stat_names:
            setattr(spin, param_name, val)
528