Package pipe_control :: Module minimise

Source Code for Module pipe_control.minimise

###############################################################################
#                                                                             #
# Copyright (C) 2003-2013 Edward d'Auvergne                                   #
#                                                                             #
# This file is part of the program relax (http://www.nmr-relax.com).          #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program.  If not, see <http://www.gnu.org/licenses/>.       #
#                                                                             #
###############################################################################

# Module docstring.
"""Module for model minimisation/optimisation."""

# Python module imports.
from re import search

# relax module imports.
from lib.errors import RelaxError
from multi import Processor_box
from pipe_control.mol_res_spin import return_spin, spin_loop
from pipe_control import pipes
import specific_analyses
from status import Status; status = Status()
from user_functions.data import Uf_tables; uf_tables = Uf_tables()
from user_functions.objects import Desc_container

def calc(verbosity=1):
    """Function for calculating the function value.

    @param verbosity:   The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:    int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Specific calculate function setup.
    calculate = specific_analyses.setup.get_specific_fn('calculate', cdp.pipe_type)
    overfit_deselect = specific_analyses.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Get the Processor box singleton (it contains the Processor instance) and alias the Processor.
    processor_box = Processor_box()
    processor = processor_box.processor

    # Monte Carlo simulation calculation.
    if hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        # Loop over the simulations.
        for i in range(cdp.sim_number):
            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Calculation.
            calculate(verbosity=verbosity-1, sim_index=i)

            # Print out.
            if verbosity and not processor.is_queued():
                print("Simulation " + repr(i+1))

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Minimisation.
    else:
        calculate(verbosity=verbosity)

    # Execute any queued commands.
    processor.run_queue()

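The function only makes sense inside a running relax session, since it reads the current data pipe (the cdp global) and dispatches to the analysis-specific back end. A minimal usage sketch (not part of the module source), assuming a relax script in which a data pipe has already been created and populated; scripts would normally go through the corresponding user function front end rather than importing the module directly:

# Illustrative only - assumes a relax session with a populated current data pipe.
from pipe_control.minimise import calc

# Back-calculate the target function value (e.g. chi-squared) for the current
# parameter values, printing a moderate amount of information.
calc(verbosity=1)
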
def grid_search(lower=None, upper=None, inc=None, constraints=True, verbosity=1):
    """The grid search function.

    @param lower:           The lower bounds of the grid search.  The number of elements must equal the number of parameters in the model.
    @type lower:            array of numbers
    @param upper:           The upper bounds of the grid search.  The number of elements must equal the number of parameters in the model.
    @type upper:            array of numbers
    @param inc:             The increments for each dimension of the space for the grid search.  The number of elements in the array must equal the number of parameters in the model.
    @type inc:              array of int
    @param constraints:     If True, constraints are applied during the grid search (eliminating parts of the grid).  If False, no constraints are used.
    @type constraints:      bool
    @param verbosity:       The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:        int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Specific grid search function.
    grid_search = specific_analyses.setup.get_specific_fn('grid_search', cdp.pipe_type)
    overfit_deselect = specific_analyses.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Get the Processor box singleton (it contains the Processor instance) and alias the Processor.
    processor_box = Processor_box()
    processor = processor_box.processor

    # Monte Carlo simulation grid search.
    if hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        # Loop over the simulations.
        for i in range(cdp.sim_number):
            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Optimisation.
            grid_search(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity-1, sim_index=i)

            # Print out.
            if verbosity and not processor.is_queued():
                print("Simulation " + repr(i+1))

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Grid search.
    else:
        grid_search(lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity)

    # Execute any queued commands.
    processor.run_queue()

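As with calc(), the grid search operates on the current data pipe and queues its work on the multi-processor framework. A sketch of a direct call (not part of the module source), assuming a hypothetical three-parameter model so that the bound and increment arrays each have three elements:

# Illustrative only - the array lengths must match the model's parameter count,
# and the bound values here are arbitrary examples.
from pipe_control.minimise import grid_search

grid_search(lower=[0.0, 0.0, 0.0], upper=[10.0, 10.0, 1.0], inc=[11, 11, 11], constraints=True, verbosity=1)
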
def minimise(min_algor=None, line_search=None, hessian_mod=None, hessian_type=None, func_tol=None, grad_tol=None, max_iter=None, constraints=True, scaling=True, verbosity=1, sim_index=None):
    """Minimisation function.

    @keyword min_algor:     The minimisation algorithm to use.
    @type min_algor:        str
    @keyword line_search:   The line search algorithm which will only be used in combination with the line search and conjugate gradient methods.  This will default to the More and Thuente line search.
    @type line_search:      str or None
    @keyword hessian_mod:   The Hessian modification.  This will only be used in the algorithms which use the Hessian, and defaults to the Gill, Murray, and Wright modified Cholesky algorithm.
    @type hessian_mod:      str or None
    @keyword hessian_type:  The Hessian type.  This will only be used in a few trust region algorithms, and defaults to BFGS.
    @type hessian_type:     str or None
    @keyword func_tol:      The function tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type func_tol:         None or float
    @keyword grad_tol:      The gradient tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
    @type grad_tol:         None or float
    @keyword max_iter:      The maximum number of iterations for the algorithm.
    @type max_iter:         int
    @keyword constraints:   If True, constraints are used during optimisation.
    @type constraints:      bool
    @keyword scaling:       If True, diagonal scaling is enabled during optimisation to allow the problem to be better conditioned.
    @type scaling:          bool
    @keyword verbosity:     The amount of information to print.  The higher the value, the greater the verbosity.
    @type verbosity:        int
    @keyword sim_index:     The index of the simulation to optimise.  This should be None if normal optimisation is desired.
    @type sim_index:        None or int
    """

    # Test if the current data pipe exists.
    pipes.test()

    # Re-package the minimisation algorithm, options, and constraints for the generic_minimise() calls within the specific code.
    if constraints:
        min_options = [min_algor]

        # Determine the constraint algorithm to use.
        fn = specific_analyses.setup.get_specific_fn('constraint_algorithm', cdp.pipe_type)
        min_algor = fn()
    else:
        min_options = []
    if line_search != None:
        min_options.append(line_search)
    if hessian_mod != None:
        min_options.append(hessian_mod)
    if hessian_type != None:
        min_options.append(hessian_type)
    min_options = tuple(min_options)

    # Specific minimisation function.
    minimise = specific_analyses.setup.get_specific_fn('minimise', cdp.pipe_type)
    overfit_deselect = specific_analyses.setup.get_specific_fn('overfit_deselect', cdp.pipe_type)

    # Deselect spins lacking data:
    overfit_deselect()

    # Get the Processor box singleton (it contains the Processor instance) and alias the Processor.
    processor_box = Processor_box()
    processor = processor_box.processor

    # Single Monte Carlo simulation.
    if sim_index != None:
        minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity, sim_index=sim_index)

    # Monte Carlo simulation minimisation.
    elif hasattr(cdp, 'sim_state') and cdp.sim_state == 1:
        for i in range(cdp.sim_number):
            # Status.
            if status.current_analysis:
                status.auto_analysis[status.current_analysis].mc_number = i
            else:
                status.mc_number = i

            # Optimisation.
            minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity-1, sim_index=i)

            # Print out.
            if verbosity and not processor.is_queued():
                print("Simulation " + repr(i+1))

        # Unset the status.
        if status.current_analysis:
            status.auto_analysis[status.current_analysis].mc_number = None
        else:
            status.mc_number = None

    # Standard minimisation.
    else:
        minimise(min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iter, constraints=constraints, scaling=scaling, verbosity=verbosity)

    # Execute any queued commands.
    processor.run_queue()

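Note how the arguments are re-packaged when constraints are active: the chosen algorithm becomes the first element of min_options and min_algor is replaced by the analysis-specific constraint algorithm (typically the Method of Multipliers), which then wraps the original optimiser. A usage sketch (not part of the module source), assuming a relax session and that 'newton' is an algorithm name accepted by the analysis-specific back end:

# Illustrative only - the algorithm name and tolerance values are examples.
from pipe_control.minimise import minimise

minimise(min_algor='newton', func_tol=1e-25, grad_tol=None, max_iter=10000000, constraints=True, scaling=True, verbosity=1)
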
def reset_min_stats(data_pipe=None, spin=None):
    """Function for resetting the minimisation statistics.

    @param data_pipe:   The name of the data pipe to reset the minimisation statistics of.  This defaults to the current data pipe.
    @type data_pipe:    str
    @param spin:        The spin data container if spin specific data is to be reset.
    @type spin:         SpinContainer
    """

    # The data pipe.
    if data_pipe == None:
        data_pipe = pipes.cdp_name()

    # Get the data pipe.
    dp = pipes.get_pipe(data_pipe)


    # Global minimisation statistics.
    #################################

    # Chi-squared.
    if hasattr(dp, 'chi2'):
        dp.chi2 = None

    # Iteration count.
    if hasattr(dp, 'iter'):
        dp.iter = None

    # Function count.
    if hasattr(dp, 'f_count'):
        dp.f_count = None

    # Gradient count.
    if hasattr(dp, 'g_count'):
        dp.g_count = None

    # Hessian count.
    if hasattr(dp, 'h_count'):
        dp.h_count = None

    # Warning.
    if hasattr(dp, 'warning'):
        dp.warning = None


    # Sequence specific minimisation statistics.
    ############################################

    # Loop over all spins.
    for spin in spin_loop():
        # Chi-squared.
        if hasattr(spin, 'chi2'):
            spin.chi2 = None

        # Iteration count.
        if hasattr(spin, 'iter'):
            spin.iter = None

        # Function count.
        if hasattr(spin, 'f_count'):
            spin.f_count = None

        # Gradient count.
        if hasattr(spin, 'g_count'):
            spin.g_count = None

        # Hessian count.
        if hasattr(spin, 'h_count'):
            spin.h_count = None

        # Warning.
        if hasattr(spin, 'warning'):
            spin.warning = None

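A sketch of clearing the statistics from a script (not part of the module source), assuming a relax session with a spin sequence loaded; note that the spin loop always covers the whole sequence of the current data pipe:

# Illustrative only - wipes chi2, iter, f_count, g_count, h_count and warning
# from both the data pipe and every spin container.
from pipe_control.minimise import reset_min_stats

reset_min_stats()
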
def return_conversion_factor(stat_type):
    """Dummy function for returning 1.0.

    @param stat_type:   The name of the statistic.  This is unused!
    @type stat_type:    str
    @return:            A conversion factor of 1.0.
    @rtype:             float
    """

    return 1.0


return_data_name_doc = Desc_container("Minimisation statistic data type string matching patterns")
table = uf_tables.add_table(label="table: min data type patterns", caption="Minimisation statistic data type string matching patterns.")
table.add_headings(["Data type", "Object name", "Patterns"])
table.add_row(["Chi-squared statistic", "'chi2'", "'^[Cc]hi2$' or '^[Cc]hi[-_ ][Ss]quare'"])
table.add_row(["Iteration count", "'iter'", "'^[Ii]ter'"])
table.add_row(["Function call count", "'f_count'", "'^[Ff].*[ -_][Cc]ount'"])
table.add_row(["Gradient call count", "'g_count'", "'^[Gg].*[ -_][Cc]ount'"])
table.add_row(["Hessian call count", "'h_count'", "'^[Hh].*[ -_][Cc]ount'"])
return_data_name_doc.add_table(table.label)

def return_data_name(name):
    """Return a unique identifying string for the minimisation parameter.

    @param name:    The minimisation parameter.
    @type name:     str
    @return:        The unique parameter identifying string.
    @rtype:         str
    """

    # Chi-squared.
    if search('^[Cc]hi2$', name) or search('^[Cc]hi[-_ ][Ss]quare', name):
        return 'chi2'

    # Iteration count.
    if search('^[Ii]ter', name):
        return 'iter'

    # Function call count.
    if search('^[Ff].*[ -_][Cc]ount', name):
        return 'f_count'

    # Gradient call count.
    if search('^[Gg].*[ -_][Cc]ount', name):
        return 'g_count'

    # Hessian call count.
    if search('^[Hh].*[ -_][Cc]ount', name):
        return 'h_count'

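This function is self-contained (it only uses re.search), so the behaviour of the patterns tabulated in return_data_name_doc can be checked directly, for example:

# Illustrative calls - the return values follow the matching patterns above.
from pipe_control.minimise import return_data_name

return_data_name('chi2')          # -> 'chi2'
return_data_name('Chi-squared')   # -> 'chi2'   (matches '^[Cc]hi[-_ ][Ss]quare')
return_data_name('iterations')    # -> 'iter'
return_data_name('f_count')       # -> 'f_count'
return_data_name('nonsense')      # -> None     (no pattern matches, so the function falls through)
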
def return_grace_string(stat_type):
    """Function for returning the Grace string representing the data type for axis labelling.

    @param stat_type:   The name of the statistic to return the Grace string for.
    @type stat_type:    str
    @return:            The Grace string.
    @rtype:             str
    """

    # Get the object name.
    object_name = return_data_name(stat_type)

    # Chi-squared.
    if object_name == 'chi2':
        grace_string = '\\xc\\S2'

    # Iteration count.
    elif object_name == 'iter':
        grace_string = 'Iteration count'

    # Function call count.
    elif object_name == 'f_count':
        grace_string = 'Function call count'

    # Gradient call count.
    elif object_name == 'g_count':
        grace_string = 'Gradient call count'

    # Hessian call count.
    elif object_name == 'h_count':
        grace_string = 'Hessian call count'

    # Return the Grace string.
    return grace_string

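The returned strings are Grace (xmgrace) axis label markup, for example:

# Illustrative calls.
from pipe_control.minimise import return_grace_string

return_grace_string('chi2')         # -> '\xc\S2'  (Grace markup for a chi with a superscript 2)
return_grace_string('iterations')   # -> 'Iteration count'
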
def return_units(stat_type):
    """Dummy function which returns None as the stats have no units.

    @param stat_type:   The name of the statistic.  This is unused!
    @type stat_type:    str
    @return:            Nothing.
    @rtype:             None
    """

    return None

def return_value(spin=None, stat_type=None, sim=None):
    """Function for returning the minimisation statistic corresponding to 'stat_type'.

    @param spin:        The spin data container if spin specific data is to be returned.
    @type spin:         SpinContainer
    @param stat_type:   The name of the statistic to return the value for.
    @type stat_type:    str
    @param sim:         The index of the simulation to return the value for.  If None, then the normal value is returned.
    @type sim:          None or int
    """

    # Get the object name.
    object_name = return_data_name(stat_type)

    # The statistic type does not exist.
    if not object_name:
        raise RelaxError("The statistic type " + repr(stat_type) + " does not exist.")

    # The simulation object name.
    object_sim = object_name + '_sim'

    # Get the global statistic.
    if spin == None:
        # Get the statistic.
        if sim == None:
            if hasattr(cdp, object_name):
                stat = getattr(cdp, object_name)
            else:
                stat = None

        # Get the simulation statistic.
        else:
            if hasattr(cdp, object_sim):
                stat = getattr(cdp, object_sim)[sim]
            else:
                stat = None

    # Residue specific statistic.
    else:
        # Get the statistic.
        if sim == None:
            if hasattr(spin, object_name):
                stat = getattr(spin, object_name)
            else:
                stat = None

        # Get the simulation statistic.
        else:
            if hasattr(spin, object_sim):
                stat = getattr(spin, object_sim)[sim]
            else:
                stat = None

    # Return the statistic (together with None to indicate that there are no errors associated with the statistic).
    return stat, None


set_doc = """
Minimisation statistic set details
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

This shouldn't really be executed by a user.
"""

def set(val=None, error=None, param=None, scaling=None, spin_id=None):
    """Set global or spin specific minimisation parameters.

    @keyword val:       The parameter values.
    @type val:          number
    @keyword param:     The parameter names.
    @type param:        str
    @keyword scaling:   Unused.
    @type scaling:      float
    @keyword spin_id:   The spin identification string.
    @type spin_id:      str
    """

    # Get the parameter name.
    param_name = return_data_name(param)

    # Global minimisation stats.
    if spin_id == None:
        # Chi-squared.
        if param_name == 'chi2':
            cdp.chi2 = val

        # Iteration count.
        elif param_name == 'iter':
            cdp.iter = val

        # Function call count.
        elif param_name == 'f_count':
            cdp.f_count = val

        # Gradient call count.
        elif param_name == 'g_count':
            cdp.g_count = val

        # Hessian call count.
        elif param_name == 'h_count':
            cdp.h_count = val

    # Residue specific minimisation.
    else:
        # Get the spin.
        spin = return_spin(spin_id)

        # Chi-squared.
        if param_name == 'chi2':
            spin.chi2 = val

        # Iteration count.
        elif param_name == 'iter':
            spin.iter = val

        # Function call count.
        elif param_name == 'f_count':
            spin.f_count = val

        # Gradient call count.
        elif param_name == 'g_count':
            spin.g_count = val

        # Hessian call count.
        elif param_name == 'h_count':
            spin.h_count = val