Package generic_fns :: Module minimise
[hide private]
[frames] | [no frames]

Source Code for Module generic_fns.minimise

  1  ############################################################################### 
  2  #                                                                             # 
  3  # Copyright (C) 2003-2005 Edward d'Auvergne                                   # 
  4  #                                                                             # 
  5  # This file is part of the program relax.                                     # 
  6  #                                                                             # 
  7  # relax is free software; you can redistribute it and/or modify               # 
  8  # it under the terms of the GNU General Public License as published by        # 
  9  # the Free Software Foundation; either version 2 of the License, or           # 
 10  # (at your option) any later version.                                         # 
 11  #                                                                             # 
 12  # relax is distributed in the hope that it will be useful,                    # 
 13  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
 14  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
 15  # GNU General Public License for more details.                                # 
 16  #                                                                             # 
 17  # You should have received a copy of the GNU General Public License           # 
 18  # along with relax; if not, write to the Free Software                        # 
 19  # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA   # 
 20  #                                                                             # 
 21  ############################################################################### 
 22   
 23   
 24  from Queue import Queue 
 25  from re import search 
 26   
 27  from processes import RelaxPopen3 
 28  from thread_classes import RelaxParentThread, RelaxThread 
 29   
 30   
class Minimise:
    """Class containing the calc, grid_search, minimise, and set functions."""

    def __init__(self, relax):
        """Store the top level relax instance for later data access."""

        # The top level relax container, used by all methods to reach the data store.
        self.relax = relax
36 37
38 - def calc(self, run=None, print_flag=1):
39 """Function for calculating the function value.""" 40 41 # Test if the run exists. 42 if not run in self.relax.data.run_names: 43 raise RelaxNoRunError, run 44 45 # Function type. 46 function_type = self.relax.data.run_types[self.relax.data.run_names.index(run)] 47 48 # Specific calculate function setup. 49 calculate = self.relax.specific_setup.setup('calculate', function_type) 50 51 # Monte Carlo simulation calculation. 52 if hasattr(self.relax.data, 'sim_state') and self.relax.data.sim_state.has_key(run) and self.relax.data.sim_state[run] == 1: 53 # Loop over the simulations. 54 for i in xrange(self.relax.data.sim_number[run]): 55 if print_flag: 56 print "Simulation " + `i+1` 57 calculate(run=run, print_flag=print_flag-1, sim_index=i) 58 59 # Minimisation. 60 else: 61 calculate(run=run, print_flag=print_flag)
62 63
64 - def grid_search(self, run=None, lower=None, upper=None, inc=None, constraints=1, print_flag=1):
65 """The grid search function.""" 66 67 # Test if the run exists. 68 if not run in self.relax.data.run_names: 69 raise RelaxNoRunError, run 70 71 # Function type. 72 function_type = self.relax.data.run_types[self.relax.data.run_names.index(run)] 73 74 # Specific grid search function. 75 grid_search = self.relax.specific_setup.setup('grid_search', function_type) 76 77 # Monte Carlo simulation grid search. 78 if hasattr(self.relax.data, 'sim_state') and self.relax.data.sim_state.has_key(run) and self.relax.data.sim_state[run] == 1: 79 # Loop over the simulations. 80 for i in xrange(self.relax.data.sim_number[run]): 81 if print_flag: 82 print "Simulation " + `i+1` 83 grid_search(run=run, lower=lower, upper=upper, inc=inc, constraints=constraints, print_flag=print_flag-1, sim_index=i) 84 85 # Grid search. 86 else: 87 grid_search(run=run, lower=lower, upper=upper, inc=inc, constraints=constraints, print_flag=print_flag)
88 89
90 - def minimise(self, run=None, min_algor=None, min_options=None, func_tol=None, grad_tol=None, max_iterations=None, constraints=1, scaling=1, print_flag=1, sim_index=None):
91 """Minimisation function.""" 92 93 # Test if the run exists. 94 if not run in self.relax.data.run_names: 95 raise RelaxNoRunError, run 96 97 # Function type. 98 function_type = self.relax.data.run_types[self.relax.data.run_names.index(run)] 99 100 # Specific minimisation function. 101 minimise = self.relax.specific_setup.setup('minimise', function_type) 102 103 # Single Monte Carlo simulation. 104 if sim_index != None: 105 minimise(run=run, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iterations, constraints=constraints, scaling=scaling, print_flag=print_flag, sim_index=sim_index) 106 107 # Monte Carlo simulation minimisation. 108 elif hasattr(self.relax.data, 'sim_state') and self.relax.data.sim_state.has_key(run) and self.relax.data.sim_state[run] == 1: 109 # Threaded minimisation of simulations. 110 if self.relax.thread_data.status: 111 # Print out. 112 print "Threaded minimisation of Monte Carlo simulations.\n" 113 114 # Run the main threading loop. 115 RelaxMinParentThread(self.relax, run, min_algor, min_options, func_tol, grad_tol, max_iterations, constraints, scaling, print_flag) 116 117 # Non-threaded minimisation of simulations. 118 else: 119 for i in xrange(self.relax.data.sim_number[run]): 120 if print_flag: 121 print "Simulation " + `i+1` 122 minimise(run=run, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iterations, constraints=constraints, scaling=scaling, print_flag=print_flag-1, sim_index=i) 123 124 # Standard minimisation. 125 else: 126 minimise(run=run, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, max_iterations=max_iterations, constraints=constraints, scaling=scaling, print_flag=print_flag)
127 128
129 - def reset_min_stats(self, run, index=None):
130 """Function for resetting the minimisation statistics.""" 131 132 # Arguments. 133 self.run = run 134 135 # Global minimisation statistics. 136 if index == None: 137 # Chi-squared. 138 if hasattr(self.relax.data, 'chi2') and self.relax.data.chi2.has_key(self.run): 139 self.relax.data.chi2[self.run] = None 140 141 # Iteration count. 142 if hasattr(self.relax.data, 'iter') and self.relax.data.iter.has_key(self.run): 143 self.relax.data.iter[self.run] = None 144 145 # Function count. 146 if hasattr(self.relax.data, 'f_count') and self.relax.data.f_count.has_key(self.run): 147 self.relax.data.f_count[self.run] = None 148 149 # Gradient count. 150 if hasattr(self.relax.data, 'g_count') and self.relax.data.g_count.has_key(self.run): 151 self.relax.data.g_count[self.run] = None 152 153 # Hessian count. 154 if hasattr(self.relax.data, 'h_count') and self.relax.data.h_count.has_key(self.run): 155 self.relax.data.h_count[self.run] = None 156 157 # Warning. 158 if hasattr(self.relax.data, 'warning') and self.relax.data.warning.has_key(self.run): 159 self.relax.data.warning[self.run] = None 160 161 # Sequence specific minimisation statistics. 162 else: 163 # Chi-squared. 164 if hasattr(self.relax.data.res[self.run][index], 'chi2'): 165 self.relax.data.res[self.run][index].chi2 = None 166 167 # Iteration count. 168 if hasattr(self.relax.data.res[self.run][index], 'iter'): 169 self.relax.data.res[self.run][index].iter = None 170 171 # Function count. 172 if hasattr(self.relax.data.res[self.run][index], 'f_count'): 173 self.relax.data.res[self.run][index].f_count = None 174 175 # Gradient count. 176 if hasattr(self.relax.data.res[self.run][index], 'g_count'): 177 self.relax.data.res[self.run][index].g_count = None 178 179 # Hessian count. 180 if hasattr(self.relax.data.res[self.run][index], 'h_count'): 181 self.relax.data.res[self.run][index].h_count = None 182 183 # Warning. 
184 if hasattr(self.relax.data.res[self.run][index], 'warning'): 185 self.relax.data.res[self.run][index].warning = None
186 187
188 - def return_conversion_factor(self, stat_type):
189 """Dummy function for returning 1.0.""" 190 191 return 1.0
192 193
194 - def return_data_name(self, name):
195 """ 196 Minimisation statistic data type string matching patterns 197 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 198 199 ____________________________________________________________________________________________ 200 | | | | 201 | Data type | Object name | Patterns | 202 |________________________|______________|__________________________________________________| 203 | | | | 204 | Chi-squared statistic | 'chi2' | '^[Cc]hi2$' or '^[Cc]hi[-_ ][Ss]quare' | 205 | | | | 206 | Iteration count | 'iter' | '^[Ii]ter' | 207 | | | | 208 | Function call count | 'f_count' | '^[Ff].*[ -_][Cc]ount' | 209 | | | | 210 | Gradient call count | 'g_count' | '^[Gg].*[ -_][Cc]ount' | 211 | | | | 212 | Hessian call count | 'h_count' | '^[Hh].*[ -_][Cc]ount' | 213 |________________________|______________|__________________________________________________| 214 215 """ 216 217 # Chi-squared. 218 if search('^[Cc]hi2$', name) or search('^[Cc]hi[-_ ][Ss]quare', name): 219 return 'chi2' 220 221 # Iteration count. 222 if search('^[Ii]ter', name): 223 return 'iter' 224 225 # Function call count. 226 if search('^[Ff].*[ -_][Cc]ount', name): 227 return 'f_count' 228 229 # Gradient call count. 230 if search('^[Gg].*[ -_][Cc]ount', name): 231 return 'g_count' 232 233 # Hessian call count. 234 if search('^[Hh].*[ -_][Cc]ount', name): 235 return 'h_count'
236 237
238 - def return_grace_string(self, stat_type):
239 """Function for returning the Grace string representing the data type for axis labelling.""" 240 241 # Get the object name. 242 object_name = self.return_data_name(stat_type) 243 244 # Chi-squared. 245 if object_name == 'chi2': 246 grace_string = '\\xc\\S2' 247 248 # Iteration count. 249 elif object_name == 'iter': 250 grace_string = 'Iteration count' 251 252 # Function call count. 253 elif object_name == 'f_count': 254 grace_string = 'Function call count' 255 256 # Gradient call count. 257 elif object_name == 'g_count': 258 grace_string = 'Gradient call count' 259 260 # Hessian call count. 261 elif object_name == 'h_count': 262 grace_string = 'Hessian call count' 263 264 # Return the Grace string. 265 return grace_string
266 267
268 - def return_units(self, stat_type):
269 """Dummy function which returns None as the stats have no units.""" 270 271 return None
272 273
274 - def return_value(self, run, index=None, stat_type=None, sim=None):
275 """Function for returning the minimisation statistic corresponding to 'stat_type'.""" 276 277 # Arguments. 278 self.run = run 279 280 # Get the object name. 281 object_name = self.return_data_name(stat_type) 282 283 # The statistic type does not exist. 284 if not object_name: 285 raise RelaxError, "The statistic type " + `stat_type` + " does not exist." 286 287 # The simulation object name. 288 object_sim = object_name + '_sim' 289 290 # Get the global statistic. 291 if index == None: 292 # Get the statistic. 293 if sim == None: 294 if hasattr(self.relax.data, object_name) and getattr(self.relax.data.res[self.run][index], object_name).has_key(self.run): 295 stat = getattr(self.relax.data, object_name)[self.run] 296 else: 297 stat = None 298 299 # Get the simulation statistic. 300 else: 301 if hasattr(self.relax.data, object_sim) and getattr(self.relax.data.res[self.run][index], object_sim).has_key(self.run): 302 stat = getattr(self.relax.data, object_sim)[self.run][sim] 303 else: 304 stat = None 305 306 # Residue specific statistic. 307 else: 308 # Get the statistic. 309 if sim == None: 310 if hasattr(self.relax.data.res[self.run][index], object_name): 311 stat = getattr(self.relax.data.res[self.run][index], object_name) 312 else: 313 stat = None 314 315 # Get the simulation statistic. 316 else: 317 if hasattr(self.relax.data.res[self.run][index], object_sim): 318 stat = getattr(self.relax.data.res[self.run][index], object_sim)[sim] 319 else: 320 stat = None 321 322 # Return the statistic (together with None to indicate that there are no errors associated with the statistic). 323 return stat, None
324 325
326 - def set(self, run=None, value=None, error=None, param=None, scaling=None, index=None):
327 """ 328 Minimisation statistic set details 329 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 330 331 This shouldn't really be executed by a user. 332 """ 333 334 # Arguments. 335 self.run = run 336 337 # Get the parameter name. 338 param_name = self.return_data_name(param) 339 340 # Global minimisation stats. 341 if index == None: 342 # Chi-squared. 343 if param_name == 'chi2': 344 self.relax.data.chi2[self.run] = value 345 346 # Iteration count. 347 elif param_name == 'iter': 348 self.relax.data.iter[self.run] = value 349 350 # Function call count. 351 elif param_name == 'f_count': 352 self.relax.data.f_count[self.run] = value 353 354 # Gradient call count. 355 elif param_name == 'g_count': 356 self.relax.data.g_count[self.run] = value 357 358 # Hessian call count. 359 elif param_name == 'h_count': 360 self.relax.data.h_count[self.run] = value 361 362 # Residue specific minimisation. 363 else: 364 # Chi-squared. 365 if param_name == 'chi2': 366 self.relax.data.res[self.run][index].chi2 = value 367 368 # Iteration count. 369 elif param_name == 'iter': 370 self.relax.data.res[self.run][index].iter = value 371 372 # Function call count. 373 elif param_name == 'f_count': 374 self.relax.data.res[self.run][index].f_count = value 375 376 # Gradient call count. 377 elif param_name == 'g_count': 378 self.relax.data.res[self.run][index].g_count = value 379 380 # Hessian call count. 381 elif param_name == 'h_count': 382 self.relax.data.res[self.run][index].h_count = value
383 384 385 386 # Main threading loop for the minimisation of Monte Carlo simulations. 387 ###################################################################### 388
class RelaxMinParentThread(RelaxParentThread):
    def __init__(self, relax, parent_run, *min_args):
        """Initialisation of the Monte Carlo simulation minimisation parent thread."""

        # Store the arguments for later use by the child threads.
        self.relax = relax
        self.parent_run = parent_run
        self.min_args = min_args

        # Generic parent thread initialisation.
        RelaxParentThread.__init__(self)

        # One job per Monte Carlo simulation of the parent run.
        self.num_jobs = self.relax.data.sim_number[self.parent_run]

        # Run the main loop.
        self.run()
406 407
408 - def thread_object(self, i):
409 """Function for returning an initialised thread object.""" 410 411 # Return the thread object. 412 return RelaxMinimiseThread(self.relax, i, self.job_queue, self.results_queue, self.finished_jobs, self.job_locks, self.tag, self.parent_run, self.min_args)
413 414 415 416 # Threads for the minimisation of Monte Carlo simulations. 417 ########################################################## 418
class RelaxMinimiseThread(RelaxThread):
    def __init__(self, relax, i, job_queue, results_queue, finished_jobs, job_locks, tag, parent_run, min_args):
        """Initialisation of the thread."""

        # Store the data needed once the generic thread setup is complete.
        self.relax = relax
        self.tag = tag
        self.parent_run = parent_run
        self.min_args = min_args

        # Run the RelaxThread __init__ function (this is 'asserted' by the Thread class).
        RelaxThread.__init__(self, i, job_queue, results_queue, finished_jobs, job_locks)

        # Unpack the minimisation arguments into individual attributes.
        self.min_algor, self.min_options, self.func_tol, self.grad_tol, self.max_iterations, self.constraints, self.scaling, self.print_flag = self.min_args
434 435
    def generate_script(self):
        """Function for generating the script for the thread to minimise sim `sim`.

        The script text is assembled as a series of relax function-call strings and piped
        through 'cat' (wrapped by self.remote_command) into the script file.  The
        attributes self.save_state_file, self.script_file, self.login_cmd and the
        remote_command method are presumably provided by the RelaxThread base class —
        TODO confirm.  Raises RelaxError if the child process wrote anything to stderr.
        """

        # Function array.
        fn = []

        # Function: Load the program state.
        fn.append("self.relax.generic.state.load(file='%s')" % self.save_state_file)

        # Function: Minimise.  The simulation index is this thread's job number.
        fn.append("self.relax.generic.minimise.minimise(run='%s', min_algor='%s', min_options=%s, func_tol=%s, grad_tol=%s, max_iterations=%s, constraints=%s, scaling=%s, print_flag=%s, sim_index=%s)" % (self.parent_run, self.min_algor, self.min_options, self.func_tol, self.grad_tol, self.max_iterations, self.constraints, self.scaling, self.print_flag, self.job_number))

        # Function: Turn logging off.  This is so that the results can come back through the child's stdout pipe.
        fn.append("self.relax.IO.logging_off()")

        # Generate the main text of the script file.  Each function call is first echoed
        # (the generated 'print' line) and then executed, so the child's output records
        # exactly what was run.
        text = ''
        for i in xrange(len(fn)):
            text = text + "\nprint \"\\n" + fn[i] + "\"\n"
            text = text + fn[i] + "\n"

        # Function: Write the results to stdout.
        text = text + "self.relax.generic.results.display(run='%s')\n" % (self.parent_run)

        # Cat the text into the script file (the command may be wrapped for remote login).
        cmd = "cat > %s" % self.script_file
        cmd = self.remote_command(cmd=cmd, login_cmd=self.login_cmd)

        # Start the child process.
        self.child = RelaxPopen3(cmd, capturestderr=1)

        # Write the text to the child's stdin, then close it.
        self.child.tochild.write(text)
        self.child.tochild.close()

        # Catch errors.
        err = self.child.childerr.readlines()

        # Close all pipes.
        self.child.fromchild.close()
        self.child.childerr.close()

        # The file could not be copied.
        if len(err):
            raise RelaxError, "The command `%s` could not be executed." % cmd
481 482
    def post_locked_code(self):
        """Code to run after locking the job.

        The child's results are read into a temporary run, copied into the Monte Carlo
        simulation of the parent run (simulation self.job_number), and the temporary run
        is then deleted.
        """

        # Create a run in the parent to temporarily store the data prior to copying into the main run.
        self.relax.generic.runs.create(run=self.thread_run, run_type=self.relax.data.run_types[self.relax.data.run_names.index(self.parent_run)])

        # Read the data into the run (self.results presumably holds the child's captured output — TODO confirm).
        self.relax.generic.results.read(run=self.thread_run, file_data=self.results, print_flag=0)

        # Copy the results from the thread run to the parent run.
        self.relax.generic.results.copy(run1=self.thread_run, run2=self.parent_run, sim=self.job_number)

        # Delete the thread run.
        self.relax.generic.runs.delete(self.thread_run)

        # Print out.
        print "Simulation: " + `self.job_number`
500