Package specific_fns :: Module relax_fit
[hide private]
[frames] | [no frames]

Source Code for Module specific_fns.relax_fit

   1  ############################################################################### 
   2  #                                                                             # 
   3  # Copyright (C) 2004-2006 Edward d'Auvergne                                   # 
   4  #                                                                             # 
   5  # This file is part of the program relax.                                     # 
   6  #                                                                             # 
   7  # relax is free software; you can redistribute it and/or modify               # 
   8  # it under the terms of the GNU General Public License as published by        # 
   9  # the Free Software Foundation; either version 2 of the License, or           # 
  10  # (at your option) any later version.                                         # 
  11  #                                                                             # 
  12  # relax is distributed in the hope that it will be useful,                    # 
  13  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
  14  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
  15  # GNU General Public License for more details.                                # 
  16  #                                                                             # 
  17  # You should have received a copy of the GNU General Public License           # 
  18  # along with relax; if not, write to the Free Software                        # 
  19  # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA   # 
  20  #                                                                             # 
  21  ############################################################################### 
  22   
  23  import __builtin__ 
  24  from LinearAlgebra import inverse 
  25  from math import sqrt 
  26  from Numeric import Float64, array, average, identity, matrixmultiply, zeros 
  27  from re import match, search 
  28  import sys 
  29   
  30  from base_class import Common_functions 
  31  from minimise.generic import generic_minimise 
  32   
  33  # C modules. 
  34  try: 
  35      from maths_fns.relax_fit import setup, func, dfunc, d2func, back_calc_I 
  36  except ImportError: 
  37      sys.stderr.write("\nImportError: relaxation curve fitting is unavailible, try compiling the C modules.\n") 
  38      __builtin__.C_module_exp_fn = 0 
  39  else: 
  40      __builtin__.C_module_exp_fn = 1 
  41   
  42   
43 -class Relax_fit(Common_functions):
44 - def __init__(self, relax):
45 """Class containing functions for relaxation data.""" 46 47 self.relax = relax
48 49
50 - def assemble_param_vector(self, index=None, sim_index=None):
51 """Function for assembling various pieces of data into a Numeric parameter array.""" 52 53 # Initialise. 54 param_vector = [] 55 56 # Alias the residue specific data structure. 57 data = self.relax.data.res[self.run][index] 58 59 # Loop over the model parameters. 60 for i in xrange(len(data.params)): 61 # Relaxation rate. 62 if data.params[i] == 'Rx': 63 if sim_index != None: 64 param_vector.append(data.rx_sim[sim_index]) 65 elif data.rx == None: 66 param_vector.append(0.0) 67 else: 68 param_vector.append(data.rx) 69 70 # Initial intensity. 71 elif data.params[i] == 'I0': 72 if sim_index != None: 73 param_vector.append(data.i0_sim[sim_index]) 74 elif data.i0 == None: 75 param_vector.append(0.0) 76 else: 77 param_vector.append(data.i0) 78 79 # Intensity at infinity. 80 elif data.params[i] == 'Iinf': 81 if sim_index != None: 82 param_vector.append(data.iinf_sim[sim_index]) 83 elif data.iinf == None: 84 param_vector.append(0.0) 85 else: 86 param_vector.append(data.iinf) 87 88 # Return a Numeric array. 89 return array(param_vector, Float64)
90 91
92 - def assemble_scaling_matrix(self, index=None, scaling=1):
93 """Function for creating the scaling matrix.""" 94 95 # Initialise. 96 self.scaling_matrix = identity(len(self.param_vector), Float64) 97 i = 0 98 99 # No diagonal scaling. 100 if not scaling: 101 return 102 103 # Alias the residue specific data structure. 104 data = self.relax.data.res[self.run][index] 105 106 # Loop over the parameters. 107 for i in xrange(len(data.params)): 108 # Relaxation rate. 109 if data.params[i] == 'Rx': 110 pass 111 112 # Intensity scaling. 113 elif search('^i', data.params[i]): 114 # Find the position of the first time point. 115 pos = self.relax.data.relax_times[self.run].index(min(self.relax.data.relax_times[self.run])) 116 117 # Scaling. 118 self.scaling_matrix[i, i] = 1.0 / average(data.intensities[pos]) 119 120 # Increment i. 121 i = i + 1
122 123
124 - def assign_function(self, run=None, i=None, intensity=None):
125 """Function for assigning peak intensity data to either the reference or saturated spectra.""" 126 127 # Alias the residue specific data structure. 128 data = self.relax.data.res[run][i] 129 130 # Initialise. 131 index = None 132 if not hasattr(data, 'intensities'): 133 data.intensities = [] 134 135 # Determine if the relaxation time already exists for the residue (duplicated spectra). 136 for i in xrange(len(self.relax.data.relax_times[self.run])): 137 if self.relax_time == self.relax.data.relax_times[self.run][i]: 138 index = i 139 140 # A new relaxation time has been encountered. 141 if index >= len(data.intensities): 142 data.intensities.append([intensity]) 143 144 # Duplicated spectra. 145 else: 146 data.intensities[index].append(intensity)
147 148
149 - def back_calc(self, run=None, index=None, relax_time_index=None):
150 """Back-calculation of peak intensity for the given relaxation time.""" 151 152 # Run argument. 153 self.run = run 154 155 # Alias the residue specific data structure. 156 data = self.relax.data.res[self.run][index] 157 158 # Create the initial parameter vector. 159 self.param_vector = self.assemble_param_vector(index=index) 160 161 # Initialise the relaxation fit functions. 162 setup(num_params=len(data.params), num_times=len(self.relax.data.relax_times[self.run]), values=data.ave_intensities, sd=self.relax.data.sd[self.run], relax_times=self.relax.data.relax_times[self.run], scaling_matrix=self.scaling_matrix) 163 164 # Make a single function call. This will cause back calculation and the data will be stored in the C module. 165 func(self.param_vector) 166 167 # Get the data back. 168 results = back_calc_I() 169 170 # Return the correct peak height. 171 return results[relax_time_index]
172 173
174 - def create_mc_data(self, run, i):
175 """Function for creating the Monte Carlo peak intensity data.""" 176 177 # Arguments 178 self.run = run 179 180 # Initialise the MC data data structure. 181 mc_data = [] 182 183 # Alias the residue specific data structure. 184 data = self.relax.data.res[self.run][i] 185 186 # Skip unselected residues. 187 if not data.select: 188 return 189 190 # Skip residues which have no data. 191 if not hasattr(data, 'intensities'): 192 return 193 194 # Test if the model is set. 195 if not hasattr(data, 'model') or not data.model: 196 raise RelaxNoModelError, self.run 197 198 # Loop over the spectral time points. 199 for j in xrange(len(self.relax.data.relax_times[run])): 200 # Back calculate the value. 201 value = self.back_calc(run=run, index=i, relax_time_index=j) 202 203 # Append the value. 204 mc_data.append(value) 205 206 # Return the MC data. 207 return mc_data
208 209
210 - def data_init(self):
211 """Function for initialising the data structures.""" 212 213 # Curve type. 214 if not hasattr(self.relax.data, 'curve_type'): 215 self.relax.data.curve_type = {} 216 217 # Get the data names. 218 data_names = self.data_names() 219 220 # Loop over the sequence. 221 for i in xrange(len(self.relax.data.res[self.run])): 222 # Alias the residue specific data structure. 223 data = self.relax.data.res[self.run][i] 224 225 # Skip unselected residues. 226 if not data.select: 227 continue 228 229 # Loop over the data structure names. 230 for name in data_names: 231 # Data structures which are initially empty arrays. 232 list_data = [ 'params' ] 233 if name in list_data: 234 init_data = [] 235 236 # Otherwise initialise the data structure to None. 237 else: 238 init_data = None 239 240 # If the name is not in 'data', add it. 241 if not hasattr(data, name): 242 setattr(data, name, init_data)
243 244
245 - def data_names(self, set='all'):
246 """Function for returning a list of names of data structures. 247 248 Description 249 ~~~~~~~~~~~ 250 251 The names are as follows: 252 253 params: An array of the parameter names associated with the model. 254 255 rx: Either the R1 or R2 relaxation rate. 256 257 i0: The initial intensity. 258 259 iinf: The intensity at infinity. 260 261 chi2: Chi-squared value. 262 263 iter: Iterations. 264 265 f_count: Function count. 266 267 g_count: Gradient count. 268 269 h_count: Hessian count. 270 271 warning: Minimisation warning. 272 """ 273 274 # Initialise. 275 names = [] 276 277 # Generic. 278 if set == 'all' or set == 'generic': 279 names.append('params') 280 281 # Parameters. 282 if set == 'all' or set == 'params': 283 names.append('rx') 284 names.append('i0') 285 names.append('iinf') 286 287 # Minimisation statistics. 288 if set == 'all' or set == 'min': 289 names.append('chi2') 290 names.append('iter') 291 names.append('f_count') 292 names.append('g_count') 293 names.append('h_count') 294 names.append('warning') 295 296 return names
297 298
299 - def default_value(self, param):
300 """ 301 Relaxation curve fitting default values 302 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 303 304 These values are completely arbitrary as peak heights (or volumes) are extremely variable 305 and the Rx value is a compensation for both the R1 and R2 values. 306 ___________________________________________________________________ 307 | | | | 308 | Data type | Object name | Value | 309 |________________________|_______________|________________________| 310 | | | | 311 | Relaxation rate | 'rx' | 8.0 | 312 | | | | 313 | Initial intensity | 'i0' | 10000.0 | 314 | | | | 315 | Intensity at infinity | 'iinf' | 0.0 | 316 | | | | 317 |________________________|_______________|________________________| 318 319 """ 320 321 # Relaxation rate. 322 if param == 'rx': 323 return 8.0 324 325 # Initial intensity. 326 if param == 'i0': 327 return 10000.0 328 329 # Intensity at infinity. 330 if param == 'iinf': 331 return 0.0
332 333
334 - def disassemble_param_vector(self, index=None, sim_index=None):
335 """Function for disassembling the parameter vector.""" 336 337 # Alias the residue specific data structure. 338 data = self.relax.data.res[self.run][index] 339 340 # Monte Carlo simulations. 341 if sim_index != None: 342 # The relaxation rate. 343 data.rx_sim[sim_index] = self.param_vector[0] 344 345 # Initial intensity. 346 data.i0_sim[sim_index] = self.param_vector[1] 347 348 # Intensity at infinity. 349 if self.relax.data.curve_type[self.run] == 'inv': 350 data.iinf_sim[sim_index] = self.param_vector[2] 351 352 # Parameter values. 353 else: 354 # The relaxation rate. 355 data.rx = self.param_vector[0] 356 357 # Initial intensity. 358 data.i0 = self.param_vector[1] 359 360 # Intensity at infinity. 361 if self.relax.data.curve_type[self.run] == 'inv': 362 data.iinf = self.param_vector[2]
363 364
365 - def grid_search(self, run, lower, upper, inc, constraints, print_flag, sim_index=None):
366 """The grid search function.""" 367 368 # Arguments. 369 self.lower = lower 370 self.upper = upper 371 self.inc = inc 372 373 # Minimisation. 374 self.minimise(run=run, min_algor='grid', constraints=constraints, print_flag=print_flag, sim_index=sim_index)
375 376
377 - def grid_search_setup(self, index=None):
378 """The grid search setup function.""" 379 380 # The length of the parameter array. 381 n = len(self.param_vector) 382 383 # Make sure that the length of the parameter array is > 0. 384 if n == 0: 385 raise RelaxError, "Cannot run a grid search on a model with zero parameters." 386 387 # Lower bounds. 388 if self.lower != None: 389 if len(self.lower) != n: 390 raise RelaxLenError, ('lower bounds', n) 391 392 # Upper bounds. 393 if self.upper != None: 394 if len(self.upper) != n: 395 raise RelaxLenError, ('upper bounds', n) 396 397 # Increment. 398 if type(self.inc) == list: 399 if len(self.inc) != n: 400 raise RelaxLenError, ('increment', n) 401 inc = self.inc 402 elif type(self.inc) == int: 403 temp = [] 404 for j in xrange(n): 405 temp.append(self.inc) 406 inc = temp 407 408 # Minimisation options initialisation. 409 min_options = [] 410 j = 0 411 412 # Alias the residue specific data structure. 413 data = self.relax.data.res[self.run][index] 414 415 # Loop over the parameters. 416 for i in xrange(len(data.params)): 417 # Relaxation rate (from 0 to 20 s^-1). 418 if data.params[i] == 'Rx': 419 min_options.append([inc[j], 0.0, 20.0]) 420 421 # Intensity 422 elif search('^I', data.params[i]): 423 # Find the position of the first time point. 424 pos = self.relax.data.relax_times[self.run].index(min(self.relax.data.relax_times[self.run])) 425 426 # Scaling. 427 min_options.append([inc[j], 0.0, average(data.intensities[pos])]) 428 429 # Increment j. 430 j = j + 1 431 432 # Set the lower and upper bounds if these are supplied. 433 if self.lower != None: 434 for j in xrange(n): 435 if self.lower[j] != None: 436 min_options[j][1] = self.lower[j] 437 if self.upper != None: 438 for j in xrange(n): 439 if self.upper[j] != None: 440 min_options[j][2] = self.upper[j] 441 442 # Test if the grid is too large. 
443 self.grid_size = 1 444 for i in xrange(len(min_options)): 445 self.grid_size = self.grid_size * min_options[i][0] 446 if type(self.grid_size) == long: 447 raise RelaxError, "A grid search of size " + `self.grid_size` + " is too large." 448 449 # Diagonal scaling of minimisation options. 450 for j in xrange(len(min_options)): 451 min_options[j][1] = min_options[j][1] / self.scaling_matrix[j, j] 452 min_options[j][2] = min_options[j][2] / self.scaling_matrix[j, j] 453 454 return min_options
455 456
457 - def linear_constraints(self, index=None):
458 """Function for setting up the linear constraint matrices A and b. 459 460 Standard notation 461 ~~~~~~~~~~~~~~~~~ 462 463 The relaxation rate constraints are: 464 465 Rx >= 0 466 467 The intensity constraints are: 468 469 I0 >= 0 470 Iinf >= 0 471 472 473 Matrix notation 474 ~~~~~~~~~~~~~~~ 475 476 In the notation A.x >= b, where A is an matrix of coefficients, x is an array of parameter 477 values, and b is a vector of scalars, these inequality constraints are: 478 479 | 1 0 0 | | Rx | | 0 | 480 | | | | | | 481 | 1 0 0 | . | I0 | >= | 0 | 482 | | | | | | 483 | 1 0 0 | | Iinf | | 0 | 484 485 """ 486 487 # Initialisation (0..j..m). 488 A = [] 489 b = [] 490 n = len(self.param_vector) 491 zero_array = zeros(n, Float64) 492 i = 0 493 j = 0 494 495 # Alias the residue specific data structure. 496 data = self.relax.data.res[self.run][index] 497 498 # Loop over the parameters. 499 for k in xrange(len(data.params)): 500 # Relaxation rate. 501 if data.params[k] == 'Rx': 502 # Rx >= 0. 503 A.append(zero_array * 0.0) 504 A[j][i] = 1.0 505 b.append(0.0) 506 j = j + 1 507 508 # Intensity parameter. 509 elif search('^I', data.params[k]): 510 # I0, Iinf >= 0. 511 A.append(zero_array * 0.0) 512 A[j][i] = 1.0 513 b.append(0.0) 514 j = j + 1 515 516 # Increment i. 517 i = i + 1 518 519 # Convert to Numeric data structures. 520 A = array(A, Float64) 521 b = array(b, Float64) 522 523 return A, b
524 525
526 - def mean_and_error(self, run=None, print_flag=0):
527 """Function for calculating the average intensity and standard deviation of all spectra.""" 528 529 # Arguments. 530 self.run = run 531 532 # Test if the standard deviation is already calculated. 533 if hasattr(self.relax.data, 'sigma'): 534 raise RelaxError, "The average intensity and standard deviation of all spectra has already been calculated." 535 536 # Print out. 537 print "\nCalculating the average intensity and standard deviation of all spectra." 538 539 # Initialise. 540 self.relax.data.sigma = {} 541 self.relax.data.sigma[self.run] = [] 542 543 # Loop over the time points. 544 for time_index in xrange(len(self.relax.data.relax_times[self.run])): 545 # Print out. 546 print "\nTime point: " + `self.relax.data.relax_times[self.run][time_index]` + " s" 547 print "Number of spectra: " + `self.relax.data.num_spectra[self.run][time_index]` 548 if print_flag: 549 print "%-5s%-6s%-20s%-20s" % ("Num", "Name", "Average", "SD") 550 551 # Append zero to the global standard deviation structure. 552 self.relax.data.sigma[self.run].append(0.0) 553 554 # Initialise the time point and residue specific sd. 555 total_res = 0 556 557 # Test for multiple spectra. 558 if self.relax.data.num_spectra[self.run][time_index] == 1: 559 multiple_spectra = 0 560 else: 561 multiple_spectra = 1 562 563 # Calculate the mean value. 564 for i in xrange(len(self.relax.data.res[self.run])): 565 # Alias the residue specific data structure. 566 data = self.relax.data.res[self.run][i] 567 568 # Skip unselected residues. 569 if not data.select: 570 continue 571 572 # Skip and unselect residues which have no data. 573 if not hasattr(data, 'intensities'): 574 data.select = 0 575 continue 576 577 # Initialise the average intensity and standard deviation data structures. 578 if not hasattr(data, 'ave_intensities'): 579 data.ave_intensities = [] 580 if not hasattr(data, 'sigma'): 581 data.sigma = [] 582 583 # Average intensity. 
584 data.ave_intensities.append(average(data.intensities[time_index])) 585 586 # Sum of squared errors. 587 SSE = 0.0 588 for j in xrange(self.relax.data.num_spectra[self.run][time_index]): 589 SSE = SSE + (data.intensities[time_index][j] - data.ave_intensities[time_index]) ** 2 590 591 # Variance. 592 # 593 # 1 594 # sigma = ----- * sum({Xi - Xav}^2)] 595 # n - 1 596 # 597 if self.relax.data.num_spectra[self.run][time_index] == 1: 598 sigma = 0.0 599 else: 600 sigma = 1.0 / (self.relax.data.num_spectra[self.run][time_index] - 1.0) * SSE 601 data.sigma.append(sigma) 602 603 # Print out. 604 if print_flag: 605 print "%-5i%-6s%-20s%-20s" % (data.num, data.name, `data.ave_intensities[time_index]`, `data.sigma[time_index]`) 606 607 # Sum of variances (for average). 608 self.relax.data.sigma[self.run][time_index] = self.relax.data.sigma[self.run][time_index] + data.sigma[time_index] 609 610 # Increment the number of residues counter. 611 total_res = total_res + 1 612 613 # Average variance. 614 self.relax.data.sigma[self.run][time_index] = self.relax.data.sigma[self.run][time_index] / float(total_res) 615 616 # Print out. 617 print "Standard deviation for time point %s: %s" % (`time_index`, `self.relax.data.sigma[self.run][time_index]`) 618 619 620 # Average across all spectra if there are time points with a single spectrum. 621 if 0.0 in self.relax.data.sigma[self.run]: 622 # Initialise. 623 sigma = 0.0 624 num_dups = 0 625 626 # Loop over all time points. 627 for i in xrange(len(self.relax.data.relax_times[self.run])): 628 # Single spectrum (or extraordinarily accurate NMR spectra!). 629 if self.relax.data.sigma[self.run][i] == 0.0: 630 continue 631 632 # Sum and count. 633 sigma = sigma + self.relax.data.sigma[self.run][i] 634 num_dups = num_dups + 1 635 636 # Average value. 637 sigma = sigma / float(num_dups) 638 639 # Assign the average value to all time points. 
640 for i in xrange(len(self.relax.data.relax_times[self.run])): 641 self.relax.data.sigma[self.run][i] = sigma 642 643 # Print out. 644 print "\nStandard deviation (averaged over all spectra): " + `sigma` 645 646 # Create the standard deviation data structure. 647 cdp.sd = [] 648 for sigma in cdp.sigma: 649 cdp.sd.append(sqrt(sigma))
650 651
652 - def minimise(self, run=None, min_algor=None, min_options=None, func_tol=None, grad_tol=None, max_iterations=None, constraints=0, scaling=1, print_flag=0, sim_index=None):
653 """Relaxation curve fitting function.""" 654 655 # Arguments. 656 self.run = run 657 self.print_flag = print_flag 658 659 # Test if the sequence data for self.run is loaded. 660 if not self.relax.data.res.has_key(self.run): 661 raise RelaxNoSequenceError, self.run 662 663 # Loop over the sequence. 664 for i in xrange(len(self.relax.data.res[self.run])): 665 # Alias the residue specific data structure. 666 data = self.relax.data.res[self.run][i] 667 668 # Skip unselected residues. 669 if not data.select: 670 continue 671 672 # Skip residues which have no data. 673 if not hasattr(data, 'intensities'): 674 continue 675 676 # Create the initial parameter vector. 677 self.param_vector = self.assemble_param_vector(index=i, sim_index=sim_index) 678 679 # Diagonal scaling. 680 self.assemble_scaling_matrix(index=i, scaling=scaling) 681 self.param_vector = matrixmultiply(inverse(self.scaling_matrix), self.param_vector) 682 683 # Get the grid search minimisation options. 684 if match('^[Gg]rid', min_algor): 685 min_options = self.grid_search_setup(index=i) 686 687 # Linear constraints. 688 if constraints: 689 A, b = self.linear_constraints(index=i) 690 691 # Print out. 692 if self.print_flag >= 1: 693 # Individual residue print out. 694 if self.print_flag >= 2: 695 print "\n\n" 696 string = "Fitting to residue: " + `data.num` + " " + data.name 697 print "\n\n" + string 698 print len(string) * '~' 699 700 # Grid search print out. 701 if match('^[Gg]rid', min_algor): 702 print "Unconstrained grid search size: " + `self.grid_size` + " (constraints may decrease this size).\n" 703 704 705 # Initialise the function to minimise. 
706 ###################################### 707 708 if sim_index == None: 709 values = data.ave_intensities 710 else: 711 values = data.sim_intensities[sim_index] 712 713 setup(num_params=len(data.params), num_times=len(self.relax.data.relax_times[self.run]), values=values, sd=self.relax.data.sd[self.run], relax_times=self.relax.data.relax_times[self.run], scaling_matrix=self.scaling_matrix) 714 715 716 # Setup the minimisation algorithm when constraints are present. 717 ################################################################ 718 719 if constraints and not match('^[Gg]rid', min_algor): 720 algor = min_options[0] 721 else: 722 algor = min_algor 723 724 725 # Levenberg-Marquardt minimisation. 726 ################################### 727 728 if match('[Ll][Mm]$', algor) or match('[Ll]evenburg-[Mm]arquardt$', algor): 729 # Reconstruct the error data structure. 730 lm_error = zeros(len(data.relax_times), Float64) 731 index = 0 732 for k in xrange(len(data.relax_times)): 733 lm_error[index:index+len(relax_error[k])] = relax_error[k] 734 index = index + len(relax_error[k]) 735 736 min_options = min_options + (self.relax_fit.lm_dri, lm_error) 737 738 739 # Minimisation. 740 ############### 741 742 if constraints: 743 results = generic_minimise(func=func, dfunc=dfunc, d2func=d2func, args=(), x0=self.param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, A=A, b=b, full_output=1, print_flag=print_flag) 744 else: 745 results = generic_minimise(func=func, dfunc=dfunc, d2func=d2func, args=(), x0=self.param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, full_output=1, print_flag=print_flag) 746 if results == None: 747 return 748 self.param_vector, self.func, self.iter_count, self.f_count, self.g_count, self.h_count, self.warning = results 749 750 # Scaling. 
751 if scaling: 752 self.param_vector = matrixmultiply(self.scaling_matrix, self.param_vector) 753 754 # Disassemble the parameter vector. 755 self.disassemble_param_vector(index=i, sim_index=sim_index) 756 757 # Monte Carlo minimisation statistics. 758 if sim_index != None: 759 # Chi-squared statistic. 760 data.chi2_sim[sim_index] = self.func 761 762 # Iterations. 763 data.iter_sim[sim_index] = self.iter_count 764 765 # Function evaluations. 766 data.f_count_sim[sim_index] = self.f_count 767 768 # Gradient evaluations. 769 data.g_count_sim[sim_index] = self.g_count 770 771 # Hessian evaluations. 772 data.h_count_sim[sim_index] = self.h_count 773 774 # Warning. 775 data.warning_sim[sim_index] = self.warning 776 777 778 # Normal statistics. 779 else: 780 # Chi-squared statistic. 781 data.chi2 = self.func 782 783 # Iterations. 784 data.iter = self.iter_count 785 786 # Function evaluations. 787 data.f_count = self.f_count 788 789 # Gradient evaluations. 790 data.g_count = self.g_count 791 792 # Hessian evaluations. 793 data.h_count = self.h_count 794 795 # Warning. 796 data.warning = self.warning
797 798
799 - def model_setup(self, model, params):
800 """Function for updating various data structures dependant on the model selected.""" 801 802 # Set the model. 803 self.relax.data.curve_type[self.run] = model 804 805 # Initialise the data structures (if needed). 806 self.data_init() 807 808 # Loop over the sequence. 809 for i in xrange(len(self.relax.data.res[self.run])): 810 # Skip unselected residues. 811 if not self.relax.data.res[self.run][i].select: 812 continue 813 814 # The model and parameter names. 815 self.relax.data.res[self.run][i].model = model 816 self.relax.data.res[self.run][i].params = params
817 818
819 - def num_instances(self, run=None):
820 """Function for returning the number of instances.""" 821 822 # Arguments. 823 self.run = run 824 825 # Test if sequence data is loaded. 826 if not self.relax.data.res.has_key(self.run): 827 return 0 828 829 # Return the number of residues. 830 return len(self.relax.data.res[self.run])
831 832
833 - def overfit_deselect(self, run):
834 """Function for deselecting residues without sufficient data to support minimisation""" 835 836 # Test the sequence data exists: 837 if not self.relax.data.res.has_key(run): 838 raise RelaxNoSequenceError, run 839 840 # Loop over residue data: 841 for residue in self.relax.data.res[run]: 842 843 # Check for sufficient data 844 if not hasattr(residue, 'intensities'): 845 residue.select = 0 846 continue 847 848 # Require 3 or more data points 849 if len(residue.intensities) < 3: 850 residue.select = 0 851 continue
852 853
854 - def read(self, run=None, file=None, dir=None, relax_time=0.0, format=None, heteronuc=None, proton=None, int_col=None):
855 """Function for reading peak intensity data.""" 856 857 # Arguments. 858 self.run = run 859 self.relax_time = relax_time 860 self.format = format 861 self.heteronuc = heteronuc 862 self.proton = proton 863 self.int_col = int_col 864 865 # Initialise the global data if necessary. 866 self.data_init() 867 868 # Global relaxation time data structure. 869 if not hasattr(self.relax.data, 'relax_times'): 870 self.relax.data.relax_times = {} 871 if not self.relax.data.relax_times.has_key(self.run): 872 self.relax.data.relax_times[self.run] = [] 873 874 # Number of spectra. 875 if not hasattr(self.relax.data, 'num_spectra'): 876 self.relax.data.num_spectra = {} 877 if not self.relax.data.num_spectra.has_key(self.run): 878 self.relax.data.num_spectra[self.run] = [] 879 880 # Determine if the relaxation time already exists for the residue (duplicated spectra). 881 index = None 882 for i in xrange(len(self.relax.data.relax_times[self.run])): 883 if self.relax_time == self.relax.data.relax_times[self.run][i]: 884 index = i 885 886 # A new relaxation time. 887 if index == None: 888 # Add the time. 889 self.relax.data.relax_times[self.run].append(self.relax_time) 890 891 # First spectrum. 892 self.relax.data.num_spectra[self.run].append(1) 893 894 # Duplicated spectra. 895 else: 896 self.relax.data.num_spectra[self.run][index] = self.relax.data.num_spectra[self.run][index] + 1 897 898 # Generic intensity function. 899 self.relax.generic.intensity.read(run=run, file=file, dir=dir, format=format, heteronuc=heteronuc, proton=proton, int_col=int_col, assign_func=self.assign_function)
900 901
902 - def return_conversion_factor(self, stat_type):
903 """Dummy function for returning 1.0.""" 904 905 return 1.0
906 907
908 - def return_data(self, run, i):
909 """Function for returning the peak intensity data structure.""" 910 911 return self.relax.data.res[run][i].intensities
912 913
914 - def return_error(self, run, i):
915 """Function for returning the standard deviation data structure.""" 916 917 return self.relax.data.sd[run]
918 919
920 - def return_data_name(self, name):
921 """ 922 Relaxation curve fitting data type string matching patterns 923 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 924 925 __________________________________________________________________________________________ 926 | | | | 927 | Data type | Object name | Patterns | 928 |___________________________________|______________________|_____________________________| 929 | | | | 930 | Relaxation rate | 'rx' | '^[Rr]x$' | 931 | | | | 932 | Average peak intensities (series) | 'ave_intensities' | '^[Aa]ve[ -_][Ii]nt$' | 933 | | | | 934 | Initial intensity | 'i0' | '^[Ii]0$' | 935 | | | | 936 | Intensity at infinity | 'iinf' | '^[Ii]inf$' | 937 | | | | 938 | Relaxation period times (series) | 'relax_times' | '^[Rr]elax[ -_][Tt]imes$' | 939 |___________________________________|______________________|_____________________________| 940 941 """ 942 943 # Relaxation rate. 944 if match('^[Rr]x$', name): 945 return 'rx' 946 947 # Average peak intensities (series) 948 if match('^[Aa]ve[ -_][Ii]nt$', name): 949 return 'ave_intensities' 950 951 # Initial intensity. 952 if match('^[Ii]0$', name): 953 return 'i0' 954 955 # Intensity at infinity. 956 if match('^[Ii]inf$', name): 957 return 'iinf' 958 959 # Relaxation period times (series). 960 if match('^[Rr]elax[ -_][Tt]imes$', name): 961 return 'relax_times'
962 963
964 - def return_grace_string(self, data_type):
965 """Function for returning the Grace string representing the data type for axis labelling.""" 966 967 # Get the object name. 968 object_name = self.return_data_name(data_type) 969 970 # Relaxation rate. 971 if object_name == 'rx': 972 grace_string = '\\qR\\sx\\Q' 973 974 # Average peak intensities. 975 elif object_name == 'ave_intensities': 976 grace_string = '\\qAverage peak intensities\\Q' 977 978 # Initial intensity. 979 elif object_name == 'i0': 980 grace_string = '\\qI\\s0\\Q' 981 982 # Intensity at infinity. 983 elif object_name == 'iinf': 984 grace_string = '\\qI\\sinf\\Q' 985 986 # Intensity at infinity. 987 elif object_name == 'relax_times': 988 grace_string = '\\qRelaxation time period (s)\\Q' 989 990 # Return the Grace string. 991 return grace_string
992 993
994 - def return_units(self, stat_type):
995 """Dummy function which returns None as the stats have no units.""" 996 997 return None
998 999
1000 - def select_model(self, run=None, model='exp'):
1001 """Function for selecting the model of the exponential curve.""" 1002 1003 # Arguments. 1004 self.run = run 1005 1006 # Test if the run exists. 1007 if not self.run in self.relax.data.run_names: 1008 raise RelaxNoRunError, self.run 1009 1010 # Test if the run type is set to 'relax_fit'. 1011 function_type = self.relax.data.run_types[self.relax.data.run_names.index(self.run)] 1012 if function_type != 'relax_fit': 1013 raise RelaxFuncSetupError, self.relax.specific_setup.get_string(function_type) 1014 1015 # Test if sequence data is loaded. 1016 if not self.relax.data.res.has_key(self.run): 1017 raise RelaxNoSequenceError, self.run 1018 1019 # Two parameter exponential fit. 1020 if model == 'exp': 1021 print "Two parameter exponential fit." 1022 params = ['Rx', 'I0'] 1023 1024 # Three parameter inversion recovery fit. 1025 elif model == 'inv': 1026 print "Three parameter inversion recovery fit." 1027 params = ['Rx', 'I0', 'Iinf'] 1028 1029 # Invalid model. 1030 else: 1031 raise RelaxError, "The model '" + model + "' is invalid." 1032 1033 # Set up the model. 1034 self.model_setup(model, params)
1035 1036
1037 - def set_doc(self):
1038 """ 1039 Relaxation curve fitting set details 1040 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 1041 1042 Only three parameters can be set, the relaxation rate (Rx), the initial intensity (I0), and 1043 the intensity at infinity (Iinf). Setting the parameter Iinf has no effect if the chosen 1044 model is that of the exponential curve which decays to zero. 1045 """
1046 1047
1048 - def set_selected_sim(self, run, instance, select_sim):
1049 """Function for returning the array of selected simulation flags.""" 1050 1051 # Arguments. 1052 self.run = run 1053 1054 # Multiple instances. 1055 self.relax.data.res[self.run][instance].select_sim = select_sim
1056 1057
1058 - def sim_pack_data(self, run, i, sim_data):
1059 """Function for packing Monte Carlo simulation data.""" 1060 1061 # Test if the simulation data already exists. 1062 if hasattr(self.relax.data.res[run][i], 'sim_intensities'): 1063 raise RelaxError, "Monte Carlo simulation data already exists." 1064 1065 # Create the data structure. 1066 self.relax.data.res[run][i].sim_intensities = sim_data
1067