Package specific_analyses :: Package relax_fit
[hide private]
[frames] | [no frames]

Source Code for Package specific_analyses.relax_fit

  1  ############################################################################### 
  2  #                                                                             # 
  3  # Copyright (C) 2004-2013 Edward d'Auvergne                                   # 
  4  #                                                                             # 
  5  # This file is part of the program relax (http://www.nmr-relax.com).          # 
  6  #                                                                             # 
  7  # This program is free software: you can redistribute it and/or modify        # 
  8  # it under the terms of the GNU General Public License as published by        # 
  9  # the Free Software Foundation, either version 3 of the License, or           # 
 10  # (at your option) any later version.                                         # 
 11  #                                                                             # 
 12  # This program is distributed in the hope that it will be useful,             # 
 13  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
 14  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
 15  # GNU General Public License for more details.                                # 
 16  #                                                                             # 
 17  # You should have received a copy of the GNU General Public License           # 
 18  # along with this program.  If not, see <http://www.gnu.org/licenses/>.       # 
 19  #                                                                             # 
 20  ############################################################################### 
 21   
 22  # Package docstring. 
 23  """The R1 and R2 exponential relaxation curve fitting analysis.""" 
 24   
 25  # The available modules. 
 26  __all__ = [ 
 27  ] 
 28   
 29  # Python module imports. 
 30  from minfx.generic import generic_minimise 
 31  from minfx.grid import grid 
 32  from numpy import array, average, dot, float64, identity, zeros 
 33  from numpy.linalg import inv 
 34  from re import match, search 
 35  from warnings import warn 
 36   
 37  # relax module imports. 
 38  from dep_check import C_module_exp_fn 
 39  from lib.errors import RelaxError, RelaxFuncSetupError, RelaxLenError, RelaxNoModelError, RelaxNoSequenceError 
 40  from lib.mathematics import round_to_next_order 
 41  from lib.warnings import RelaxDeselectWarning 
 42  from pipe_control import pipes 
 43  from pipe_control.mol_res_spin import exists_mol_res_spin_data, generate_spin_id_unique, return_spin, spin_loop 
 44  from specific_analyses.api_base import API_base 
 45  from specific_analyses.api_common import API_common 
 46  from user_functions.data import Uf_tables; uf_tables = Uf_tables() 
 47  from user_functions.objects import Desc_container 
 48   
 49  # C modules. 
 50  if C_module_exp_fn: 
 51      from target_functions.relax_fit import setup, func, dfunc, d2func, back_calc_I 
 52   
 53   
class Relax_fit(API_base, API_common):
    """Class containing functions for relaxation curve fitting.

    This specific analysis API supports the two parameter exponential fit
    ('exp', parameters rx and i0) and the three parameter inversion recovery
    fit ('inv', parameters rx, i0 and iinf) - see _select_model().
    """
    def __init__(self):
        """Initialise the class by placing API_common methods into the API.

        This also registers all spin parameters of the analysis (peak
        intensities, relaxation times, rx, i0, iinf, and the parameter name
        list) together with the minimisation statistic structures.
        """

        # Execute the base class __init__ method.
        super(Relax_fit, self).__init__()

        # Place methods into the API (aliasing the generic spin-based implementations from API_common).
        self.base_data_loop = self._base_data_loop_spin
        self.model_loop = self._model_loop_spin
        self.return_conversion_factor = self._return_no_conversion_factor
        self.return_value = self._return_value_general
        self.set_error = self._set_error_spin
        self.set_param_values = self._set_param_values_spin
        self.set_selected_sim = self._set_selected_sim_spin
        self.sim_init_values = self._sim_init_values_spin
        self.sim_return_param = self._sim_return_param_spin
        self.sim_return_selected = self._sim_return_selected_spin

        # Set up the spin parameters (the grace_string values are Grace plotting axis labels).
        self.PARAMS.add('intensities', scope='spin', py_type=dict, grace_string='\\qPeak intensities\\Q')
        self.PARAMS.add('relax_times', scope='spin', py_type=dict, grace_string='\\qRelaxation time period (s)\\Q')
        self.PARAMS.add('rx', scope='spin', default=8.0, desc='Either the R1 or R2 relaxation rate', set='params', py_type=float, grace_string='\\qR\\sx\\Q', err=True, sim=True)
        self.PARAMS.add('i0', scope='spin', default=10000.0, desc='The initial intensity', py_type=float, set='params', grace_string='\\qI\\s0\\Q', err=True, sim=True)
        self.PARAMS.add('iinf', scope='spin', default=0.0, desc='The intensity at infinity', py_type=float, set='params', grace_string='\\qI\\sinf\\Q', err=True, sim=True)
        self.PARAMS.add('params', scope='spin', desc='The model parameters', py_type=list)

        # Add the minimisation data (chi2, iteration counts, etc. stored per spin, not globally).
        self.PARAMS.add_min_data(min_stats_global=False, min_stats_spin=True)
87 - def _assemble_param_vector(self, spin=None, sim_index=None):
88 """Assemble the exponential curve parameter vector (as a numpy array). 89 90 @keyword spin: The spin data container. 91 @type spin: SpinContainer instance 92 @keyword sim_index: The optional MC simulation index. 93 @type sim_index: int 94 @return: An array of the parameter values of the exponential model. 95 @rtype: numpy array 96 """ 97 98 # Initialise. 99 param_vector = [] 100 101 # Loop over the model parameters. 102 for i in range(len(spin.params)): 103 # Relaxation rate. 104 if spin.params[i] == 'rx': 105 if sim_index != None: 106 param_vector.append(spin.rx_sim[sim_index]) 107 elif spin.rx == None: 108 param_vector.append(0.0) 109 else: 110 param_vector.append(spin.rx) 111 112 # Initial intensity. 113 elif spin.params[i] == 'i0': 114 if sim_index != None: 115 param_vector.append(spin.i0_sim[sim_index]) 116 elif spin.i0 == None: 117 param_vector.append(0.0) 118 else: 119 param_vector.append(spin.i0) 120 121 # Intensity at infinity. 122 elif spin.params[i] == 'iinf': 123 if sim_index != None: 124 param_vector.append(spin.iinf_sim[sim_index]) 125 elif spin.iinf == None: 126 param_vector.append(0.0) 127 else: 128 param_vector.append(spin.iinf) 129 130 # Return a numpy array. 131 return array(param_vector, float64)
132 133
134 - def _assemble_scaling_matrix(self, spin=None, scaling=True):
135 """Create and return the scaling matrix. 136 137 @keyword spin: The spin data container. 138 @type spin: SpinContainer instance 139 @keyword scaling: A flag which if false will cause the identity matrix to be returned. 140 @type scaling: bool 141 @return: The diagonal and square scaling matrix. 142 @rtype: numpy diagonal matrix 143 """ 144 145 # Initialise. 146 scaling_matrix = identity(len(spin.params), float64) 147 i = 0 148 149 # No diagonal scaling. 150 if not scaling: 151 return scaling_matrix 152 153 # Loop over the parameters. 154 for i in range(len(spin.params)): 155 # Relaxation rate. 156 if spin.params[i] == 'rx': 157 pass 158 159 # Intensity scaling. 160 elif search('^i', spin.params[i]): 161 scaling_matrix[i, i] = round_to_next_order(max(spin.intensities.values())) 162 163 # Return the scaling matrix. 164 return scaling_matrix
165 166
167 - def _back_calc(self, spin=None, relax_time_id=None):
168 """Back-calculation of peak intensity for the given relaxation time. 169 170 @keyword spin: The spin container. 171 @type spin: SpinContainer instance 172 @keyword relax_time_id: The ID string for the desired relaxation time. 173 @type relax_time_id: str 174 @return: The peak intensity for the desired relaxation time. 175 @rtype: float 176 """ 177 178 # Create the initial parameter vector. 179 param_vector = self._assemble_param_vector(spin=spin) 180 181 # Create a scaling matrix. 182 scaling_matrix = self._assemble_scaling_matrix(spin=spin, scaling=False) 183 184 # The keys. 185 keys = list(spin.intensities.keys()) 186 187 # The peak intensities and times. 188 values = [] 189 errors = [] 190 times = [] 191 for key in keys: 192 values.append(spin.intensities[key]) 193 errors.append(spin.intensity_err[key]) 194 times.append(cdp.relax_times[key]) 195 196 # The scaling matrix in a diagonalised list form. 197 scaling_list = [] 198 for i in range(len(scaling_matrix)): 199 scaling_list.append(scaling_matrix[i, i]) 200 201 # Initialise the relaxation fit functions. 202 setup(num_params=len(spin.params), num_times=len(cdp.relax_times), values=values, sd=errors, relax_times=times, scaling_matrix=scaling_list) 203 204 # Make a single function call. This will cause back calculation and the data will be stored in the C module. 205 self._func(param_vector) 206 207 # Get the data back. 208 results = back_calc_I() 209 210 # Return the correct peak height. 211 return results[keys.index(relax_time_id)]
212 213
214 - def _disassemble_param_vector(self, param_vector=None, spin=None, sim_index=None):
215 """Disassemble the parameter vector. 216 217 @keyword param_vector: The parameter vector. 218 @type param_vector: numpy array 219 @keyword spin: The spin data container. 220 @type spin: SpinContainer instance 221 @keyword sim_index: The optional MC simulation index. 222 @type sim_index: int 223 """ 224 225 # Monte Carlo simulations. 226 if sim_index != None: 227 # The relaxation rate. 228 spin.rx_sim[sim_index] = param_vector[0] 229 230 # Initial intensity. 231 spin.i0_sim[sim_index] = param_vector[1] 232 233 # Intensity at infinity. 234 if cdp.curve_type == 'inv': 235 spin.iinf_sim[sim_index] = param_vector[2] 236 237 # Parameter values. 238 else: 239 # The relaxation rate. 240 spin.rx = param_vector[0] 241 242 # Initial intensity. 243 spin.i0 = param_vector[1] 244 245 # Intensity at infinity. 246 if cdp.curve_type == 'inv': 247 spin.iinf = param_vector[2]
248 249
250 - def _func(self, params):
251 """Wrapper function for the C module, for converting numpy arrays. 252 253 @param params: The parameter array from the minimisation code. 254 @type params: numpy array 255 @return: The function value generated by the C module. 256 @rtype: float 257 """ 258 259 # Call the C code. 260 chi2 = func(params.tolist()) 261 262 # Return the chi2 value. 263 return chi2
264 265
    def _dfunc(self, params):
        """Wrapper function for the C module gradient, for converting numpy arrays.

        This currently does nothing - it is a no-op placeholder.

        @param params: The parameter array from the minimisation code (unused).
        @type params:  numpy array
        """
271 272
    def _d2func(self, params):
        """Wrapper function for the C module Hessian, for converting numpy arrays.

        This currently does nothing - it is a no-op placeholder.

        @param params: The parameter array from the minimisation code (unused).
        @type params:  numpy array
        """
278 279
280 - def _grid_search_setup(self, spin=None, param_vector=None, lower=None, upper=None, inc=None, scaling_matrix=None):
281 """The grid search setup function. 282 283 @keyword spin: The spin data container. 284 @type spin: SpinContainer instance 285 @keyword param_vector: The parameter vector. 286 @type param_vector: numpy array 287 @keyword lower: The lower bounds of the grid search which must be equal to the 288 number of parameters in the model. This optional argument is 289 only used when doing a grid search. 290 @type lower: array of numbers 291 @keyword upper: The upper bounds of the grid search which must be equal to the 292 number of parameters in the model. This optional argument is 293 only used when doing a grid search. 294 @type upper: array of numbers 295 @keyword inc: The increments for each dimension of the space for the grid 296 search. The number of elements in the array must equal to the 297 number of parameters in the model. This argument is only used 298 when doing a grid search. 299 @type inc: array of int 300 @keyword scaling_matrix: The scaling matrix. 301 @type scaling_matrix: numpy diagonal matrix 302 @return: A tuple of the grid size and the minimisation options. For the 303 minimisation options, the first dimension corresponds to the 304 model parameter. The second dimension is a list of the number 305 of increments, the lower bound, and upper bound. 306 @rtype: (int, list of lists [int, float, float]) 307 """ 308 309 # The length of the parameter array. 310 n = len(param_vector) 311 312 # Make sure that the length of the parameter array is > 0. 313 if n == 0: 314 raise RelaxError("Cannot run a grid search on a model with zero parameters.") 315 316 # Lower bounds. 317 if lower != None and len(lower) != n: 318 raise RelaxLenError('lower bounds', n) 319 320 # Upper bounds. 321 if upper != None and len(upper) != n: 322 raise RelaxLenError('upper bounds', n) 323 324 # Increments. 325 if isinstance(inc, list) and len(inc) != n: 326 raise RelaxLenError('increment', n) 327 elif isinstance(inc, int): 328 inc = [inc]*n 329 330 # Set up the default bounds. 
331 if not lower: 332 # Init. 333 lower = [] 334 upper = [] 335 336 # Loop over the parameters. 337 for i in range(n): 338 # Relaxation rate (from 0 to 20 s^-1). 339 if spin.params[i] == 'rx': 340 lower.append(0.0) 341 upper.append(20.0) 342 343 # Intensity 344 elif search('^i', spin.params[i]): 345 # Find the ID of the first time point. 346 min_time = min(cdp.relax_times.values()) 347 for key in list(cdp.relax_times.keys()): 348 if cdp.relax_times[key] == min_time: 349 id = key 350 break 351 352 # Defaults. 353 lower.append(0.0) 354 upper.append(average(spin.intensities[id])) 355 356 # Diagonal scaling of minimisation options. 357 lower_new = [] 358 upper_new = [] 359 for i in range(n): 360 lower_new.append(lower[i] / scaling_matrix[i, i]) 361 upper_new.append(upper[i] / scaling_matrix[i, i]) 362 363 # Return the minimisation options. 364 return inc, lower_new, upper_new
365 366
367 - def _linear_constraints(self, spin=None, scaling_matrix=None):
368 """Set up the relaxation curve fitting linear constraint matrices A and b. 369 370 Standard notation 371 ================= 372 373 The relaxation rate constraints are:: 374 375 Rx >= 0 376 377 The intensity constraints are:: 378 379 I0 >= 0 380 Iinf >= 0 381 382 383 Matrix notation 384 =============== 385 386 In the notation A.x >= b, where A is an matrix of coefficients, x is an array of parameter 387 values, and b is a vector of scalars, these inequality constraints are:: 388 389 | 1 0 0 | | Rx | | 0 | 390 | | | | | | 391 | 1 0 0 | . | I0 | >= | 0 | 392 | | | | | | 393 | 1 0 0 | | Iinf | | 0 | 394 395 396 @keyword spin: The spin data container. 397 @type spin: SpinContainer instance 398 @keyword scaling_matrix: The diagonal, square scaling matrix. 399 @type scaling_matrix: numpy diagonal matrix 400 """ 401 402 # Initialisation (0..j..m). 403 A = [] 404 b = [] 405 n = len(spin.params) 406 zero_array = zeros(n, float64) 407 i = 0 408 j = 0 409 410 # Loop over the parameters. 411 for k in range(len(spin.params)): 412 # Relaxation rate. 413 if spin.params[k] == 'rx': 414 # Rx >= 0. 415 A.append(zero_array * 0.0) 416 A[j][i] = 1.0 417 b.append(0.0) 418 j = j + 1 419 420 # Intensity parameter. 421 elif search('^i', spin.params[k]): 422 # I0, Iinf >= 0. 423 A.append(zero_array * 0.0) 424 A[j][i] = 1.0 425 b.append(0.0) 426 j = j + 1 427 428 # Increment i. 429 i = i + 1 430 431 # Convert to numpy data structures. 432 A = array(A, float64) 433 b = array(b, float64) 434 435 return A, b
436 437
438 - def _model_setup(self, model, params):
439 """Update various model specific data structures. 440 441 @param model: The exponential curve type. 442 @type model: str 443 @param params: A list consisting of the model parameters. 444 @type params: list of str 445 """ 446 447 # Set the model. 448 cdp.curve_type = model 449 450 # Loop over the sequence. 451 for spin in spin_loop(): 452 # Skip deselected spins. 453 if not spin.select: 454 continue 455 456 # Initialise the data structures (if needed). 457 self.data_init(spin) 458 459 # The model and parameter names. 460 spin.model = model 461 spin.params = params
462 463
464 - def _relax_time(self, time=0.0, spectrum_id=None):
465 """Set the relaxation time period associated with a given spectrum. 466 467 @keyword time: The time, in seconds, of the relaxation period. 468 @type time: float 469 @keyword spectrum_id: The spectrum identification string. 470 @type spectrum_id: str 471 """ 472 473 # Test if the spectrum id exists. 474 if spectrum_id not in cdp.spectrum_ids: 475 raise RelaxError("The peak heights corresponding to spectrum id '%s' have not been loaded." % spectrum_id) 476 477 # Initialise the global relaxation time data structure if needed. 478 if not hasattr(cdp, 'relax_times'): 479 cdp.relax_times = {} 480 481 # Add the time at the correct position. 482 cdp.relax_times[spectrum_id] = time
483 484
    def _select_model(self, model='exp'):
        """Function for selecting the model of the exponential curve.

        @keyword model: The exponential curve type.  Can be one of 'exp' (two parameter
                        exponential fit) or 'inv' (three parameter inversion recovery fit).
        @type model:    str
        """

        # Test if the current pipe exists.
        pipes.test()

        # Test if the pipe type is set to 'relax_fit'.
        function_type = cdp.pipe_type
        if function_type != 'relax_fit':
            # NOTE(review): 'specific_setup' is not defined anywhere in this module, so this
            # error path would raise a NameError rather than the intended
            # RelaxFuncSetupError - confirm against the rest of the codebase.
            raise RelaxFuncSetupError(specific_setup.get_string(function_type))

        # Test if sequence data is loaded.
        if not exists_mol_res_spin_data():
            raise RelaxNoSequenceError

        # Two parameter exponential fit.
        if model == 'exp':
            print("Two parameter exponential fit.")
            params = ['rx', 'i0']

        # Three parameter inversion recovery fit.
        elif model == 'inv':
            print("Three parameter inversion recovery fit.")
            params = ['rx', 'i0', 'iinf']

        # Invalid model.
        else:
            raise RelaxError("The model '" + model + "' is invalid.")

        # Set up the model.
        self._model_setup(model, params)
520 521
522 - def create_mc_data(self, data_id=None):
523 """Create the Monte Carlo peak intensity data. 524 525 @keyword data_id: The spin identification string, as yielded by the base_data_loop() generator method. 526 @type data_id: str 527 @return: The Monte Carlo simulation data. 528 @rtype: list of floats 529 """ 530 531 # Initialise the MC data data structure. 532 mc_data = {} 533 534 # Get the spin container. 535 spin = return_spin(data_id) 536 537 # Skip deselected spins. 538 if not spin.select: 539 return 540 541 # Skip spins which have no data. 542 if not hasattr(spin, 'intensities'): 543 return 544 545 # Test if the model is set. 546 if not hasattr(spin, 'model') or not spin.model: 547 raise RelaxNoModelError 548 549 # Loop over the spectral time points. 550 for id in list(cdp.relax_times.keys()): 551 # Back calculate the value. 552 value = self._back_calc(spin=spin, relax_time_id=id) 553 554 # Append the value. 555 mc_data[id] = value 556 557 # Return the MC data. 558 return mc_data
559 560
    def data_init(self, data_cont, sim=False):
        """Initialise the spin specific data structures.

        @param data_cont: The spin container.
        @type data_cont:  SpinContainer instance
        @keyword sim:     The Monte Carlo simulation flag, which if true will initialise the
                          simulation data structure.  NOTE(review): this flag is accepted but
                          never used by this implementation - confirm whether that is intended.
        @type sim:        bool
        """

        # Loop over the data structure names.
        for name in self.data_names(set='params'):
            # Data structures which are initially empty arrays.
            list_data = [ 'params' ]
            if name in list_data:
                init_data = []

            # Otherwise initialise the data structure to None.
            else:
                init_data = None

            # If the name is not in 'data_cont', add it (existing values are never overwritten).
            if not hasattr(data_cont, name):
                setattr(data_cont, name, init_data)


    # User function documentation: the default value table for the relax user manual and help system.
    default_value_doc = Desc_container("Relaxation curve fitting default values")
    default_value_doc.add_paragraph("These values are completely arbitrary as peak heights (or volumes) are extremely variable and the Rx value is a compensation for both the R1 and R2 values.")
    _table = uf_tables.add_table(label="table: curve-fit default values", caption="Relaxation curve fitting default values.")
    _table.add_headings(["Data type", "Object name", "Value"])
    _table.add_row(["Relaxation rate", "'rx'", "8.0"])
    _table.add_row(["Initial intensity", "'i0'", "10000.0"])
    _table.add_row(["Intensity at infinity", "'iinf'", "0.0"])
    default_value_doc.add_table(_table.label)
596 - def grid_search(self, lower=None, upper=None, inc=None, constraints=True, verbosity=1, sim_index=None):
597 """The exponential curve fitting grid search method. 598 599 @keyword lower: The lower bounds of the grid search which must be equal to the number of parameters in the model. 600 @type lower: array of numbers 601 @keyword upper: The upper bounds of the grid search which must be equal to the number of parameters in the model. 602 @type upper: array of numbers 603 @keyword inc: The increments for each dimension of the space for the grid search. The number of elements in the array must equal to the number of parameters in the model. 604 @type inc: array of int 605 @keyword constraints: If True, constraints are applied during the grid search (eliminating parts of the grid). If False, no constraints are used. 606 @type constraints: bool 607 @keyword verbosity: A flag specifying the amount of information to print. The higher the value, the greater the verbosity. 608 @type verbosity: int 609 @keyword sim_index: The index of the simulation to apply the grid search to. If None, the normal model is optimised. 610 @type sim_index: int 611 """ 612 613 # Minimisation. 614 self.minimise(min_algor='grid', lower=lower, upper=upper, inc=inc, constraints=constraints, verbosity=verbosity, sim_index=sim_index)
615 616
    def minimise(self, min_algor=None, min_options=None, func_tol=None, grad_tol=None, max_iterations=None, constraints=False, scaling=True, verbosity=0, sim_index=None, lower=None, upper=None, inc=None):
        """Relaxation curve fitting minimisation method.

        @keyword min_algor:      The minimisation algorithm to use.
        @type min_algor:         str
        @keyword min_options:    An array of options to be used by the minimisation algorithm.
        @type min_options:       array of str
        @keyword func_tol:       The function tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
        @type func_tol:          None or float
        @keyword grad_tol:       The gradient tolerance which, when reached, terminates optimisation.  Setting this to None turns off the check.
        @type grad_tol:          None or float
        @keyword max_iterations: The maximum number of iterations for the algorithm.
        @type max_iterations:    int
        @keyword constraints:    If True, constraints are used during optimisation.
        @type constraints:       bool
        @keyword scaling:        If True, diagonal scaling is enabled during optimisation to allow the problem to be better conditioned.
        @type scaling:           bool
        @keyword verbosity:      The amount of information to print.  The higher the value, the greater the verbosity.
        @type verbosity:         int
        @keyword sim_index:      The index of the simulation to optimise.  This should be None if normal optimisation is desired.
        @type sim_index:         None or int
        @keyword lower:          The lower bounds of the grid search which must be equal to the number of parameters in the model.  This optional argument is only used when doing a grid search.
        @type lower:             array of numbers
        @keyword upper:          The upper bounds of the grid search which must be equal to the number of parameters in the model.  This optional argument is only used when doing a grid search.
        @type upper:             array of numbers
        @keyword inc:            The increments for each dimension of the space for the grid search.  The number of elements in the array must equal to the number of parameters in the model.  This argument is only used when doing a grid search.
        @type inc:               array of int
        """

        # Test if sequence data is loaded.
        if not exists_mol_res_spin_data():
            raise RelaxNoSequenceError

        # Loop over the sequence, optimising each spin independently.
        for spin, mol_name, res_num, res_name in spin_loop(full_info=True):
            # Skip deselected spins.
            if not spin.select:
                continue

            # Skip spins which have no data.
            if not hasattr(spin, 'intensities'):
                continue

            # Create the initial parameter vector.
            param_vector = self._assemble_param_vector(spin=spin)

            # Diagonal scaling - the parameter vector is moved into the scaled space.
            scaling_matrix = self._assemble_scaling_matrix(spin=spin, scaling=scaling)
            if len(scaling_matrix):
                param_vector = dot(inv(scaling_matrix), param_vector)

            # Get the grid search minimisation options.
            if match('^[Gg]rid', min_algor):
                inc, lower_new, upper_new = self._grid_search_setup(spin=spin, param_vector=param_vector, lower=lower, upper=upper, inc=inc, scaling_matrix=scaling_matrix)

            # Linear constraints.
            if constraints:
                A, b = self._linear_constraints(spin=spin, scaling_matrix=scaling_matrix)
            else:
                A, b = None, None

            # Print out.
            if verbosity >= 1:
                # Get the spin id string.
                spin_id = generate_spin_id_unique(mol_name=mol_name, res_num=res_num, res_name=res_name, spin_num=spin.num, spin_name=spin.name)

                # Individual spin printout.
                if verbosity >= 2:
                    print("\n\n")

                string = "Fitting to spin " + repr(spin_id)
                print("\n\n" + string)
                print(len(string) * '~')


            # Initialise the function to minimise.
            ######################################

            # The keys.
            keys = list(spin.intensities.keys())

            # The peak intensities and times.
            values = []
            errors = []
            times = []
            for key in keys:
                # The values (normal or Monte Carlo simulation data).
                if sim_index == None:
                    values.append(spin.intensities[key])
                else:
                    values.append(spin.sim_intensities[sim_index][key])

                # The errors.
                errors.append(spin.intensity_err[key])

                # The relaxation times.
                times.append(cdp.relax_times[key])

            # The scaling matrix in a diagonalised list form.
            scaling_list = []
            for i in range(len(scaling_matrix)):
                scaling_list.append(scaling_matrix[i, i])

            # Initialise the C target function for this spin's data.
            setup(num_params=len(spin.params), num_times=len(values), values=values, sd=errors, relax_times=times, scaling_matrix=scaling_list)


            # Setup the minimisation algorithm when constraints are present.
            ################################################################

            # With constraints, the true algorithm is wrapped inside the constraint
            # algorithm and is found as the first minimisation option.
            if constraints and not match('^[Gg]rid', min_algor):
                algor = min_options[0]
            else:
                algor = min_algor


            # Levenberg-Marquardt minimisation.
            ###################################

            # NOTE(review): this branch appears to be dead/broken code - 'relax_error' and
            # 'spin.relax_times' are not defined anywhere in this module, and 'self.relax_fit'
            # does not exist, so entering it would raise a NameError or AttributeError.
            # Confirm whether LM support was ever completed before relying on it.
            if match('[Ll][Mm]$', algor) or match('[Ll]evenburg-[Mm]arquardt$', algor):
                # Reconstruct the error data structure.
                lm_error = zeros(len(spin.relax_times), float64)
                index = 0
                for k in range(len(spin.relax_times)):
                    lm_error[index:index+len(relax_error[k])] = relax_error[k]
                    index = index + len(relax_error[k])

                min_options = min_options + (self.relax_fit.lm_dri, lm_error)


            # Minimisation.
            ###############

            # Grid search.
            if search('^[Gg]rid', min_algor):
                results = grid(func=self._func, args=(), num_incs=inc, lower=lower_new, upper=upper_new, A=A, b=b, verbosity=verbosity)

                # Unpack the results.
                param_vector, chi2, iter_count, warning = results
                f_count = iter_count
                g_count = 0.0
                h_count = 0.0

            # Minimisation.
            else:
                results = generic_minimise(func=self._func, dfunc=self._dfunc, d2func=self._d2func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, A=A, b=b, full_output=True, print_flag=verbosity)

                # Unpack the results.
                if results == None:
                    return
                param_vector, chi2, iter_count, f_count, g_count, h_count, warning = results

            # Scaling - map the optimised parameters back from the scaled space.
            if scaling:
                param_vector = dot(scaling_matrix, param_vector)

            # Disassemble the parameter vector.
            self._disassemble_param_vector(param_vector=param_vector, spin=spin, sim_index=sim_index)

            # Monte Carlo minimisation statistics.
            if sim_index != None:
                # Chi-squared statistic.
                spin.chi2_sim[sim_index] = chi2

                # Iterations.
                spin.iter_sim[sim_index] = iter_count

                # Function evaluations.
                spin.f_count_sim[sim_index] = f_count

                # Gradient evaluations.
                spin.g_count_sim[sim_index] = g_count

                # Hessian evaluations.
                spin.h_count_sim[sim_index] = h_count

                # Warning.
                spin.warning_sim[sim_index] = warning


            # Normal statistics.
            else:
                # Chi-squared statistic.
                spin.chi2 = chi2

                # Iterations.
                spin.iter = iter_count

                # Function evaluations.
                spin.f_count = f_count

                # Gradient evaluations.
                spin.g_count = g_count

                # Hessian evaluations.
                spin.h_count = h_count

                # Warning.
                spin.warning = warning
815 816
817 - def overfit_deselect(self, data_check=True, verbose=True):
818 """Deselect spins which have insufficient data to support minimisation. 819 820 @keyword data_check: A flag to signal if the presence of base data is to be checked for. 821 @type data_check: bool 822 @keyword verbose: A flag which if True will allow printouts. 823 @type verbose: bool 824 """ 825 826 # Print out. 827 if verbose: 828 print("\nOver-fit spin deselection:") 829 830 # Test the sequence data exists. 831 if not exists_mol_res_spin_data(): 832 raise RelaxNoSequenceError 833 834 # Loop over spin data. 835 deselect_flag = False 836 for spin, spin_id in spin_loop(return_id=True): 837 # Skip deselected spins. 838 if not spin.select: 839 continue 840 841 # Check if data exists. 842 if not hasattr(spin, 'intensities'): 843 warn(RelaxDeselectWarning(spin_id, 'missing intensity data')) 844 spin.select = False 845 deselect_flag = True 846 continue 847 848 # Require 3 or more data points. 849 elif len(spin.intensities) < 3: 850 warn(RelaxDeselectWarning(spin_id, 'insufficient data, 3 or more data points are required')) 851 spin.select = False 852 deselect_flag = True 853 continue 854 855 # Check that the number of relaxation times is complete. 856 if len(spin.intensities) != len(cdp.relax_times): 857 raise RelaxError("The %s peak intensity points of the spin '%s' does not match the expected number of %s (the IDs %s do not match %s)." % (len(spin.intensities), spin_id, len(cdp.relax_times), list(spin.intensities.keys()), list(cdp.relax_times.keys()))) 858 859 # Final printout. 860 if verbose and not deselect_flag: 861 print("No spins have been deselected.")
862 863
    def return_data(self, data_id=None):
        """Function for returning the peak intensity data structure.

        @keyword data_id: The spin identification string, as yielded by the base_data_loop() generator method.
        @type data_id:    str
        @return:          The peak intensity data structure.
        @rtype:           list of float
        """

        # Get the spin container.
        spin = return_spin(data_id)

        # Return the peak intensities.
        return spin.intensities


    # User function documentation: the data type string matching pattern table for the relax user manual and help system.
    return_data_name_doc = Desc_container("Relaxation curve fitting data type string matching patterns")
    _table = uf_tables.add_table(label="table: curve-fit data type patterns", caption="Relaxation curve fitting data type string matching patterns.")
    _table.add_headings(["Data type", "Object name"])
    _table.add_row(["Relaxation rate", "'rx'"])
    _table.add_row(["Peak intensities (series)", "'intensities'"])
    _table.add_row(["Initial intensity", "'i0'"])
    _table.add_row(["Intensity at infinity", "'iinf'"])
    _table.add_row(["Relaxation period times (series)", "'relax_times'"])
    return_data_name_doc.add_table(_table.label)
891 - def return_error(self, data_id):
892 """Return the standard deviation data structure. 893 894 @param data_id: The spin identification string, as yielded by the base_data_loop() generator 895 method. 896 @type data_id: str 897 @return: The standard deviation data structure. 898 @rtype: list of float 899 """ 900 901 # Get the spin container. 902 spin = return_spin(data_id) 903 904 # Return the error list. 905 return spin.intensity_err
906 907
    def return_units(self, param):
        """Dummy method which returns None as the stats have no units.

        @param param: The name of the parameter to return the units string for (unused).
        @type param:  str
        @return:      Nothing.
        @rtype:       None
        """

        # Unitless.
        return None


    # User function documentation: details for the value setting user function.
    set_doc = Desc_container("Relaxation curve fitting set details")
    set_doc.add_paragraph("Only three parameters can be set, the relaxation rate (Rx), the initial intensity (I0), and the intensity at infinity (Iinf). Setting the parameter Iinf has no effect if the chosen model is that of the exponential curve which decays to zero.")
925 - def sim_pack_data(self, data_id, sim_data):
926 """Pack the Monte Carlo simulation data. 927 928 @param data_id: The spin identification string, as yielded by the base_data_loop() generator method. 929 @type data_id: str 930 @param sim_data: The Monte Carlo simulation data. 931 @type sim_data: list of float 932 """ 933 934 # Get the spin container. 935 spin = return_spin(data_id) 936 937 # Create the data structure. 938 spin.sim_intensities = sim_data
939