mailr6869 - /branches/rdc_analysis/specific_fns/n_state_model.py




Posted by edward on July 09, 2008 - 13:29:
Author: bugman
Date: Wed Jul  9 13:29:21 2008
New Revision: 6869

URL: http://svn.gna.org/viewcvs/relax?rev=6869&view=rev
Log:
Spun out some of the minimisation code into the minimise_setup_tensors() method.
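In outline this is an extract-method refactoring: the alignment tensor set-up is moved out of minimise() into the new minimise_setup_tensors() helper, which returns the initialised target function class (N_state_opt) for the caller to optimise. The following self-contained sketch uses toy placeholder classes rather than the real relax code (cdp, generic_minimise and the tensor data structures are omitted) and is only meant to illustrate the call structure that results from the diff below.

    # Sketch of the extract-method pattern used in this commit.  All names other
    # than minimise_setup_tensors() are placeholders, not relax code.
    class TargetFunction:
        """Stand-in for maths_fns.n_state_model.N_state_opt."""
        def __init__(self, data):
            self.data = data

        def func(self, params):
            # Toy chi-squared: sum of squared deviations from the data.
            return sum((p - d)**2 for p, d in zip(params, self.data))


    class NStateModelSketch:
        def minimise_setup_tensors(self):
            """Assemble the data and return the initialised target function."""
            data = [1.0, 2.0, 3.0]     # the real method builds the full and reduced tensor lists here
            return TargetFunction(data)

        def minimise(self):
            """Top-level minimisation, now delegating the set-up step to the helper."""
            model = self.minimise_setup_tensors()
            params = [0.0, 0.0, 0.0]
            return model.func(params)  # the real method passes model.func to generic_minimise()


    print(NStateModelSketch().minimise())  # 14.0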


Modified:
    branches/rdc_analysis/specific_fns/n_state_model.py

Modified: branches/rdc_analysis/specific_fns/n_state_model.py
URL: http://svn.gna.org/viewcvs/relax/branches/rdc_analysis/specific_fns/n_state_model.py?rev=6869&r1=6868&r2=6869&view=diff
==============================================================================
--- branches/rdc_analysis/specific_fns/n_state_model.py (original)
+++ branches/rdc_analysis/specific_fns/n_state_model.py Wed Jul  9 13:29:21 2008
@@ -32,6 +32,7 @@
 import generic_fns
 import generic_fns.structure.geometric
 import generic_fns.structure.mass
+from generic_fns.mol_res_spin import spin_loop
 from generic_fns.structure.internal import Internal
 from maths_fns.n_state_model import N_state_opt
 from maths_fns.rotation_matrix import R_2vect, R_euler_zyz
@@ -559,6 +560,94 @@
         if constraints:
             A, b = self.linear_constraints()
 
+        # Determine if alignment tensors or RDCs are to be used.
+        tensor_flag = False
+        rdc_flag = False
+        if hasattr(cdp, 'align_tensors'):
+            tensor_flag = True
+        for spin in spin_loop():
+            if hasattr(spin, 'rdc'):
+                rdc_flag = True
+                break
+
+        # If both data types are present it cannot be determined which to use, so complain.
+        if tensor_flag and rdc_flag:
+            raise RelaxError, "Both RDCs and alignment tensors are present.  Cannot determine which will be used for optimisation."
+
+        # Set up minimisation using alignment tensors.
+        if tensor_flag:
+            results = self.minimise_setup_tensors()
+
+        # Minimisation.
+        if constraints:
+            results = generic_minimise(func=model.func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, A=A, b=b, full_output=1, print_flag=verbosity)
+        else:
+            results = generic_minimise(func=model.func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, full_output=1, print_flag=verbosity)
+        if results == None:
+            return
+
+        # Disassemble the results.
+        param_vector, func, iter_count, f_count, g_count, h_count, warning = results
+
+        # Catch infinite chi-squared values.
+        if isInf(func):
+            raise RelaxInfError, 'chi-squared'
+
+        # Catch chi-squared values of NaN.
+        if isNaN(func):
+            raise RelaxNaNError, 'chi-squared'
+
+        # Disassemble the parameter vector.
+        self.disassemble_param_vector(param_vector=param_vector, sim_index=sim_index)
+
+        # Monte Carlo minimisation statistics.
+        if sim_index != None:
+            # Chi-squared statistic.
+            cdp.chi2_sim[sim_index] = func
+
+            # Iterations.
+            cdp.iter_sim[sim_index] = iter_count
+
+            # Function evaluations.
+            cdp.f_count_sim[sim_index] = f_count
+
+            # Gradient evaluations.
+            cdp.g_count_sim[sim_index] = g_count
+
+            # Hessian evaluations.
+            cdp.h_count_sim[sim_index] = h_count
+
+            # Warning.
+            cdp.warning_sim[sim_index] = warning
+
+        # Normal statistics.
+        else:
+            # Chi-squared statistic.
+            cdp.chi2 = func
+
+            # Iterations.
+            cdp.iter = iter_count
+
+            # Function evaluations.
+            cdp.f_count = f_count
+
+            # Gradient evaluations.
+            cdp.g_count = g_count
+
+            # Hessian evaluations.
+            cdp.h_count = h_count
+
+            # Warning.
+            cdp.warning = warning
+
+
+    def minimise_setup_tensors(self):
+        """Set up minimisation for the N-state model using alignment tensors.
+
+        @return:        The initialised N_state_opt class for minimisation.
+        @rtype:         N_state_opt instance
+        """
+
         # Initialise.
         full_tensors = []
         red_tensor_elem = []
@@ -607,65 +696,8 @@
         # Set up the class instance containing the target function.
         model = N_state_opt(N=cdp.N, init_params=param_vector, full_tensors=full_tensors, red_data=red_tensor_elem, red_errors=red_tensor_err, full_in_ref_frame=full_in_ref_frame)
 
-        # Minimisation.
-        if constraints:
-            results = generic_minimise(func=model.func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, A=A, b=b, full_output=1, print_flag=verbosity)
-        else:
-            results = generic_minimise(func=model.func, args=(), x0=param_vector, min_algor=min_algor, min_options=min_options, func_tol=func_tol, grad_tol=grad_tol, maxiter=max_iterations, full_output=1, print_flag=verbosity)
-        if results == None:
-            return
-        param_vector, func, iter_count, f_count, g_count, h_count, warning = results
-
-        # Catch infinite chi-squared values.
-        if isInf(func):
-            raise RelaxInfError, 'chi-squared'
-
-        # Catch chi-squared values of NaN.
-        if isNaN(func):
-            raise RelaxNaNError, 'chi-squared'
-
-        # Disassemble the parameter vector.
-        self.disassemble_param_vector(param_vector=param_vector, sim_index=sim_index)
-
-        # Monte Carlo minimisation statistics.
-        if sim_index != None:
-            # Chi-squared statistic.
-            cdp.chi2_sim[sim_index] = func
-
-            # Iterations.
-            cdp.iter_sim[sim_index] = iter_count
-
-            # Function evaluations.
-            cdp.f_count_sim[sim_index] = f_count
-
-            # Gradient evaluations.
-            cdp.g_count_sim[sim_index] = g_count
-
-            # Hessian evaluations.
-            cdp.h_count_sim[sim_index] = h_count
-
-            # Warning.
-            cdp.warning_sim[sim_index] = warning
-
-        # Normal statistics.
-        else:
-            # Chi-squared statistic.
-            cdp.chi2 = func
-
-            # Iterations.
-            cdp.iter = iter_count
-
-            # Function evaluations.
-            cdp.f_count = f_count
-
-            # Gradient evaluations.
-            cdp.g_count = g_count
-
-            # Hessian evaluations.
-            cdp.h_count = h_count
-
-            # Warning.
-            cdp.warning = warning
+        # Return the instantiated class.
+        return model
 
 
     def number_of_states(self, N=None):
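As a usage note, the new code at the top of minimise() chooses the optimisation path from the data that happen to be loaded: hasattr(cdp, 'align_tensors') flags tensor data, a spin_loop() pass flags per-spin RDC data, and finding both is treated as an error. A small hedged sketch of that detection logic follows, with toy objects standing in for relax's cdp and spin containers.

    # Illustration of the data-presence detection added to minimise().  The Cdp
    # and Spin classes below are toy stand-ins, not relax data structures.
    class Cdp(object):
        pass


    class Spin(object):
        pass


    def detect_data(cdp, spins):
        """Return (tensor_flag, rdc_flag), raising if both data types are present."""
        tensor_flag = hasattr(cdp, 'align_tensors')
        rdc_flag = any(hasattr(spin, 'rdc') for spin in spins)
        if tensor_flag and rdc_flag:
            raise ValueError("Both RDCs and alignment tensors are present.  "
                             "Cannot determine which will be used for optimisation.")
        return tensor_flag, rdc_flag


    # Example: one spin carries an RDC value and no tensors are loaded.
    spin = Spin()
    spin.rdc = 2.1
    print(detect_data(Cdp(), [spin]))  # (False, True)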



