Re: r25227 - /trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py



Posted by Edward d'Auvergne on August 25, 2014 - 16:31:
Hi Troels,

It's clear that the constraints are adding a lot of time.  And for
exponential curve-fitting, constraints are not needed at all.  I think we
should turn them off in the dispersion auto-analysis, just for the
'R2eff' model optimisation.  What do you think?
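
Something along these lines, purely as a sketch (the loop and the
minimise() call below are placeholders, not the real auto-analysis code):

    # Rough sketch only:  the model loop and the minimise() call are
    # placeholders for whatever the dispersion auto-analysis really does,
    # they are not the actual relax API.
    for model in models:
        # Exponential curve-fitting needs no parameter constraints, so only
        # keep them for the dispersion models proper.
        use_constraints = (model != 'R2eff')

        # Optimise the current model (hypothetical call).
        minimise(min_algor='simplex', constraints=use_constraints)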

Regards,

Edward



On 22 August 2014 19:17,  <tlinnet@xxxxxxxxxxxxx> wrote:
Author: tlinnet
Date: Fri Aug 22 19:17:19 2014
New Revision: 25227

URL: http://svn.gna.org/viewcvs/relax?rev=25227&view=rev
Log:
Further extended the profiling script for curve fitting.

Profiling is now in place for the C code method implemented in relax.

Similar code should now be devised for numpy arrays, for comparison.

But this profiling shows that constraints=True slows down this procedure
by a factor of 10!
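
A minimal sketch of what that numpy target function for the comparison
could look like (this is an assumption about the future script, not code
from this commit):

    # Pure numpy chi-squared for the two parameter exponential
    # I(t) = I0 * exp(-R*t), matching what the C func() minimises.
    # Sketch only - the names and the final form of the comparison
    # script are assumptions.
    from numpy import exp, sum

    def func_numpy(params, times, values, errors):
        """Return the chi-squared of the back-calculated exponential versus the data."""
        r, i0 = params
        back_calc = i0 * exp(-r * times)
        return sum(((values - back_calc) / errors)**2)

This could then be handed to minfx's generic_minimise() as
func=func_numpy with args=(times, values, errors), in place of the C
func().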

Modified:
    trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py

Modified: trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py
URL: http://svn.gna.org/viewcvs/relax/trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py?rev=25227&r1=25226&r2=25227&view=diff
==============================================================================
--- trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py (original)
+++ trunk/test_suite/shared_data/curve_fitting/profiling/profiling_relax_fit.py Fri Aug 22 19:17:19 2014
@@ -24,10 +24,11 @@
 # Python module imports.
 import cProfile
 from os import getcwd, path, sep
-from numpy import array, arange, int32, float64, pi, load
+from numpy import array, arange, int32, float64, pi, load, sqrt, sum
 import pstats
 import sys
 import tempfile
+from minfx.generic import generic_minimise

 # Python 3 support.
 try:
@@ -52,46 +53,171 @@

 # relax module imports.
 from status import Status; status = Status()
+from target_functions.relax_fit import setup, func, dfunc, d2func, back_calc_I


 # Alter setup.
 def main():
-    param_key_list = [
-        'r1rho_799.77739910_118.078_431.000',
-        'r1rho_799.77739910_118.078_651.200',
-        'r1rho_799.77739910_118.078_800.500',
-        'r1rho_799.77739910_118.078_984.000',
-        'r1rho_799.77739910_118.078_1341.110',
-        'r1rho_799.77739910_118.078_1648.500',
-        'r1rho_799.77739910_124.247_1341.110',
-        'r1rho_799.77739910_130.416_800.500',
-        'r1rho_799.77739910_130.416_1341.110',
-        'r1rho_799.77739910_130.416_1648.500',
-        'r1rho_799.77739910_142.754_800.500',
-        'r1rho_799.77739910_142.754_1341.110',
-        'r1rho_799.77739910_179.768_1341.110',
-        'r1rho_799.77739910_241.459_1341.110'
-    ]
-
-    # Define maximum dimensions.
-    NE, NS, NM, NO, ND, NT = 1, 1, 1, 6, 6, 5
-
-    # Define path to data
-    data_path = status.install_path + sep+'test_suite'+sep+'shared_data'+sep+'curve_fitting'+sep+'profiling'+sep
-
-    values_arr = load(data_path + "values_arr.npy")
-    errors_arr = load(data_path + "errors_arr.npy")
-    times_arr = load(data_path + "times_arr.npy")
-    struct_arr = load(data_path + "struct_arr.npy")
+    v_cT_chi2_list = array(verify(constraints=True))
+    v_cF_chi2_list = array(verify(constraints=False))
+
+    sum_root_squared = sum( sqrt( (v_cT_chi2_list - v_cF_chi2_list)**2 ) )
+    print("The sum of the root squared differences are: %.3e" % sum_root_squared)
+
+    lines_report = 15
+
+    if True:
+        #################
+        #  Verify, with constraints
+        constraints = True
+
+        # Print statistics.
+        verbose = True
+
+        # Calc for verify with constraints.
+        v_cT_filename = tempfile.NamedTemporaryFile(delete=False).name
+        # Profile for a single spin.
+        cProfile.run('verify(constraints=%s)'%constraints, v_cT_filename)
+
+        # Read all stats files into a single object
+        v_cT_stats = pstats.Stats(v_cT_filename)
+
+        # Clean up filenames for the report
+        v_cT_stats.strip_dirs()
+
+        # Sort the statistics by the cumulative time spent in the function. cumulative, time, calls
+        v_cT_stats.sort_stats('cumulative')
+
+        # Print report for single.
+        if verbose:
+            v_cT_stats.print_stats(lines_report)
+
+    if True:
+        #################
+        #  Verify, without constraints
+        constraints = False
+
+        # Print statistics.
+        verbose = True
+
+        # Calc for verify without constraints.
+        v_cF_filename = tempfile.NamedTemporaryFile(delete=False).name
+        # Profile for a single spin.
+        cProfile.run('verify(constraints=%s)'%constraints, v_cF_filename)
+
+        # Read all stats files into a single object
+        v_cF_stats = pstats.Stats(v_cF_filename)
+
+        # Clean up filenames for the report
+        v_cF_stats.strip_dirs()
+
+        # Sort the statistics by the cumulative time spent in the function. cumulative, time, calls
+        v_cF_stats.sort_stats('cumulative')
+
+        # Print report for single.
+        if verbose:
+            v_cF_stats.print_stats(lines_report)
+
+
+class Profile:
+    """
+    Class holding the setup and data for profiling the exponential curve-fitting.
+    """
+
+    def __init__(self):
+
+        # Define parameters
+        self.param_key_list = [
+            'r1rho_799.77739910_118.078_431.000',
+            'r1rho_799.77739910_118.078_651.200',
+            'r1rho_799.77739910_118.078_800.500',
+            'r1rho_799.77739910_118.078_984.000',
+            'r1rho_799.77739910_118.078_1341.110',
+            'r1rho_799.77739910_118.078_1648.500',
+            'r1rho_799.77739910_124.247_1341.110',
+            'r1rho_799.77739910_130.416_800.500',
+            'r1rho_799.77739910_130.416_1341.110',
+            'r1rho_799.77739910_130.416_1648.500',
+            'r1rho_799.77739910_142.754_800.500',
+            'r1rho_799.77739910_142.754_1341.110',
+            'r1rho_799.77739910_179.768_1341.110',
+            'r1rho_799.77739910_241.459_1341.110'
+        ]
+
+        # Define maximum dimensions.
+        self.NE, self.NS, self.NM, self.NO, self.ND, self.NT = 1, 1, 1, 6, 6, 5
+
+        # Define path to data
+        self.data_path = status.install_path + sep+'test_suite'+sep+'shared_data'+sep+'curve_fitting'+sep+'profiling'+sep
+
+        self.values_arr = load(self.data_path + "values_arr.npy")
+        self.errors_arr = load(self.data_path + "errors_arr.npy")
+        self.times_arr = load(self.data_path + "times_arr.npy")
+        self.struct_arr = load(self.data_path + "struct_arr.npy")
+
+        #param_vector = array([ 0.,  0.])
+        self.param_vector = array([  8.800000000000001e+00,   2.000000000800000e+05])
+        self.scaling_list = [1.0, 1.0]
+        self.func_tol = 1e-25
+        self.grad_tol = None
+        self.max_iterations = 10000000
+        self.verbosity = 0
+
+    def set_options(self, constraints=None):
+        # Define which constraints should be used.
+        self.constraints = constraints
+
+        if self.constraints:
+            self.min_algor = 'Log barrier'
+            self.min_options = ('simplex',)
+            self.A = array([ [ 1.,  0.],
+                        [-1.,  0.],
+                        [ 0.,  1.]] )
+            self.b = array([   0., -200.,    0.])
+
+        else:
+            self.min_algor = 'simplex'
+            self.min_options = ()
+            self.A = None
+            self.b = None
+
+
+def verify(constraints=None):
+    # Instantiate class.
+    C = Profile()
+
+    # Set the minimising options.
+    C.set_options(constraints=constraints)
+
+    chi2_list = []

     # Print arrays.
-    for ei in range(NE):
-        for si in range(NS):
-            for mi in range(NM):
-                for oi in range(NO):
-                    for di in range(ND):
-                        print(ei, si, mi, oi, di, values_arr[ei, si, mi, oi, di], struct_arr[ei, si, mi, oi, di])
-
+    for ei in range(C.NE):
+        for si in range(C.NS):
+            for mi in range(C.NM):
+                for oi in range(C.NO):
+                    for di in range(C.ND):
+                        # Extract values
+                        values = C.values_arr[ei, si, mi, oi, di]
+                        errors = C.errors_arr[ei, si, mi, oi, di]
+                        times = C.times_arr[ei, si, mi, oi, di]
+                        struct = C.struct_arr[ei, si, mi, oi, di]
+                        num_times = int( sum(struct) )
+
+                        if num_times == 0:
+                            continue
+
+                        # Initialise the function to minimise.
+                        setup(num_params=len(C.param_vector), num_times=num_times, values=values, sd=errors, relax_times=times, scaling_matrix=C.scaling_list)
+
+                        results = generic_minimise(func=func, dfunc=dfunc, d2func=d2func, args=(), x0=C.param_vector, min_algor=C.min_algor, min_options=C.min_options, func_tol=C.func_tol, grad_tol=C.grad_tol, maxiter=C.max_iterations, A=C.A, b=C.b, full_output=True, print_flag=C.verbosity)
+
+                        # Unpack
+                        param_vector, chi2, iter_count, f_count, g_count, h_count, warning = results
+
+                        chi2_list.append(chi2)
+
+    return chi2_list

 # Execute main function.
 if __name__ == "__main__":




