Package minimise :: Module minimise
[hide private]
[frames] | [no frames]

Source Code for Module minimise.minimise

  1  import sys 
  2  from Numeric import copy 
  3  from re import match 
  4   
# Optional dependency: the scipy based minimisers (simplex, BFGS, and Newton
# conjugate gradient) are only usable when the scipy package is installed.
# noscipy_flag records the outcome so the dispatcher in minimise() can bail
# out cleanly if a scipy algorithm is requested without scipy present.
try:
    from scipy.optimize import fmin, fmin_bfgs, fmin_ncg
    # Aliases distinguishing the scipy implementations from the equivalently
    # named algorithms implemented in this package (simplex, bfgs, newton).
    simplex_scipy = fmin
    bfgs_scipy = fmin_bfgs
    ncg_scipy = fmin_ncg
    noscipy_flag = 0
except ImportError:
    print "Scipy is not installed, cannot use simplex, BFGS, or Newton conjugate gradient minimisation from the scipy package."
    noscipy_flag = 1
 14   
 15  # Grid search. 
 16  from grid import grid 
 17   
 18  # Line search algorithms. 
 19  from coordinate_descent import coordinate_descent 
 20  from steepest_descent import steepest_descent 
 21  from bfgs import bfgs 
 22  from newton import newton 
 23   
 24  # Trust region algorithms. 
 25  from levenberg_marquardt import levenberg_marquardt 
 26  from cauchy_point import cauchy_point 
 27   
 28  # Other algorithms. 
 29  from simplex import simplex 
 30   
 31   
32 -def minimise(func, dfunc=None, d2func=None, args=(), x0=None, minimiser=None, func_tol=1e-5, maxiter=1000, full_output=0, print_flag=0):
33 """Generic code for iterative minimisers. 34 35 36 Function options 37 ~~~~~~~~~~~~~~~~ 38 39 func - The function to minimise. 40 dfunc - The function which returns the gradient vector. 41 d2func - The function which returns the hessian matrix or approximation. 42 43 f_args - The tuple of arguments to supply to the function func. 44 df_args - The tuple of arguments to supply to the function dfunc. 45 d2f_args - The tuple of arguments to supply to the function d2func. 46 47 xk - The parameter vector which on input is the initial values, x0. 48 fk - The function value which on input corresponds to x0. 49 dfk - The gradient vector which on input corresponds to x0. 50 d2fk - The hessian matrix or approximation which on input corresponds to x0. 51 52 xk_new - The parameter vector for the next iteration which on input can be anything. 53 fk_new - The function value for the next iteration which on input can be anything. 54 dfk_new - The gradient vector for the next iteration which on input can be anything. 55 d2fk_new - The hessian matrix for the next iteration which on input can be anything. 56 57 func_tol - The cutoff value used to terminate minimisation by comparison to the difference in function values between iterations. 58 maxiter - The maximum number of iterations. 59 print_flag - A flag specifying how much information should be printed to standard output during minimisation: 60 61 The print flag corresponds to: 62 0 - No output. 63 1 - Minimal output. 64 2 - Full output. 65 66 67 Returned objects 68 ~~~~~~~~~~~~~~~~ 69 70 The minimised parameter vector, function value at the minimum, number of iterations, and a warning flag are returned. 71 The warning flag corresponds to: 72 0 - Minimisation terminated successfully. 73 1 - Maximum number of iterations have been reached. 74 """ 75 76 f_count = 0 77 g_count = 0 78 h_count = 0 79 warning = None 80 81 82 # Parameter initialisation methods. 83 ################################### 84 85 # Grid search. 
86 if match('^[Gg]rid', minimiser[0]): 87 if print_flag: 88 print "\n\n<<< Grid search >>>" 89 xk, fk, k = grid(func, args=args, grid_ops=minimiser[1], print_flag=print_flag) 90 f_count = k 91 92 # Fixed parameter values. 93 elif match('^[Ff]ixed', minimiser[0]): 94 if print_flag: 95 print "\n\n<<< Fixed initial parameter values >>>" 96 fk = apply(func, (minimiser[1],)+args) 97 k, f_count = 1, 1 98 99 100 # Scipy optimisation methods. 101 ############################# 102 103 # Simplex minimisation (scipy). 104 elif match('^[Ss]implex[ _][Ss]cipy$', minimiser[0]): 105 if print_flag: 106 print "\n\n<<< Simplex minimisation (scipy) >>>" 107 if noscipy_flag: 108 print "Simplex minimisation has been choosen yet the scipy python module has not been installed." 109 sys.exit() 110 results = simplex_scipy(func, x0, args=args, xtol=1e-30, ftol=func_tol, maxiter=maxiter, full_output=1, disp=print_flag) 111 xk, fk, k, f_count, warning = results 112 warning = `warning` 113 114 # Quasi-Newton BFGS minimisation (scipy). 115 elif match('^[Bb][Ff][Gg][Ss][ _][Ss]cipy$', minimiser[0]): 116 if print_flag: 117 print "\n\n<<< Quasi-Newton BFGS minimisation (scipy) >>>" 118 if noscipy_flag: 119 print "Quasi-Newton BFGS minimisation from the scipy python module has been choosen yet the module has not been installed." 120 sys.exit() 121 xk, fk, f_count, g_count, warning = bfgs_scipy(func, x0, fprime=dfunc, args=args, avegtol=func_tol, maxiter=maxiter, full_output=1, disp=print_flag) 122 k = f_count 123 warning = `warning` 124 125 126 # Newton Conjugate Gradient minimisation (scipy). 127 elif match('^[Nn][Cc][Gg][ _][Ss]cipy$', minimiser[0]): 128 if print_flag: 129 print "\n\n<<< Newton Conjugate Gradient minimisation (scipy) >>>" 130 if noscipy_flag: 131 print "Newton Conjugate Gradient minimisation has been choosen yet the scipy python module has not been installed." 
132 sys.exit() 133 xk, fk, f_count, g_count, h_count, warning = ncg_scipy(func, x0, fprime=dfunc, fhess=d2func, args=args, avextol=func_tol, maxiter=maxiter, full_output=1, disp=print_flag) 134 k = f_count 135 warning = `warning` 136 137 138 # Line search algorithms. 139 ######################### 140 141 # Back-and-forth coordinate descent minimisation. 142 elif match('^[Cc][Dd]$', minimiser[0]) or match('^[Cc]oordinate-[Dd]escent$', minimiser[0]): 143 if print_flag: 144 print "\n\n<<< Back-and-forth coordinate descent minimisation >>>" 145 min = coordinate_descent(func, dfunc=dfunc, args=args, x0=x0, line_search_algor=minimiser[1], func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 146 if full_output: 147 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 148 else: 149 xk = min.minimise() 150 151 # Steepest descent minimisation. 152 elif match('^[Ss][Dd]$', minimiser[0]) or match('^[Ss]teepest[ _][Dd]escent$', minimiser[0]): 153 if print_flag: 154 print "\n\n<<< Steepest descent minimisation >>>" 155 min = steepest_descent(func, dfunc=dfunc, args=args, x0=x0, line_search_algor=minimiser[1], func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 156 if full_output: 157 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 158 else: 159 xk = min.minimise() 160 161 # Quasi-Newton BFGS minimisation. 162 elif match('^[Bb][Ff][Gg][Ss]$', minimiser[0]): 163 if print_flag: 164 print "\n\n<<< Quasi-Newton BFGS minimisation >>>" 165 min = bfgs(func, dfunc=dfunc, args=args, x0=x0, line_search_algor=minimiser[1], func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 166 if full_output: 167 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 168 else: 169 xk = min.minimise() 170 171 # Newton minimisation. 
172 elif match('^[Nn]ewton$', minimiser[0]): 173 if print_flag: 174 print "\n\n<<< Newton minimisation >>>" 175 min = newton(func, dfunc=dfunc, d2func=d2func, args=args, x0=x0, line_search_algor=minimiser[1], func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 176 if full_output: 177 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 178 else: 179 xk = min.minimise() 180 181 182 # Trust-region algorithms. 183 ########################## 184 185 # Levenberg-Marquardt minimisation. 186 elif match('^[Ll][Mm]$', minimiser[0]) or match('^[Ll]evenburg-[Mm]arquardt$', minimiser[0]): 187 if print_flag: 188 print "\n\n<<< Levenberg-Marquardt minimisation >>>" 189 min = levenberg_marquardt(chi2_func=func, dchi2_func=dfunc, dfunc=minimiser[1], errors=minimiser[2], args=args, x0=x0, func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 190 if full_output: 191 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 192 else: 193 xk = min.minimise() 194 195 # Cauchy point minimisation. 196 elif match('^[Cc]auchy', minimiser[0]): 197 if print_flag: 198 print "\n\n<<< Cauchy point minimisation >>>" 199 min = cauchy_point(func, dfunc=dfunc, d2func=d2func, args=args, x0=x0, func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 200 if full_output: 201 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 202 else: 203 xk = min.minimise() 204 205 206 # Other algorithms. 207 ################### 208 209 # Simplex minimisation. 210 elif match('^[Ss]implex$', minimiser[0]): 211 if print_flag: 212 print "\n\n<<< Simplex minimisation >>>" 213 min = simplex(func, args=args, x0=x0, func_tol=func_tol, maxiter=maxiter, full_output=full_output, print_flag=print_flag) 214 if full_output: 215 xk, fk, k, f_count, g_count, h_count, warning = min.minimise() 216 else: 217 xk = min.minimise() 218 219 220 # No match to minimiser string. 
221 ############################### 222 223 else: 224 print "Minimiser type set incorrectly. The minimiser " + minimiser[0] + " is not programmed.\n" 225 sys.exit() 226 227 228 # Finish. 229 if print_flag: 230 print "\nIterations: " + `k` 231 print "Parameter values: " + `xk` 232 print "Function value: " + `fk` 233 print "Function calls: " + `f_count` 234 print "Gradient calls: " + `g_count` 235 print "Hessian calls: " + `h_count` 236 if warning: 237 print "Warning: " + warning 238 else: 239 print "Warning: None" 240 if full_output: 241 return xk, fk, k, f_count, g_count, h_count, warning 242 else: 243 return xk
244