Package minfx :: Module ncg
[hide private]
[frames] | [no frames]

Source Code for Module minfx.ncg

  1  ############################################################################### 
  2  #                                                                             # 
  3  # Copyright (C) 2003-2013 Edward d'Auvergne                                   # 
  4  #                                                                             # 
  5  # This file is part of the minfx optimisation library,                        # 
  6  # https://sourceforge.net/projects/minfx                                      # 
  7  #                                                                             # 
  8  # This program is free software: you can redistribute it and/or modify        # 
  9  # it under the terms of the GNU General Public License as published by        # 
 10  # the Free Software Foundation, either version 3 of the License, or           # 
 11  # (at your option) any later version.                                         # 
 12  #                                                                             # 
 13  # This program is distributed in the hope that it will be useful,             # 
 14  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
 15  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
 16  # GNU General Public License for more details.                                # 
 17  #                                                                             # 
 18  # You should have received a copy of the GNU General Public License           # 
 19  # along with this program.  If not, see <http://www.gnu.org/licenses/>.       # 
 20  #                                                                             # 
 21  ############################################################################### 
 22   
 23  # Module docstring. 
 24  """Line search Newton conjugate gradient optimization. 
 25   
 26  This file is part of the minfx optimisation library at U{https://sourceforge.net/projects/minfx}. 
 27  """ 
 28   
 29  # Python module imports. 
 30  from numpy import dot, float64, sqrt, zeros 
 31   
 32  # Minfx module imports. 
 33  from minfx.base_classes import Line_search, Min 
 34   
 35   
def ncg(func=None, dfunc=None, d2func=None, args=(), x0=None, min_options=None, func_tol=1e-25, grad_tol=None, maxiter=1e6, a0=1.0, mu=0.0001, eta=0.9, full_output=0, print_flag=0, print_prefix=""):
    """Line search Newton conjugate gradient algorithm.

    Page 140 from 'Numerical Optimization' by Jorge Nocedal and Stephen J. Wright, 1999, 2nd ed.  The algorithm is:

    - Given initial point x0.
    - while 1:
        - Compute a search direction pk by applying the CG method to Hk.pk = -gk, starting from x0 = 0.  Terminate when ||rk|| <= min(0.5,sqrt(||gk||)), or if negative curvature is encountered.
        - Set xk+1 = xk + ak.pk, where ak satisfies the Wolfe, Goldstein, or Armijo backtracking conditions.

    @keyword func:          The target function to minimise.
    @keyword dfunc:         The first derivative (gradient) of the target function.
    @keyword d2func:        The second derivative (Hessian) of the target function.
    @keyword args:          Supplementary arguments passed to func, dfunc, and d2func.
    @keyword x0:            The initial parameter vector.
    @keyword min_options:   The line search algorithm options.
    @keyword func_tol:      The function tolerance convergence criterion.
    @keyword grad_tol:      The gradient tolerance convergence criterion.
    @keyword maxiter:       The maximum number of iterations.
    @keyword a0:            The initial step length for the line search.
    @keyword mu:            The sufficient decrease (Armijo) constant of the Wolfe conditions.
    @keyword eta:           The curvature constant of the Wolfe conditions.
    @keyword full_output:   A flag which if set returns extra information (counts and warnings) in addition to the minimised parameter vector.
    @keyword print_flag:    The verbosity level.
    @keyword print_prefix:  The text to prepend to all printed output lines.
    @return:                The results of the minimisation, or None if initialisation fails.
    """

    if print_flag:
        if print_flag >= 2:
            print(print_prefix)
            print(print_prefix)
        print(print_prefix + "Newton Conjugate Gradient minimisation")
        print(print_prefix + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")

    # Set up the minimiser instance (renamed from 'min' to avoid shadowing the builtin min()).
    minimiser = Ncg(func, dfunc, d2func, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix)
    if minimiser.init_failure:
        print(print_prefix + "Initialisation of minimisation has failed.")
        return None

    # Run the main iteration loop and return its results.
    results = minimiser.minimise()
    return results
59 60
class Ncg(Line_search, Min):
    def __init__(self, func, dfunc, d2func, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix):
        """Class for newton conjugate gradient minimisation specific functions.

        Unless you know what you are doing, you should call the function 'ncg' rather than using this class.
        """

        # Store the target function, its derivatives, and their supplementary arguments.
        self.func = func
        self.dfunc = dfunc
        self.d2func = d2func
        self.args = args

        # The starting point and the termination criteria.
        self.xk = x0
        self.func_tol = func_tol
        self.grad_tol = grad_tol
        self.maxiter = maxiter

        # Output control.
        self.full_output = full_output
        self.print_flag = print_flag
        self.print_prefix = print_prefix

        # The initial step length for the line search.
        self.a0 = a0

        # The Wolfe condition constants for the line search.
        self.mu = mu
        self.eta = eta

        # Flag signalling a failed initialisation.
        self.init_failure = 0

        # Configure the line search algorithm and its options.
        self.line_search_options(min_options)
        self.setup_line_search()

        # Zero the function, gradient, and Hessian evaluation counters.
        self.f_count = 0
        self.g_count = 0
        self.h_count = 0

        # No warnings yet.
        self.warning = None

        # Select the convergence test function.
        self.setup_conv_tests()

        # Evaluate the function, gradient, and Hessian at the starting point.
        self.fk, self.f_count = self.func(*(self.xk,)+self.args), self.f_count + 1
        self.dfk, self.g_count = self.dfunc(*(self.xk,)+self.args), self.g_count + 1
        self.d2fk, self.h_count = self.d2func(*(self.xk,)+self.args), self.h_count + 1


    def get_pk(self):
        """The CG algorithm."""

        # Starting values at CG iteration i = 0: solve d2fk.p = -dfk, starting from the zero vector.
        x = zeros(len(self.xk), float64)
        r = self.dfk * 1.0
        p = -r
        r_dot_r = dot(r, r)
        grad_norm = sqrt(r_dot_r)

        # Residual threshold terminating the inner CG loop.
        # NOTE(review): this is min(0.5*||g||, ||g||^2) whereas the module docstring describes
        # min(0.5, sqrt(||g||))*||g|| - confirm which forcing sequence is intended.
        residual_test = min(0.5 * grad_norm, r_dot_r)

        # Debugging printout of the starting state.
        if self.print_flag >= 2:
            print(self.print_prefix + "Initial data:")
            print(self.print_prefix + "\tx0: " + repr(x))
            print(self.print_prefix + "\tr0: " + repr(r))
            print(self.print_prefix + "\tp0: " + repr(p))

        i = 0
        while True:
            # Hessian-vector product and the curvature along p.
            hess_p = dot(self.d2fk, p)
            curv = dot(p, hess_p)

            # Negative (or zero) curvature - terminate the CG iteration.
            if curv <= 0.0:
                # Past the first iteration, return the current solution.
                if i > 0:
                    return x
                # First iteration with a zero gradient - the zero vector is returned.
                if r_dot_r == 0.0:
                    return x
                # First iteration - step along p by r.r/curvature.
                return x + (r_dot_r / curv) * p

            # The residual is small enough - accept the current solution.
            if sqrt(r_dot_r) <= residual_test:
                return x

            # Standard CG updates of the solution, residual, and conjugate direction.
            step = r_dot_r / curv
            x_next = x + step*p
            r_next = r + step*hess_p
            r_dot_r_next = dot(r_next, r_next)
            beta = r_dot_r_next / r_dot_r
            p_next = -r_next + beta*p

            # Debugging printout for this iteration.
            if self.print_flag >= 2:
                print("")
                print(self.print_prefix + "Iteration i = " + repr(i))
                print(self.print_prefix + "Api: " + repr(hess_p))
                print(self.print_prefix + "Curv: " + repr(curv))
                print(self.print_prefix + "ai: " + repr(step))
                print(self.print_prefix + "xi+1: " + repr(x_next))
                print(self.print_prefix + "ri+1: " + repr(r_next))
                print(self.print_prefix + "bi+1: " + repr(beta))
                print(self.print_prefix + "pi+1: " + repr(p_next))

            # Shift the i+1 values down to i for the next round.
            x = x_next * 1.0
            r = r_next * 1.0
            p = p_next * 1.0
            r_dot_r = r_dot_r_next
            i = i + 1


    def new_param_func(self):
        """The new parameter function.

        Find the search direction, do a line search, and get xk+1 and fk+1.
        """

        # Compute the Newton-CG search direction.
        self.pk = self.get_pk()

        # Line search along pk, setting self.alpha.
        self.line_search()

        # Take the step and evaluate the function and gradient at the new point.
        self.xk_new = self.xk + self.alpha * self.pk
        self.fk_new, self.f_count = self.func(*(self.xk_new,)+self.args), self.f_count + 1
        self.dfk_new, self.g_count = self.dfunc(*(self.xk_new,)+self.args), self.g_count + 1


    def update(self):
        """Function to update the function value, gradient vector, and Hessian matrix."""

        # Copy over the accepted point together with its function value and gradient.
        self.xk = self.xk_new * 1.0
        self.fk = self.fk_new
        self.dfk = self.dfk_new * 1.0

        # The Hessian is recomputed at the new point.
        self.d2fk, self.h_count = self.d2func(*(self.xk,)+self.args), self.h_count + 1