Package minfx :: Module log_barrier_function
[hide private]
[frames] | [no frames]

Source Code for Module minfx.log_barrier_function

  1  ############################################################################### 
  2  #                                                                             # 
  3  # Copyright (C) 2003-2014 Edward d'Auvergne                                   # 
  4  #                                                                             # 
  5  # This file is part of the minfx optimisation library,                        # 
  6  # https://sourceforge.net/projects/minfx                                      # 
  7  #                                                                             # 
  8  # This program is free software: you can redistribute it and/or modify        # 
  9  # it under the terms of the GNU General Public License as published by        # 
 10  # the Free Software Foundation, either version 3 of the License, or           # 
 11  # (at your option) any later version.                                         # 
 12  #                                                                             # 
 13  # This program is distributed in the hope that it will be useful,             # 
 14  # but WITHOUT ANY WARRANTY; without even the implied warranty of              # 
 15  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               # 
 16  # GNU General Public License for more details.                                # 
 17  #                                                                             # 
 18  # You should have received a copy of the GNU General Public License           # 
 19  # along with this program.  If not, see <http://www.gnu.org/licenses/>.       # 
 20  #                                                                             # 
 21  ############################################################################### 
 22   
 23  # Module docstring. 
 24  """Logarithmic barrier function optimization constraint algorithm. 
 25   
 26  This file is part of the U{minfx optimisation library<https://sourceforge.net/projects/minfx>}. 
 27  """ 
 28   
 29  # Python module imports. 
 30  from math import log 
 31  from numpy import dot, float64, inf, outer, sqrt, zeros 
 32  from re import match 
 33   
 34  # Minfx module imports. 
 35  from minfx.base_classes import print_iter, Min 
 36  from minfx.constraint_linear import Constraint_linear 
 37   
 38   
def log_barrier_function(func=None, dfunc=None, d2func=None, args=(), x0=None, min_options=(), A=None, b=None, epsilon0=1e-5, scale_epsilon=1e-2, func_tol=1e-25, grad_tol=None, maxiter=1e6, inner_maxiter=500, full_output=0, print_flag=0):
    """The logarithmic barrier augmented function.

    From U{http://en.wikipedia.org/wiki/Barrier_function} and U{http://bayen.eecs.berkeley.edu/bayen/?q=webfm_send/247}.  This is an augmented function, the algorithm is:

        - Given starting points x0s.
        - while 1:
            - Find an approximate minimiser xk of the target function value plus the logarithmic barrier function value.
            - Final convergence test.
            - Scale the penalty function scaling factor epsilon.
            - k = k + 1.


    Linear constraints
    ==================

    These are defined as::

        A.x >= b

    where:

        - A is an m*n matrix where the rows are the transposed vectors, ai, of length n.  The elements of ai are the coefficients of the model parameters.
        - x is the vector of model parameters of dimension n.
        - b is the vector of scalars of dimension m.
        - m is the number of constraints.
        - n is the number of model parameters.

    E.g. if 0 <= q <= 1, q >= 1 - 2r, and 0 <= r, then::

        | 1  0 |            |  0 |
        |      |            |    |
        |-1  0 |   | q |    | -1 |
        |      | . |   | >= |    |
        | 1  2 |   | r |    |  1 |
        |      |            |    |
        | 0  1 |            |  2 |

    To use linear constraints both the matrix A and vector b need to be supplied.


    Initial values
    ==============

    These are the default initial values::

        epsilon0 = 1e-5
        scale_epsilon = 1e-2
    """

    if print_flag:
        print("\n")
        print("Logarithmic barrier function")
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~")

    # Build the minimiser object (named so as not to shadow the builtin min()).
    minimiser = Log_barrier_function(func, dfunc, d2func, args, x0, min_options, A, b, epsilon0, scale_epsilon, func_tol, grad_tol, maxiter, inner_maxiter, full_output, print_flag)

    # Abort if the set-up of the constraint algorithm failed.
    if minimiser.init_failure:
        return None

    # Run the optimisation and return its results.
    results = minimiser.minimise()
    return results
class Log_barrier_function(Min):
    def __init__(self, func, dfunc, d2func, args, x0, min_options, A, b, epsilon0, scale_epsilon, func_tol, grad_tol, maxiter, inner_maxiter, full_output, print_flag):
        """Class for logarithmic barrier function minimisation specific functions.

        Unless you know what you are doing, you should call the function 'log_barrier_function' rather than using this class.
        """

        # Import the 'generic_minimise' function from 'generic.py'.  It is important that
        # this import statement occurs here otherwise a recursive import between the module
        # 'generic' and this module occurs.  This means that the function 'generic_minimise'
        # has not been initialised and is therefore not in the namespace.
        from minfx.generic import generic_minimise
        self.generic_minimise = generic_minimise

        # Linear constraints.  Identity tests are required here as 'A != None' on a
        # numpy array performs an elementwise comparison, and its truth value is
        # ambiguous (raising an error on modern numpy versions).
        if A is not None and b is not None:
            self.A = A
            self.b = b
            constraint_linear = Constraint_linear(self.A, self.b)
            self.c = constraint_linear.func
            self.dc = constraint_linear.dfunc
            self.d2c = None
            self.m = len(self.b)
            if print_flag >= 2:
                print("Linear constraint matrices.")
                print("A:\n" + repr(self.A))
                print("b:\n" + repr(self.b))

        # Incorrectly supplied constraints.
        else:
            print("The constraints have been incorrectly supplied.")
            self.init_failure = 1
            return

        # min_options.
        if len(min_options) == 0:
            print("The unconstrained minimisation algorithm has not been specified.")
            self.init_failure = 1
            return
        self.min_algor = min_options[0]
        self.min_options = min_options[1:]

        # Function arguments.
        self.args = args
        self.func = func
        self.dfunc = dfunc
        self.d2func = d2func
        self.xk = x0
        self.epsilon = epsilon0
        self.scale_epsilon = scale_epsilon
        self.func_tol = func_tol
        self.grad_tol = grad_tol
        self.maxiter = maxiter
        self.inner_maxiter = inner_maxiter
        self.full_output = full_output
        self.print_flag = print_flag

        # Set the print_prefix to nothing.
        self.print_prefix = ""

        # Initialisation failure flag.
        self.init_failure = 0

        # Initialise the function, gradient, and Hessian evaluation counters.
        self.f_count = 0
        self.g_count = 0
        self.h_count = 0

        # Initialise the warning string.
        self.warning = None

        # Initialise data structures.  test_str[i] flags whether constraint i was
        # satisfied at the last function evaluation.
        self.test_str = zeros(self.m)
        self.f_log = self.func_log(*(self.xk,)+self.args)

        # Set the convergence test function.
        self.setup_conv_tests()


    def func_log(self, *args):
        r"""The logarithmic barrier function.

        The equation is::

                      /  sum_i=1^m -log(bi - AiT.x)    for Ax < b,
            psi(x) = <
                      \  +inf                          otherwise.

        @return:    The target function value augmented by the logarithmic penalty, or inf if any constraint is violated.
        """

        # Calculate the function and constraint values.
        self.fk = f_log = self.func(*(args[0],)+args[1:])
        self.ck = self.c(*(args[0],))

        # Calculate the logarithmic penalty function, recording in test_str which
        # constraints are satisfied (used by the gradient and Hessian methods).
        for i in range(self.m):
            if self.ck[i] > 0:
                f_log += - self.epsilon * log(self.ck[i])
                self.test_str[i] = 1
            else:
                f_log = inf
                self.test_str[i] = 0

        # Printout.
        if self.print_flag >= 4:
            print("")
            print("\tfunction value: " + repr(self.fk))

        return f_log


    def func_dlog(self, *args):
        """The logarithmic barrier gradient.

        @return:    The gradient of the target function augmented by the log-barrier gradient.
        """

        # Calculate the function and constraint gradients.
        dfk = self.dfunc(*(args[0],)+args[1:])
        self.dck = self.dc(*(args[0],))

        # Work on a copy of the gradient: the in-place subtractions below would
        # otherwise mutate the array returned by dfunc (previously dfk and dpsi
        # were aliases, so the 'function grad' printout showed already-modified
        # values).
        dpsi = dfk * 1.0

        # Calculate the log-barrier gradient, only for the satisfied constraints.
        for i in range(self.m):
            if self.test_str[i]:
                dpsi -= self.epsilon / self.ck[i] * self.dck[i]

        # Printout.
        if self.print_flag >= 4:
            print("")
            print("\tlog-barrier grad: " + repr(dpsi))
            print("\tfunction grad: " + repr(dfk))
            print("\tdck: " + repr(self.dck))
            print("\tTest structure: " + repr(self.test_str))

        # Return the modified gradient.
        return dpsi


    def func_d2log(self, *args):
        """The logarithmic barrier Hessian.

        @return:    The Hessian of the target function augmented by the log-barrier Hessian.
        """

        # Calculate the function and constraint Hessians.  A copy is taken so the
        # in-place subtractions do not mutate the Hessian returned by d2func.
        d2psi = self.d2func(*(args[0],)+args[1:]) * 1.0
        self.d2ck = self.d2c(*(args[0],))

        # Calculate the log-barrier Hessian, only for the satisfied constraints.
        for i in range(self.m):
            if self.test_str[i]:
                d2psi -= self.epsilon / self.ck[i] * self.d2ck[i] + self.epsilon / self.ck[i]**2 * outer(self.dck[i], self.dck[i])

        return d2psi


    def minimise(self):
        """Logarithmic barrier optimisation.

        Repeatedly minimises the barrier-augmented target function with the
        chosen sub-algorithm, shrinking epsilon between rounds until convergence,
        an infinite function value, or the iteration limit.
        """

        # Start the iteration counters (k = outer rounds, j = total sub-iterations).
        self.k = 0
        self.j = 0

        # Sub-algorithm printout.
        sub_print_flag = self.print_flag
        if sub_print_flag >= 2:
            sub_print_flag = sub_print_flag - 1

        # Iterate until the local minima is found.
        while True:
            # Print out.
            if self.print_flag:
                print_iter(self.k, self.xk, self.fk)
                print("Entering sub-algorithm.")

            # Maximum number of iterations for the sub-loop, capped so the total
            # never exceeds self.maxiter.
            if self.maxiter - self.j < self.inner_maxiter:
                maxiter = self.maxiter - self.j
            else:
                maxiter = self.inner_maxiter

            # Normal minimisation but using the modified target functions.
            results = self.generic_minimise(func=self.func_log, dfunc=self.func_dlog, d2func=self.func_d2log, args=self.args, x0=self.xk, min_algor=self.min_algor, min_options=self.min_options, func_tol=self.func_tol, grad_tol=self.grad_tol, maxiter=maxiter, full_output=1, print_flag=self.print_flag, print_prefix="\t")
            if results is None:
                return

            # Unpack and sort the results.
            self.xk_new, self.f_log_new, j, f, g, h, self.temp_warning = results
            self.j, self.f_count, self.g_count, self.h_count = self.j + j, self.f_count + f, self.g_count + g, self.h_count + h

            # Maximum number of iteration test.
            if self.j >= self.maxiter - 1:
                self.warning = "Maximum number of iterations reached"
                break

            # Convergence test.
            if self.conv_test(self.f_log_new, self.f_log):
                break

            # Infinite function value.
            if self.f_log_new == inf:
                self.warning = "Infinite function value encountered, can no longer perform optimisation."
                break

            # Update epsilon.
            self.epsilon = self.scale_epsilon * self.epsilon

            # Iteration counter update (xk is copied so later updates of xk_new
            # cannot alias it).
            self.xk = self.xk_new * 1.0
            self.f_log = self.f_log_new
            self.k = self.k + 1

        # Return.
        if self.full_output:
            try:
                self.fk = self.func(*(self.xk_new,)+self.args)
                return self.xk_new, self.fk, self.j, self.f_count, self.g_count, self.h_count, self.warning
            except AttributeError:
                self.fk = self.func(*(self.xk,)+self.args)
                return self.xk, self.fk, self.j, self.f_count, self.g_count, self.h_count, self.warning
        else:
            try:
                return self.xk_new
            except AttributeError:
                return self.xk