"""Logarithmic barrier function optimization constraint algorithm.

This file is part of the minfx optimisation library at U{https://sourceforge.net/projects/minfx}.
"""

# Python module imports.
from math import log
from numpy import dot, float64, inf, outer, sqrt, zeros
from re import match

# Minfx module imports.
from minfx.base_classes import print_iter, Min
from minfx.constraint_linear import Constraint_linear

def log_barrier_function(func=None, dfunc=None, d2func=None, args=(), x0=None, min_options=(), A=None, b=None, epsilon0=1e-2, scale_epsilon=1e-2, func_tol=1e-25, grad_tol=None, maxiter=1e6, inner_maxiter=500, full_output=0, print_flag=0):
    """The logarithmic barrier augmented function.

    From http://en.wikipedia.org/wiki/Barrier_function and http://bayen.eecs.berkeley.edu/bayen/?q=webfm_send/247.  This is an augmented function; the algorithm is:

        - Given the starting point x0.
        - while True:
            - Find an approximate minimiser xk of the target function value plus the logarithmic barrier function value.
            - Perform the final convergence test.
            - Scale the penalty function scaling factor epsilon.
            - k = k + 1.

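    The quantity minimised by the sub-algorithm at each outer iteration is the target function plus the scaled barrier term.  As a rough, self-contained sketch (the augmented() helper below is hypothetical and simply mirrors what the func_log() method of this module computes)::

        from math import log
        from numpy import dot, inf

        def augmented(func, x, A, b, epsilon):
            # f(x) - epsilon * sum_i log(Ai.x - bi), or +inf on or outside the constraint boundary.
            fx = func(x)
            ck = dot(A, x) - b
            for ci in ck:
                if ci <= 0:
                    return inf
                fx += -epsilon * log(ci)
            return fx
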
    Linear constraints
    ==================

    These are defined as::

        A.x >= b

    where:

        - A is an m*n matrix where the rows are the transposed vectors, ai, of length n.  The elements of ai are the coefficients of the model parameters.
        - x is the vector of model parameters of dimension n.
        - b is the vector of scalars of dimension m.
        - m is the number of constraints.
        - n is the number of model parameters.
    E.g. if 0 <= q <= 1, q >= 1 - 2r, and 0 <= r, then::

        | 1  0 |            |  0 |
        |      |            |    |
        |-1  0 |   | q |    | -1 |
        |      | . |   | >= |    |
        | 1  2 |   | r |    |  1 |
        |      |            |    |
        | 0  1 |            |  0 |

    To use linear constraints, both the matrix A and the vector b need to be supplied.
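
    For the example above, A and b could be constructed as follows (an illustrative sketch only; minfx already depends on numpy)::

        from numpy import array, float64

        A = array([[ 1.0,  0.0],
                   [-1.0,  0.0],
                   [ 1.0,  2.0],
                   [ 0.0,  1.0]], float64)
        b = array([0.0, -1.0, 1.0, 0.0], float64)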


    Initial values
    ==============

    These are the default initial values::

        epsilon0 = 1e-2
        scale_epsilon = 1e-2
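

    Example
    =======

    A minimal usage sketch, not taken from the minfx documentation.  It assumes a simple quadratic target function and picks the 'simplex' algorithm name for the unconstrained sub-algorithm (a gradient-free choice, as the barrier gradient is not yet implemented; see the generic_minimise() documentation for the available names)::

        from numpy import array, dot, float64

        def quad(x):
            return dot(x, x)

        # The constraints q >= -1 and r >= -1, in the form A.x >= b.
        A = array([[1.0, 0.0], [0.0, 1.0]], float64)
        b = array([-1.0, -1.0], float64)

        x = log_barrier_function(func=quad, x0=array([0.5, 0.5], float64), min_options=('simplex',), A=A, b=b)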
    """

    # Print out the header.
    if print_flag:
        print("\n")
        print("Logarithmic barrier function")
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~")

    # Set up the minimisation class, catching initialisation failures.
    min = Log_barrier_function(func, dfunc, d2func, args, x0, min_options, A, b, epsilon0, scale_epsilon, func_tol, grad_tol, maxiter, inner_maxiter, full_output, print_flag)
    if min.init_failure:
        return None

    # Carry out the minimisation and return the results.
    results = min.minimise()
    return results


class Log_barrier_function(Min):
    def __init__(self, func, dfunc, d2func, args, x0, min_options, A, b, epsilon0, scale_epsilon, func_tol, grad_tol, maxiter, inner_maxiter, full_output, print_flag):
        """Class for logarithmic barrier function minimisation specific functions.

        Unless you know what you are doing, you should call the function 'log_barrier_function' rather than using this class.
        """

        # Import the generic minimisation function here to avoid a circular import.
        from minfx.generic import generic_minimise
        self.generic_minimise = generic_minimise

        # Linear constraints.
        if A is not None and b is not None:
            self.A = A
            self.b = b
            constraint_linear = Constraint_linear(self.A, self.b)
            self.c = constraint_linear.func
            self.dc = constraint_linear.dfunc
            self.d2c = None
            self.m = len(self.b)
            if print_flag >= 2:
                print("Linear constraint matrices.")
                print("A:\n" + repr(self.A))
                print("b:\n" + repr(self.b))

        # Incorrectly supplied constraints.
        else:
            print("The constraints have been incorrectly supplied.")
            self.init_failure = 1
            return

        # The sub-algorithm for the unconstrained minimisations.
        if len(min_options) == 0:
            print("The unconstrained minimisation algorithm has not been specified.")
            self.init_failure = 1
            return
        self.min_algor = min_options[0]
        self.min_options = min_options[1:]

        # Function arguments.
        self.args = args
        self.func = func
        self.dfunc = dfunc
        self.d2func = d2func
        self.xk = x0
        self.epsilon = epsilon0
        self.scale_epsilon = scale_epsilon
        self.func_tol = func_tol
        self.grad_tol = grad_tol
        self.maxiter = maxiter
        self.inner_maxiter = inner_maxiter
        self.full_output = full_output
        self.print_flag = print_flag

        # The print prefix.
        self.print_prefix = ""

        # No initialisation failure.
        self.init_failure = 0

        # Function call counters.
        self.f_count = 0
        self.g_count = 0
        self.h_count = 0

        # No warnings yet.
        self.warning = None

        # The initial logarithmic barrier function value.
        self.f_log = self.func_log(*(self.xk,)+self.args)

        # Set up the convergence tests (inherited from the Min base class).
        self.setup_conv_tests()


    def func_log(self, *args):
        """The logarithmic barrier function.

        The equation is::

                      /  sum_i=1^m -log(AiT.x - bi)    for A.x > b,
            psi(x) = <
                      \  +inf                          otherwise.
        """

        # The function and constraint values at the point args[0].
        self.fk = f_log = self.func(*(args[0],)+args[1:])
        self.ck = self.c(*(args[0],))

        # Add the barrier term for each constraint, or +inf if the point lies on or outside the constraint boundary.
        for i in range(self.m):
            if self.ck[i] > 0:
                f_log += - self.epsilon * log(self.ck[i])
            else:
                f_log = inf

        # Printout.
        if self.print_flag >= 4:
            print("")
            print("\tfunction value:       " + repr(self.fk))

        return f_log


    def func_dlog(self, *args):
        """The logarithmic barrier gradient."""

        # Not implemented yet.
        raise NameError("The logarithmic barrier gradient is not implemented yet.")

        # The function and constraint gradients (currently unreachable).
        dfk = dpsi = self.dfunc(*(args[0],)+args[1:])
        self.dck = self.dc(*(args[0],))

        # Printout.
        if self.print_flag >= 4:
            print("")
            print("\tfunction grad:       " + repr(dfk))
            print("\tdck:                 " + repr(self.dck))

        return dpsi


    def func_d2log(self, *args):
        """The logarithmic barrier Hessian."""

        # Not implemented yet.
        raise NameError("The logarithmic barrier Hessian is not implemented yet.")

        # The function and constraint Hessians (currently unreachable).
        d2psi = self.d2func(*(args[0],)+args[1:])
        self.d2ck = self.d2c(*(args[0],))

        return d2psi


    def minimise(self):
        """Logarithmic barrier optimisation."""

        # Start the outer iteration counter k and the total inner iteration counter j.
        self.k = 0
        self.j = 0

        # Lower the print flag for the sub-algorithm printout.
        sub_print_flag = self.print_flag
        if sub_print_flag >= 2:
            sub_print_flag = sub_print_flag - 1

        # Iterate until convergence or until the maximum number of iterations is reached.
        while True:
            # Print out.
            if self.print_flag:
                print_iter(self.k, self.xk, self.fk)
                print("Entering sub-algorithm.")

            # The maximum number of iterations for the sub-loop.
            if self.maxiter - self.j < self.inner_maxiter:
                maxiter = self.maxiter - self.j
            else:
                maxiter = self.inner_maxiter

            # Unconstrained minimisation of the augmented function.
            results = self.generic_minimise(func=self.func_log, dfunc=self.func_dlog, d2func=self.func_d2log, args=self.args, x0=self.xk, min_algor=self.min_algor, min_options=self.min_options, func_tol=self.func_tol, grad_tol=self.grad_tol, maxiter=maxiter, full_output=1, print_flag=sub_print_flag, print_prefix="\t")
            if results is None:
                return

            # Unpack the results and update the counters.
            self.xk_new, self.f_log_new, j, f, g, h, self.temp_warning = results
            self.j, self.f_count, self.g_count, self.h_count = self.j + j, self.f_count + f, self.g_count + g, self.h_count + h

            # Maximum number of iterations test.
            if self.j >= self.maxiter - 1:
                self.warning = "Maximum number of iterations reached"
                break

            # Convergence test.
            if self.conv_test(self.f_log_new, self.f_log):
                break

            # Shrink the epsilon scaling factor.
            self.epsilon = self.scale_epsilon * self.epsilon

            # Update the parameter vector, function value, and iteration counter.
            self.xk = self.xk_new * 1.0
            self.f_log = self.f_log_new
            self.k = self.k + 1

        # Return the final results, recalculating the target function value at the solution.
        if self.full_output:
            try:
                self.fk = self.func(*(self.xk_new,)+self.args)
                return self.xk_new, self.fk, self.j, self.f_count, self.g_count, self.h_count, self.warning
            except AttributeError:
                self.fk = self.func(*(self.xk,)+self.args)
                return self.xk, self.fk, self.j, self.f_count, self.g_count, self.h_count, self.warning
        else:
            try:
                return self.xk_new
            except AttributeError:
                return self.xk