###############################################################################
#                                                                             #
# Copyright (C) 2003-2013 Edward d'Auvergne                                   #
#                                                                             #
# This file is part of the minfx optimisation library,                        #
# https://sourceforge.net/projects/minfx                                      #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the               #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program.  If not, see <http://www.gnu.org/licenses/>.       #
#                                                                             #
###############################################################################

# Module docstring.
"""Steepest descent (SD) optimization.

This file is part of the minfx optimisation library at U{https://sourceforge.net/projects/minfx}.
"""

# Python module imports.
from numpy import dot

# Minfx module imports.
from minfx.base_classes import Line_search, Min

def steepest_descent(func=None, dfunc=None, args=(), x0=None, min_options=None, func_tol=1e-25, grad_tol=None, maxiter=1e6, a0=1.0, mu=0.0001, eta=0.1, full_output=0, print_flag=0, print_prefix=""):
    """Steepest descent minimisation."""

    if print_flag:
        if print_flag >= 2:
            print(print_prefix)
        print(print_prefix)
        print(print_prefix + "Steepest descent minimisation")
        print(print_prefix + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    min = Steepest_descent(func, dfunc, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix)
    if min.init_failure:
        print(print_prefix + "Initialisation of minimisation has failed.")
        return None
    results = min.minimise()
    return results

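# Example usage (sketch).  The quadratic test function, its gradient, and the
# starting point below are illustrative only, and passing an empty tuple for
# min_options assumes the default line search of the Line_search base class:
#
#     from numpy import array, dot
#     from minfx.steepest_descent import steepest_descent
#
#     def quad(x):
#         # f(x) = x.x, minimum at the origin.
#         return dot(x, x)
#
#     def dquad(x):
#         # Gradient df(x) = 2x.
#         return 2.0 * x
#
#     results = steepest_descent(func=quad, dfunc=dquad, x0=array([3.0, -4.0]), min_options=(), func_tol=1e-10, full_output=1, print_flag=1)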

class Steepest_descent(Line_search, Min):
    def __init__(self, func, dfunc, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix):
        """Class for steepest descent minimisation specific functions.

        Unless you know what you are doing, you should call the function 'steepest_descent' rather than using this class.
        """

        # Function arguments.
        self.func = func
        self.dfunc = dfunc
        self.args = args
        self.xk = x0
        self.func_tol = func_tol
        self.grad_tol = grad_tol
        self.maxiter = maxiter
        self.full_output = full_output
        self.print_flag = print_flag
        self.print_prefix = print_prefix

        # Set a0.
        self.a0 = a0

        # Line search constants for the Wolfe conditions.
        self.mu = mu
        self.eta = eta
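        # In the standard strong Wolfe conditions these constants satisfy
        # 0 < mu < eta < 1, with mu the sufficient decrease (Armijo) constant and
        # eta the curvature constant:
        #     f(xk + alpha*pk) <= f(xk) + mu * alpha * dot(dfk, pk)
        #     |dot(df(xk + alpha*pk), pk)| <= eta * |dot(dfk, pk)|
        # The exact form enforced depends on the line search algorithm selected
        # through min_options.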

        # Initialisation failure flag.
        self.init_failure = 0

        # Setup the line search options and algorithm.
        self.line_search_options(min_options)
        self.setup_line_search()

        # Initialise the function, gradient, and Hessian evaluation counters.
        self.f_count = 0
        self.g_count = 0
        self.h_count = 0

        # Initialise the warning string.
        self.warning = None

        # Set the convergence test function.
        self.setup_conv_tests()

        # Calculate the function value and gradient vector.
        self.fk, self.f_count = self.func(*(self.xk,)+self.args), self.f_count + 1
        self.dfk, self.g_count = self.dfunc(*(self.xk,)+self.args), self.g_count + 1


    def new_param_func(self):
        """The new parameter function.

        Find the search direction, do a line search, and get xk+1 and fk+1.
        """

        # Calculate the steepest descent direction.
        self.pk = -self.dfk

        # Update a0 using information about the last iteration:  scale the previous
        # step length alpha by the ratio of directional derivatives,
        # a0 = alpha * dot(dfk_last, pk_last) / dot(dfk, pk), which for the steepest
        # descent direction pk = -dfk is the ratio of the squared gradient norms.
        try:
            self.a0 = self.alpha * dot(self.dfk_last, -self.dfk_last) / dot(self.dfk, -self.dfk)
        except AttributeError:
            # First iteration:  no data from a previous step exists yet, so the
            # initial a0 value is used unchanged.
            pass

        # Line search.
        self.line_search()

        # Find the new parameter vector and function value at that point.
        self.xk_new = self.xk + self.alpha * self.pk
        self.fk_new, self.f_count = self.func(*(self.xk_new,)+self.args), self.f_count + 1
        self.dfk_new, self.g_count = self.dfunc(*(self.xk_new,)+self.args), self.g_count + 1