1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24 """Steepest descent (SD) optimization.
25
26 This file is part of the minfx optimisation library at U{https://sourceforge.net/projects/minfx}.
27 """
28
29
30 from numpy import dot
31
32
33 from minfx.base_classes import Line_search, Min
34
35
def steepest_descent(func=None, dfunc=None, args=(), x0=None, min_options=None, func_tol=1e-25, grad_tol=None, maxiter=1e6, a0=1.0, mu=0.0001, eta=0.1, full_output=0, print_flag=0, print_prefix=""):
    """Steepest descent minimisation.

    @keyword func:          The target function to minimise; called as func(x0, *args).
    @keyword dfunc:         The gradient of the target function; called as dfunc(x0, *args).
    @keyword args:          Extra arguments passed to both func and dfunc.
    @keyword x0:            The initial parameter vector.
    @keyword min_options:   Options forwarded to the line search setup.
    @keyword func_tol:      The function tolerance convergence criterion.
    @keyword grad_tol:      The gradient tolerance convergence criterion.
    @keyword maxiter:       The maximum number of iterations.
    @keyword a0:            The initial step length used by the line search.
    @keyword mu:            Line search constant (presumably the sufficient-decrease constant — confirm against the Line_search base class).
    @keyword eta:           Line search constant (presumably the curvature constant — confirm against the Line_search base class).
    @keyword full_output:   If non-zero, request extended output from the minimiser.
    @keyword print_flag:    Verbosity level; >= 1 prints the banner, >= 2 adds extra spacing.
    @keyword print_prefix:  String prepended to every printed line.
    @return:                The results of Steepest_descent.minimise(), or None if initialisation failed.
    """

    # Print the banner when verbose.
    if print_flag:
        if print_flag >= 2:
            print(print_prefix)
        print(print_prefix)
        print(print_prefix + "Steepest descent minimisation")
        print(print_prefix + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")

    # Set up the minimiser object ('minimiser' rather than 'min' to avoid shadowing the builtin).
    minimiser = Steepest_descent(func, dfunc, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix)

    # Bail out if the line search setup (or any other initialisation) failed.
    if minimiser.init_failure:
        print(print_prefix + "Initialisation of minimisation has failed.")
        return None

    # Run the minimisation and return its results.
    results = minimiser.minimise()
    return results
51
52
def __init__(self, func, dfunc, args, x0, min_options, func_tol, grad_tol, maxiter, a0, mu, eta, full_output, print_flag, print_prefix):
    """Class for steepest descent minimisation specific functions.

    Unless you know what you are doing, you should call the function 'steepest_descent' rather than using this class.
    """

    # Store the target function data and minimisation settings.
    self.func = func
    self.dfunc = dfunc
    self.args = args
    self.xk = x0
    self.func_tol = func_tol
    self.grad_tol = grad_tol
    self.maxiter = maxiter
    self.full_output = full_output
    self.print_flag = print_flag
    self.print_prefix = print_prefix

    # The initial step length for the line search.
    self.a0 = a0

    # Line search constants (NOTE(review): presumably the Wolfe sufficient-decrease and curvature constants — confirm against the Line_search base class).
    self.mu = mu
    self.eta = eta

    # Initialisation failure flag, tested by the steepest_descent() wrapper function.
    self.init_failure = 0

    # Set up the line search options and algorithm (methods supplied by the Line_search base class).
    self.line_search_options(min_options)
    self.setup_line_search()

    # Function, gradient, and Hessian evaluation counters.
    self.f_count = 0
    self.g_count = 0
    self.h_count = 0

    # Minimisation warning string (None while no warning has been raised).
    self.warning = None

    # Set up the convergence tests (method supplied by the Min base class).
    self.setup_conv_tests()

    # Evaluate the function and gradient at the starting point x0, counting the evaluations.
    self.fk, self.f_count = self.func(*(self.xk,)+self.args), self.f_count + 1
    self.dfk, self.g_count = self.dfunc(*(self.xk,)+self.args), self.g_count + 1
100
# NOTE(review): the 'def' header line was dropped in the garbled source (original line 102);
# the name 'new_param_func' is reconstructed from the docstring and the minfx Min base class
# protocol — confirm against the pristine file.
def new_param_func(self):
    """The new parameter function.

    Find the search direction, do a line search, and get xk+1 and fk+1.
    """

    # The steepest descent search direction is simply the negative gradient.
    self.pk = -self.dfk

    # Scale the initial line search step length by |dfk_last|^2 / |dfk|^2 (the two minus
    # signs cancel in the dot products), reusing the previous step length alpha.
    try:
        self.a0 = self.alpha * dot(self.dfk_last, -self.dfk_last) / dot(self.dfk, -self.dfk)
    except AttributeError:
        # First iteration: neither alpha nor dfk_last exists yet, so keep the supplied a0.
        pass

    # Perform the line search to obtain the step length self.alpha (Line_search base class).
    self.line_search()

    # Compute the new parameter vector, function value, and gradient, counting the evaluations.
    self.xk_new = self.xk + self.alpha * self.pk
    self.fk_new, self.f_count = self.func(*(self.xk_new,)+self.args), self.f_count + 1
    self.dfk_new, self.g_count = self.dfunc(*(self.xk_new,)+self.args), self.g_count + 1
125
126
128 """Function to update the function value, gradient vector, and Hessian matrix."""
129
130
131 self.fk_last = self.fk
132 self.dfk_last = self.dfk * 1.0
133
134
135 self.xk = self.xk_new * 1.0
136 self.fk = self.fk_new
137 self.dfk = self.dfk_new * 1.0
138