import copy

from Numeric import dot

from generic_line_search import generic_line_search
from generic_minimise import generic_minimise

# NOTE: the class statement below is a reconstruction - the original definition line is
# missing from the listing, so the class name and base classes are assumed from the
# imports and from the self.generic_minimise binding at the end of __init__.
class steepest_descent(generic_line_search, generic_minimise):
    def __init__(self, func, dfunc=None, args=(), x0=None, line_search_algor=None, func_tol=1e-5, maxiter=1000, full_output=0, print_flag=0, a0=1.0, mu=0.0001, eta=0.1):
        "Class for steepest descent minimisation specific functions."

        self.func = func
        self.dfunc = dfunc
        self.args = args
        self.xk = x0
        self.func_tol = func_tol
        self.maxiter = maxiter
        self.full_output = full_output
        self.print_flag = print_flag

        if not line_search_algor:
            raise NameError, "No line search algorithm has been supplied."
        else:
            self.line_search_algor = line_search_algor

        # Initial step length for the line search.
        self.a0 = a0

        # Line search constants; the defaults match typical sufficient decrease (mu) and
        # curvature (eta) condition values, but their exact use is defined by the line
        # search code in generic_line_search.
        self.mu = mu
        self.eta = eta

        # Function, gradient, and Hessian evaluation counters (the Hessian counter is
        # never incremented by steepest descent itself).
        self.f_count = 0
        self.g_count = 0
        self.h_count = 0

        # Minimisation warning string.
        self.warning = None

        # Evaluate the function value and gradient vector at the starting position x0.
        self.fk, self.f_count = apply(self.func, (self.xk,)+self.args), self.f_count + 1
        self.dfk, self.g_count = apply(self.dfunc, (self.xk,)+self.args), self.g_count + 1

        # The iterative loop itself is supplied by the generic_minimise base class.
        self.minimise = self.generic_minimise


    # NOTE: method name assumed - the original definition line is missing from the listing.
    def backup_current_data(self):
        "Function to backup the current data fk and dfk into fk_last and dfk_last."

        self.fk_last = self.fk
        self.dfk_last = copy.deepcopy(self.dfk)


    # NOTE: method name assumed - the original definition line is missing from the listing.
    def dir(self):
        "Calculate the steepest descent direction pk."

        self.pk = -self.dfk
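        # Justification: to first order, f(xk + a*p) - f(xk) is approximately a * dot(dfk, p),
        # which, for a fixed step size a and fixed length of p, is most negative when p points
        # along -dfk.  Hence the negative gradient is the locally steepest descent direction.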


    # NOTE: method name assumed - the original definition line is missing from the listing.
    def update_a0(self):
        "Update a0 using information about the last iteration."

        self.a0 = self.alpha * dot(self.dfk_last, -self.dfk_last) / dot(self.dfk, -self.dfk)
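        # Since dot(dfk_last, -dfk_last) = -||dfk_last||**2 and dot(dfk, -dfk) = -||dfk||**2,
        # this reduces to a0 = alpha * ||dfk_last||**2 / ||dfk||**2, the standard initial step
        # length guess for steepest descent obtained by assuming the first order change in the
        # function at this iteration will match that of the previous one (see, for example, the
        # step length selection discussion in Nocedal and Wright, Numerical Optimization).
        # Assumption: self.alpha holds the step length accepted by the line search at the
        # previous iteration; it is set outside this class.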


    # NOTE: method name assumed - the original definition line is missing from the listing.
    def update_data(self):
        "Function to update the parameter vector xk, the function value fk, and the gradient vector dfk."

        self.xk = copy.deepcopy(self.xk_new)
        self.fk, self.f_count = apply(self.func, (self.xk,)+self.args), self.f_count + 1
        self.dfk, self.g_count = apply(self.dfunc, (self.xk,)+self.args), self.g_count + 1
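

# A minimal usage sketch.  Assumptions: the generic_minimise base class provides the
# generic_minimise() loop bound to self.minimise above and returns the minimisation
# results when called with no arguments, and the line_search_algor value below is only a
# placeholder - the valid names are defined by the line search code, which is not shown.
if __name__ == '__main__':
    from Numeric import array, Float64

    # A simple quadratic test function, f(x) = x0**2 + 4*x1**2, and its gradient.
    def func(x):
        return x[0]**2 + 4.0*x[1]**2

    def dfunc(x):
        return array([2.0*x[0], 8.0*x[1]], Float64)

    minimiser = steepest_descent(func, dfunc=dfunc, x0=array([6.0, 4.0], Float64), line_search_algor='Backtracking', func_tol=1e-10, maxiter=10000, full_output=1, print_flag=1)
    results = minimiser.minimise()
    print results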