1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24 from Numeric import dot
25
def backtrack(func, args, x, f, g, p, a_init=1.0, rho=0.5, c=1e-4):
    """Backtracking line search.

    Procedure 3.1, page 41, from 'Numerical Optimization' by Jorge Nocedal and
    Stephen J. Wright, 1999.

    Starting from the step length a_init, the step is repeatedly scaled by rho
    until the Armijo sufficient decrease condition

        func(x + a*p) <= f + c*a*dot(g, p)

    is satisfied.  Requires the gradient vector at the point x.


    Function options
    ~~~~~~~~~~~~~~~~

    func - The function to minimise.
    args - The tuple of arguments to supply to the function func.
    x - The parameter vector.
    f - The function value at the point x.
    g - The gradient vector at the point x.
    p - The descent direction.
    a_init - Initial step length.
    rho - The step length scaling factor (should be between 0 and 1).
    c - Constant between 0 and 1 determining the slope for the sufficient decrease condition.


    Returned objects
    ~~~~~~~~~~~~~~~~

    The tuple (a, f_count), where a is the step length satisfying the
    sufficient decrease condition and f_count is the number of function
    evaluations performed.


    WARNING:  if p is not a descent direction (i.e. dot(g, p) >= 0), the
    sufficient decrease condition may never be satisfied and this loop will
    not terminate.
    """

    # Current trial step length and the function evaluation counter.
    a = a_init
    f_count = 0

    while True:
        # Function value at the trial point x + a*p.
        fk = func(x + a*p, *args)
        f_count = f_count + 1

        # Armijo sufficient decrease condition (first Wolfe condition).
        if fk <= f + c*a*dot(g, p):
            return a, f_count

        # Condition failed:  backtrack by shrinking the step length.
        a = rho*a
76