from Numeric import dot
2
def backtrack(func, args, x, f, g, p, a_init=1.0, rho=0.5, c=1e-4, max_iter=None):
    """Backtracking line search.

    Procedure 3.1, page 41, from 'Numerical Optimization' by Jorge Nocedal and
    Stephen J. Wright, 1999.  Starting from a_init, the step length is
    repeatedly scaled by rho until the sufficient decrease (Armijo) condition

        f(x + a*p) <= f(x) + c*a*dot(g, p)

    is satisfied.  Requires the gradient vector g at the point x.


    Function options
    ~~~~~~~~~~~~~~~~

    func     - The function to minimise.
    args     - The tuple of arguments to supply to the function func.
    x        - The parameter vector.
    f        - The function value at the point x.
    g        - The gradient vector at the point x.
    p        - The descent direction.
    a_init   - Initial step length.
    rho      - The step length scaling factor (should be between 0 and 1).
    c        - Constant between 0 and 1 determining the slope for the
               sufficient decrease condition.
    max_iter - Optional cap on the number of function evaluations.  None (the
               default) reproduces the original unbounded behaviour; a positive
               integer guards against an infinite loop when p is not a descent
               direction (the condition can then never be satisfied).


    Returned objects
    ~~~~~~~~~~~~~~~~

    The tuple (a, f_count), where a is the accepted step length (to be used for
    the update xk+1 = xk + a*p) and f_count is the number of function
    evaluations performed.


    Internal variables
    ~~~~~~~~~~~~~~~~~~

    a        - The current trial step length.
    fk       - The function value at the trial point x + a*p.
    f_count  - Running count of function evaluations.

    """

    a = a_init
    f_count = 0

    # The Armijo slope term c*dot(g, p) is loop invariant - compute it once.
    slope = c * dot(g, p)

    while True:
        fk = func(x + a*p, *args)
        f_count = f_count + 1

        # Sufficient decrease (Armijo) condition.
        if fk <= f + a*slope:
            return a, f_count

        # Optional safeguard: stop backtracking after max_iter evaluations so a
        # non-descent direction cannot cause an infinite loop.
        if max_iter is not None and f_count >= max_iter:
            return a, f_count

        a = rho*a