1
2
3 from math import cos, pi, sin, sqrt
4 from Numeric import Float64, array, dot
5
6 from more_thuente import more_thuente
7
9 print "\n\n\n\n\n\n\n\n\n\n\n\n\t\t<<< Test Functions >>>\n\n\n"
10 print "\nSelect the function to test:"
11 while 1:
12 input = raw_input('> ')
13 valid_functions = ['1', '2', '3', '4', '5', '6']
14 if input in valid_functions:
15 func = int(input)
16 break
17 else:
18 print "Choose a function number between 1 and 6."
19
20 print "\nSelect a0:"
21 while 1:
22 input = raw_input('> ')
23 valid_vals = ['1e-3', '1e-1', '1e1', '1e3']
24 if input in valid_vals:
25 a0 = float(input)
26 break
27 else:
28 print "Choose a0 as one of ['1e-3', '1e-1', '1e1', '1e3']."
29
30 print "Testing line minimiser using test function " + `func`
31 if func == 1:
32 f, df = func1, dfunc1
33 mu, eta = 0.001, 0.1
34 elif func == 2:
35 f, df = func2, dfunc2
36 mu, eta = 0.1, 0.1
37 elif func == 3:
38 f, df = func3, dfunc3
39 mu, eta = 0.1, 0.1
40 elif func == 4:
41 f, df = func456, dfunc456
42 beta1, beta2 = 0.001, 0.001
43 mu, eta = 0.001, 0.001
44 elif func == 5:
45 f, df = func456, dfunc456
46 beta1, beta2 = 0.01, 0.001
47 mu, eta = 0.001, 0.001
48 elif func == 6:
49 f, df = func456, dfunc456
50 beta1, beta2 = 0.001, 0.01
51 mu, eta = 0.001, 0.001
52
53 xk = array([0.0], Float64)
54 pk = array([1.0], Float64)
55 if func >= 4:
56 args = (beta1, beta2)
57 else:
58 args = ()
59 f0 = apply(f, (xk,)+args)
60 g0 = apply(df, (xk,)+args)
61 a = more_thuente(f, df, args, xk, pk, f0, g0, a_init=a0, mu=mu, eta=eta, print_flag=1)
62 print "The minimum is at " + `a`
63
64
def func1(alpha, beta=2.0):
    """Test function 1.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The function is:
                           alpha
        phi(alpha) = - ---------------
                       alpha**2 + beta

    alpha is a rank-1 sequence with a single element (the step length);
    beta is a scalar function parameter.  Returns the scalar phi(alpha).
    """

    return - alpha[0]/(alpha[0]**2 + beta)
78
79
def dfunc1(alpha, beta=2.0):
    """Derivative of test function 1.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The gradient is:
                           2*alpha**2              1
        phi'(alpha) = -------------------- - ---------------
                      (alpha**2 + beta)**2   alpha**2 + beta

    alpha is a rank-1 sequence with a single element (the step length);
    beta defaults to 2.0 to match func1.  Returns the gradient as a
    single element Float64 array.
    """

    temp = array([0.0], Float64)
    # Guard against overflow in alpha**2 for enormous steps: the gradient
    # tends to zero as alpha -> infinity, so return the zero vector.
    if alpha[0] > 1e90:
        return temp
    else:
        a = 2.0*(alpha[0]**2)/((alpha[0]**2 + beta)**2)
        b = 1.0/(alpha[0]**2 + beta)
        temp[0] = a - b
        return temp
101
102
def func2(alpha, beta=0.004):
    """Test function 2.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The function is:

        phi(alpha) = (alpha + beta)**5 - 2(alpha + beta)**4

    alpha is a rank-1 sequence with a single element (the step length);
    beta is a scalar function parameter.  Returns the scalar phi(alpha).
    """

    return (alpha[0] + beta)**5 - 2.0*((alpha[0] + beta)**4)
115
116
def dfunc2(alpha, beta=0.004):
    """Derivative of test function 2.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The gradient is:

        phi'(alpha) = 5(alpha + beta)**4 - 8(alpha + beta)**3

    alpha is a rank-1 sequence with a single element (the step length);
    beta is a scalar function parameter.  Returns the gradient as a
    single element Float64 array.
    """

    temp = array([0.0], Float64)
    temp[0] = 5.0*((alpha[0] + beta)**4) - 8.0*((alpha[0] + beta)**3)
    return temp
131
132
def func3(alpha, beta=0.01, l=39.0):
    """Test function 3.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The function is:

                                  2(1 - beta)       / l*pi         \
        phi(alpha) = phi0(alpha) + ----------- . sin | ---- . alpha |
                                      l*pi           \  2          /

    where:
                       /  1 - alpha,                      if alpha <= 1 - beta,
                       |
                       |  alpha - 1,                      if alpha >= 1 + beta,
        phi0(alpha) = <
                       |    1                  1
                       |  ------(alpha - 1)**2 + - beta,  if alpha in [1 - beta, 1 + beta].
                       \  2*beta               2

    alpha is a rank-1 sequence with a single element (the step length);
    beta and l are scalar function parameters.  Returns the scalar
    phi(alpha).
    """

    # Piecewise quadratic phi0: a V-shape with minimum at alpha = 1,
    # smoothed by a parabola over [1 - beta, 1 + beta].
    if alpha[0] <= 1.0 - beta:
        phi0 = 1.0 - alpha[0]
    elif alpha[0] >= 1.0 + beta:
        phi0 = alpha[0] - 1.0
    else:
        phi0 = 0.5/beta * (alpha[0] - 1.0)**2 + 0.5*beta

    # Add the high frequency, low amplitude oscillation.
    return phi0 + 2.0*(1.0 - beta)/(l*pi) * sin(0.5 * l * pi * alpha[0])
164
165
def dfunc3(alpha, beta=0.01, l=39.0):
    """Derivative of test function 3.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The gradient is:
                                                     / l*pi         \
        phi'(alpha) = phi0'(alpha) + (1 - beta) . cos | ---- . alpha |
                                                     \  2           /

    where:
                        /  -1,         if alpha <= 1 - beta,
                        |
                        |  1,          if alpha >= 1 + beta,
        phi0'(alpha) = <
                        |  alpha - 1
                        |  ---------,  if alpha in [1 - beta, 1 + beta].
                        \    beta

    alpha is a rank-1 sequence with a single element (the step length);
    beta and l are scalar function parameters.  Returns the gradient as
    a single element Float64 array.
    """

    # Derivative of the piecewise phi0 term of func3.
    if alpha[0] <= 1.0 - beta:
        phi0_prime = -1.0
    elif alpha[0] >= 1.0 + beta:
        phi0_prime = 1.0
    else:
        phi0_prime = (alpha[0] - 1.0)/beta

    temp = array([0.0], Float64)
    temp[0] = phi0_prime + (1.0 - beta) * cos(0.5 * l * pi * alpha[0])
    return temp
201
202
def func456(alpha, beta1, beta2):
    """Test functions 4, 5, and 6.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The function is:

        phi(alpha) = gamma(beta1) * sqrt((1 - alpha)**2 + beta2**2) + gamma(beta2) * sqrt(alpha**2 + beta1**2)

    where:
        gamma(beta) = sqrt(1 + beta**2) - beta

    alpha is a rank-1 sequence with a single element (the step length);
    beta1 and beta2 are the scalar parameters distinguishing functions
    4, 5, and 6.  Returns the scalar phi(alpha).
    """

    # gamma(beta1) and gamma(beta2).
    g1 = sqrt(1.0 + beta1**2) - beta1
    g2 = sqrt(1.0 + beta2**2) - beta2
    return g1 * sqrt((1.0 - alpha[0])**2 + beta2**2) + g2 * sqrt(alpha[0]**2 + beta1**2)
220
221
def dfunc456(alpha, beta1, beta2):
    """Derivative of test functions 4, 5, and 6.

    From More, J. J., and Thuente, D. J. 1994, Line search algorithms with guaranteed sufficient decrease.
    ACM Trans. Math. Softw. 20, 286-307.

    The gradient is:

                                          (1 - alpha)                                     alpha
        phi'(alpha) = - gamma(beta1) * ------------------------------- + gamma(beta2) * -------------------------
                                       sqrt((1 - alpha)**2 + beta2**2)                   sqrt(alpha**2 + beta1**2)

    where:
        gamma(beta) = sqrt(1 + beta**2) - beta

    alpha is a rank-1 sequence with a single element (the step length);
    beta1 and beta2 are the scalar parameters distinguishing functions
    4, 5, and 6.  Returns the gradient as a single element Float64 array.
    """

    temp = array([0.0], Float64)
    # gamma(beta1) and gamma(beta2).
    g1 = sqrt(1.0 + beta1**2) - beta1
    g2 = sqrt(1.0 + beta2**2) - beta2
    a = -g1 * (1.0 - alpha[0]) / sqrt((1.0 - alpha[0])**2 + beta2**2)
    b = g2 * alpha[0] / sqrt(alpha[0]**2 + beta1**2)
    temp[0] = a + b
    return temp
245
246
# Launch the interactive test script.
run()
248