import datetime as dt
from scipy import optimize
from warnings import warn

from .scg import SCG

16 """
17 Superclass for all the optimizers.
18
19 :param x_init: initial set of parameters
20 :param f_fp: function that returns the function AND the gradients at the same time
21 :param f: function to optimize
22 :param fp: gradients
23 :param messages: print messages from the optimizer?
24 :type messages: (True | False)
25 :param max_f_eval: maximum number of function evaluations
26
27 :rtype: optimizer object.
28
29 """
    def __init__(self, messages=False, max_f_eval=1e4, max_iters=1e3,
                 ftol=None, gtol=None, xtol=None, bfgs_factor=None):
        self.opt_name = None
        self.messages = messages
        self.f_opt = None
        self.x_opt = None
        self.funct_eval = None
        self.status = None
        self.max_f_eval = int(max_f_eval)
        self.max_iters = int(max_iters)
        self.bfgs_factor = bfgs_factor
        self.trace = None
        self.time = "Not available"
        self.xtol = xtol
        self.gtol = gtol
        self.ftol = ftol

    def run(self, x_init, **kwargs):
        start = dt.datetime.now()
        self.opt(x_init, **kwargs)
        end = dt.datetime.now()
        self.time = str(end - start)

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        raise NotImplementedError("this needs to be implemented to use the optimizer class")

    def __str__(self):
        diagnostics = "Optimizer: \t\t\t\t %s\n" % self.opt_name
        diagnostics += "f(x_opt): \t\t\t\t %.3f\n" % self.f_opt
        diagnostics += "Number of function evaluations: \t %d\n" % self.funct_eval
        diagnostics += "Optimization status: \t\t\t %s\n" % self.status
        diagnostics += "Time elapsed: \t\t\t\t %s\n" % self.time
        return diagnostics
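
# Usage sketch: the concrete optimizers below all follow the same pattern --
# construct one, call ``run`` with an initial parameter vector plus whichever
# of ``f_fp``/``f``/``fp`` the optimizer needs, then read ``x_opt``, ``f_opt``
# and ``status``. The quadratic objective here is only a toy example:
#
#     import numpy as np
#     f = lambda x: float(np.sum(x ** 2))      # objective value
#     fp = lambda x: 2.0 * x                   # gradient
#     f_fp = lambda x: (f(x), fp(x))           # value and gradient together
#     o = opt_lbfgsb(max_iters=100)
#     o.run(np.ones(3), f_fp=f_fp, f=f, fp=fp)
#     print(o)                                 # formatted diagnostics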

class opt_tnc(Optimizer):
    def __init__(self, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "TNC (Scipy implementation)"

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        """
        Run the TNC optimizer

        """
        tnc_rcstrings = ['Local minimum', 'Converged', 'XConverged', 'Maximum number of f evaluations reached',
                         'Line search failed', 'Function is constant']

        assert f_fp is not None, "TNC requires f_fp"

        opt_dict = {}
        if self.xtol is not None:
            opt_dict['xtol'] = self.xtol
        if self.ftol is not None:
            opt_dict['ftol'] = self.ftol
        if self.gtol is not None:
            opt_dict['pgtol'] = self.gtol

        opt_result = optimize.fmin_tnc(f_fp, x_init, messages=self.messages,
                                       maxfun=self.max_f_eval, **opt_dict)
        self.x_opt = opt_result[0]
        self.f_opt = f_fp(self.x_opt)[0]
        self.funct_eval = opt_result[1]
        self.status = tnc_rcstrings[opt_result[2]]


class opt_lbfgsb(Optimizer):
    def __init__(self, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "L-BFGS-B (Scipy implementation)"

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        """
        Run the optimizer

        """
        rcstrings = ['Converged', 'Maximum number of f evaluations reached', 'Error']

        assert f_fp is not None, "BFGS requires f_fp"

        opt_dict = {}
        if self.xtol is not None:
            print("WARNING: l-bfgs-b doesn't have an xtol arg, so I'm going to ignore it")
        if self.ftol is not None:
            print("WARNING: l-bfgs-b doesn't have an ftol arg, so I'm going to ignore it")
        if self.gtol is not None:
            opt_dict['pgtol'] = self.gtol
        if self.bfgs_factor is not None:
            opt_dict['factr'] = self.bfgs_factor

        opt_result = optimize.fmin_l_bfgs_b(f_fp, x_init, maxfun=self.max_iters, maxiter=self.max_iters, **opt_dict)
        self.x_opt = opt_result[0]
        self.f_opt = f_fp(self.x_opt)[0]
        self.funct_eval = opt_result[2]['funcalls']
        self.status = rcstrings[opt_result[2]['warnflag']]

        # A warnflag of 2 signals abnormal termination; surface scipy's task message.
        if opt_result[2]['warnflag'] == 2:
            self.status = 'Error' + str(opt_result[2]['task'])


class opt_bfgs(Optimizer):
    def __init__(self, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "BFGS (Scipy implementation)"

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        """
        Run the optimizer

        """
        rcstrings = ['', 'Maximum number of iterations exceeded', 'Gradient and/or function calls not changing']

        opt_dict = {}
        if self.xtol is not None:
            print("WARNING: bfgs doesn't have an xtol arg, so I'm going to ignore it")
        if self.ftol is not None:
            print("WARNING: bfgs doesn't have an ftol arg, so I'm going to ignore it")
        if self.gtol is not None:
            opt_dict['gtol'] = self.gtol

        opt_result = optimize.fmin_bfgs(f, x_init, fp, disp=self.messages,
                                        maxiter=self.max_iters, full_output=True, **opt_dict)
        self.x_opt = opt_result[0]
        self.f_opt = f_fp(self.x_opt)[0]
        self.funct_eval = opt_result[4]
        self.status = rcstrings[opt_result[6]]


class opt_simplex(Optimizer):
    def __init__(self, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "Nelder-Mead simplex routine (via Scipy)"

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        """
        The simplex optimizer does not require gradients.
        """

        statuses = ['Converged', 'Maximum number of function evaluations made', 'Maximum number of iterations reached']

        opt_dict = {}
        if self.xtol is not None:
            opt_dict['xtol'] = self.xtol
        if self.ftol is not None:
            opt_dict['ftol'] = self.ftol
        if self.gtol is not None:
            print("WARNING: simplex doesn't have a gtol arg, so I'm going to ignore it")

        opt_result = optimize.fmin(f, x_init, (), disp=self.messages,
                                   maxfun=self.max_f_eval, full_output=True, **opt_dict)

        self.x_opt = opt_result[0]
        self.f_opt = opt_result[1]
        self.funct_eval = opt_result[3]
        self.status = statuses[opt_result[4]]
        self.trace = None


class opt_SCG(Optimizer):
    def __init__(self, *args, **kwargs):
        if 'max_f_eval' in kwargs:
            warn("max_f_eval deprecated for SCG optimizer: use max_iters instead!\nIgnoring max_f_eval!", FutureWarning)
        Optimizer.__init__(self, *args, **kwargs)

        self.opt_name = "Scaled Conjugate Gradients"

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        assert f is not None
        assert fp is not None

        opt_result = SCG(f, fp, x_init,
                         maxiters=self.max_iters,
                         max_f_eval=self.max_f_eval,
                         xtol=self.xtol, ftol=self.ftol,
                         gtol=self.gtol)

        self.x_opt = opt_result[0]
        self.trace = opt_result[1]
        self.f_opt = self.trace[-1]
        self.funct_eval = opt_result[2]
        self.status = opt_result[3]


def _check_for_climin():
    try:
        import climin
    except ImportError:
        raise ImportError("Need climin to run this optimizer. See https://github.com/BRML/climin.")


class Opt_Adadelta(Optimizer):
    def __init__(self, step_rate=0.1, decay=0.9, momentum=0, *args, **kwargs):
        Optimizer.__init__(self, *args, **kwargs)
        self.opt_name = "Adadelta (climin)"
        self.step_rate = step_rate
        self.decay = decay
        self.momentum = momentum

        _check_for_climin()

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        assert fp is not None

        import climin

        opt = climin.adadelta.Adadelta(x_init, fp, step_rate=self.step_rate, decay=self.decay, momentum=self.momentum)

        # climin optimizers are iterators; step until the iteration budget is spent.
        for info in opt:
            if info['n_iter'] >= self.max_iters:
                self.x_opt = opt.wrt
                self.status = 'maximum number of function evaluations exceeded'
                break


class RProp(Optimizer):
    def __init__(self, step_shrink=0.5, step_grow=1.2, min_step=1e-06, max_step=1, changes_max=0.1, *args, **kwargs):
        super(RProp, self).__init__(*args, **kwargs)
        self.opt_name = 'RProp (climin)'
        self.step_shrink = step_shrink
        self.step_grow = step_grow
        self.min_step = min_step
        self.max_step = max_step
        self.changes_max = changes_max

        _check_for_climin()

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        assert fp is not None

        import climin

        opt = climin.rprop.Rprop(x_init, fp,
                                 step_shrink=self.step_shrink, step_grow=self.step_grow,
                                 min_step=self.min_step, max_step=self.max_step,
                                 changes_max=self.changes_max)

        # Step the climin iterator until the iteration budget is spent.
        for info in opt:
            if info['n_iter'] >= self.max_iters:
                self.x_opt = opt.wrt
                self.status = 'maximum number of function evaluations exceeded'
                break


class Adam(Optimizer):
    def __init__(self, step_rate=.0002,
                 decay=None,
                 decay_mom1=0.1,
                 decay_mom2=0.001,
                 momentum=0,
                 offset=1e-8, *args, **kwargs):
        super(Adam, self).__init__(*args, **kwargs)
        self.opt_name = 'Adam (climin)'
        self.step_rate = step_rate
        self.decay = decay
        self.decay_mom1 = decay_mom1
        self.decay_mom2 = decay_mom2
        self.momentum = momentum
        self.offset = offset

        _check_for_climin()

    def opt(self, x_init, f_fp=None, f=None, fp=None):
        assert fp is not None

        import climin

        opt = climin.adam.Adam(x_init, fp,
                               step_rate=self.step_rate, decay=self.decay,
                               decay_mom1=self.decay_mom1, decay_mom2=self.decay_mom2,
                               momentum=self.momentum, offset=self.offset)

        # Step the climin iterator until the iteration budget is spent.
        for info in opt:
            if info['n_iter'] >= self.max_iters:
                self.x_opt = opt.wrt
                self.status = 'maximum number of function evaluations exceeded'
                break


def get_optimizer(f_min):
    optimizers = {'fmin_tnc': opt_tnc,
                  'simplex': opt_simplex,
                  'lbfgsb': opt_lbfgsb,
                  'org-bfgs': opt_bfgs,
                  'scg': opt_SCG,
                  'adadelta': Opt_Adadelta,
                  'rprop': RProp,
                  'adam': Adam}

    # Case-insensitive substring match against the registry keys.
    for opt_name in sorted(optimizers.keys()):
        if opt_name.lower().find(f_min.lower()) != -1:
            return optimizers[opt_name]

    raise KeyError('No optimizer was found matching the name: %s' % f_min)
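
# Usage sketch: because the lookup above is a case-insensitive substring match
# against the registry keys, a name like 'lbfgs' resolves to opt_lbfgsb and
# 'scg' to opt_SCG; the returned class is instantiated and run by the caller:
#
#     Opt = get_optimizer('lbfgsb')
#     o = Opt(max_iters=200)
#     o.run(x_init, f_fp=f_fp)    # x_init / f_fp as in the earlier sketch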