Source Code for Module paramz.optimization.verbose_optimization

#===============================================================================
# Copyright (c) 2015, Max Zwiessele
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of paramax nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================

from __future__ import print_function
import numpy as np
import sys
import time

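# Helper returning the signed base-10 exponents of the current objective value
# and squared gradient norm, e.g. exponents(1234.5, 1e-3) -> array([ 3., -3.]).
# print_out() compares successive values of these exponents to decide when the
# single-line console printout should advance to a new line.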
def exponents(fnow, current_grad):
    # float() instead of the removed np.float alias keeps this working on NumPy >= 1.24
    exps = [np.abs(float(fnow)), 1 if current_grad is np.nan else current_grad]
    return np.sign(exps) * np.log10(exps).astype(int)

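# Context manager that reports optimization progress, either as an ipywidgets
# progress bar plus HTML table (when running in an IPython notebook) or as a
# plain-text status line on stdout. It registers itself as an observer on the
# model, so print_status() is called on every parameter update.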
class VerboseOptimization(object):
    def __init__(self, model, opt, maxiters, verbose=False, current_iteration=0, ipython_notebook=True, clear_after_finish=False):
        self.verbose = verbose
        if self.verbose:
            self.model = model
            self.iteration = current_iteration
            self.p_iter = self.iteration
            self.maxiters = maxiters
            self.len_maxiters = len(str(int(maxiters)))
            self.opt_name = opt.opt_name
            self.opt = opt
            self.model.add_observer(self, self.print_status)
            self.status = 'running'
            self.clear = clear_after_finish

            self.update()

            try:  # pragma: no cover
                from IPython.display import display
                from ipywidgets import IntProgress, HTML, Box, VBox, HBox, FlexBox
                self.text = HTML(width='100%')
                self.progress = IntProgress(min=0, max=maxiters)
                #self.progresstext = Text(width='100%', disabled=True, value='0/{}'.format(maxiters))
                self.model_show = HTML()
                self.ipython_notebook = ipython_notebook
            except:
                # Not in IPython notebook
                self.ipython_notebook = False

            if self.ipython_notebook:  # pragma: no cover
                left_col = VBox(children=[self.progress, self.text], padding=2, width='40%')
                right_col = Box(children=[self.model_show], padding=2, width='60%')
                self.hor_align = FlexBox(children=[left_col, right_col], width='100%', orientation='horizontal')

                display(self.hor_align)

                try:
                    self.text.set_css('width', '100%')
                    left_col.set_css({
                        'padding': '2px',
                        'width': "100%",
                    })

                    right_col.set_css({
                        'padding': '2px',
                    })

                    self.hor_align.set_css({
                        'width': "100%",
                    })

                    self.hor_align.remove_class('vbox')
                    self.hor_align.add_class('hbox')

                    left_col.add_class("box-flex1")
                    right_col.add_class('box-flex0')
                except:
                    pass

                #self.text.add_class('box-flex2')
                #self.progress.add_class('box-flex1')
            else:
                self.exps = exponents(self.fnow, self.current_gradient)
                print('Running {} Code:'.format(self.opt_name))
                print('  {3:7s}   {0:{mi}s}   {1:11s}    {2:11s}'.format("i", "f", "|g|", "runtime", mi=self.len_maxiters))

    def __enter__(self):
        self.start = time.time()
        self._time = self.start
        return self

    def print_out(self, seconds):
        if seconds < 60:
            ms = (seconds % 1) * 100
            self.timestring = "{s:0>2d}s{ms:0>2d}".format(s=int(seconds), ms=int(ms))
        else:
            m, s = divmod(seconds, 60)
            if m > 59:
                h, m = divmod(m, 60)
                if h > 23:
                    d, h = divmod(h, 24)
                    self.timestring = '{d:0>2d}d{h:0>2d}h{m:0>2d}'.format(m=int(m), h=int(h), d=int(d))
                else:
                    self.timestring = '{h:0>2d}h{m:0>2d}m{s:0>2d}'.format(m=int(m), s=int(s), h=int(h))
            else:
                ms = (seconds % 1) * 100
                self.timestring = '{m:0>2d}m{s:0>2d}s{ms:0>2d}'.format(m=int(m), s=int(s), ms=int(ms))
        if self.ipython_notebook:  # pragma: no cover
            names_vals = [['optimizer', "{:s}".format(self.opt_name)],
                          ['runtime', "{:>s}".format(self.timestring)],
                          ['evaluation', "{:>0{l}}".format(self.iteration, l=self.len_maxiters)],
                          ['objective', "{: > 12.3E}".format(self.fnow)],
                          ['||gradient||', "{: >+12.3E}".format(float(self.current_gradient))],
                          ['status', "{:s}".format(self.status)],
                          ]
            #message = "Lik:{:5.3E} Grad:{:5.3E} Lik:{:5.3E} Len:{!s}".format(float(m.log_likelihood()), np.einsum('i,i->', grads, grads), float(m.likelihood.variance), " ".join(["{:3.2E}".format(l) for l in m.kern.lengthscale.values]))
            html_begin = """<style type="text/css">
.tg-opt  {font-family:"Courier New", Courier, monospace !important;padding:2px 3px;word-break:normal;border-collapse:collapse;border-spacing:0;border-color:#DCDCDC;margin:0px auto;width:100%;}
.tg-opt td{font-family:"Courier New", Courier, monospace !important;font-weight:bold;color:#444;background-color:#F7FDFA;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;}
.tg-opt th{font-family:"Courier New", Courier, monospace !important;font-weight:normal;color:#fff;background-color:#26ADE4;border-style:solid;border-width:1px;overflow:hidden;word-break:normal;border-color:#DCDCDC;}
.tg-opt .tg-left{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:left;}
.tg-opt .tg-right{font-family:"Courier New", Courier, monospace !important;font-weight:normal;text-align:right;}
</style>
<table class="tg-opt">"""
            html_end = "</table>"
            html_body = ""
            for name, val in names_vals:
                html_body += "<tr>"
                html_body += "<td class='tg-left'>{}</td>".format(name)
                html_body += "<td class='tg-right'>{}</td>".format(val)
                html_body += "</tr>"
            self.text.value = html_begin + html_body + html_end
            self.progress.value = (self.iteration + 1)
            #self.progresstext.value = '0/{}'.format((self.iteration+1))
            self.model_show.value = self.model._repr_html_()
        else:
            n_exps = exponents(self.fnow, self.current_gradient)
            if self.iteration - self.p_iter >= 20 * np.random.rand():
                a = self.iteration >= self.p_iter * 2.78
                b = np.any(n_exps < self.exps)
                if a or b:
                    self.p_iter = self.iteration
                    print('')
                if b:
                    self.exps = n_exps
            print('\r', end=' ')
            print('{3:} {0:>0{mi}g}  {1:> 12e}  {2:> 12e}'.format(self.iteration, float(self.fnow), float(self.current_gradient), "{:>8s}".format(self.timestring), mi=self.len_maxiters), end=' ')  # print 'Iteration:', iteration, ' Objective:', fnow, ' Scale:', beta, '\r',
            sys.stdout.flush()

    def print_status(self, me, which=None):
        self.update()

        t = time.time()
        seconds = t - self.start
        #sys.stdout.write(" "*len(self.message))
        if t - self._time > 1. or seconds < .2:
            self.print_out(seconds)
            self._time = t

        self.iteration += 1

    def update(self):
        self.fnow = self.model.objective_function()
        if self.model.obj_grads is not None:
            grad = self.model.obj_grads
            self.current_gradient = np.dot(grad, grad)
        else:
            self.current_gradient = np.nan

    def finish(self, opt):  # pragma: no cover
        import warnings
        warnings.warn('Finish now automatic, deprecating', DeprecationWarning)

    def __exit__(self, type, value, traceback):
        if self.verbose:
            self.status = self.opt.status

            self.stop = time.time()
            self.model.remove_observer(self)
            self.print_out(self.stop - self.start)

            if not self.ipython_notebook:
                print()
                print('Runtime: {}'.format("{:>9s}".format(self.timestring)))
                print('Optimization status: {0}'.format(self.status))
                print()
            elif self.clear:  # pragma: no cover
                self.hor_align.close()
            else:  # pragma: no cover
                if 'conv' in self.status.lower():
                    self.progress.bar_style = 'success'
                elif self.iteration >= self.maxiters:
                    self.progress.bar_style = 'warning'
                else:
                    self.progress.bar_style = 'danger'

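#===============================================================================
# Usage sketch (illustration only, not part of the original module).
# VerboseOptimization is meant to wrap an optimizer run as a context manager;
# paramz drives it internally when a model is optimized with messages=True.
# ToyModel and ToyOpt below are hypothetical stand-ins that show only the
# interface this class relies on: objective_function(), obj_grads and
# add_observer()/remove_observer() on the model, opt_name/status on the
# optimizer.
#===============================================================================
if __name__ == '__main__':
    class ToyOpt(object):
        opt_name = 'toy gradient descent'
        status = 'converged'

    class ToyModel(object):
        def __init__(self):
            self.x = np.zeros(2)
            self._observers = []
        def objective_function(self):
            # toy objective: sum((x - 3)^2)
            return float(np.sum((self.x - 3.0) ** 2))
        @property
        def obj_grads(self):
            return 2.0 * (self.x - 3.0)
        def add_observer(self, who, callback):
            self._observers.append((who, callback))
        def remove_observer(self, who):
            self._observers = [(w, c) for w, c in self._observers if w is not who]
        def notify(self):
            for _, callback in self._observers:
                callback(self)

    m, opt = ToyModel(), ToyOpt()
    with VerboseOptimization(m, opt, maxiters=50, verbose=True, ipython_notebook=False):
        for _ in range(50):
            m.x -= 0.1 * m.obj_grads  # plain gradient-descent step on the toy objective
            m.notify()                # notifies the observer, which prints the status line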