Package paramz :: Package core :: Module gradcheckable
[hide private]
[frames] | [no frames]

Source Code for Module paramz.core.gradcheckable

 1  #=============================================================================== 
 2  # Copyright (c) 2015, Max Zwiessele 
 3  # All rights reserved. 
 4  # 
 5  # Redistribution and use in source and binary forms, with or without 
 6  # modification, are permitted provided that the following conditions are met: 
 7  # 
 8  # * Redistributions of source code must retain the above copyright notice, this 
 9  #   list of conditions and the following disclaimer. 
10  # 
11  # * Redistributions in binary form must reproduce the above copyright notice, 
12  #   this list of conditions and the following disclaimer in the documentation 
13  #   and/or other materials provided with the distribution. 
14  # 
15  # * Neither the name of paramz.core.gradcheckable nor the names of its 
16  #   contributors may be used to endorse or promote products derived from 
17  #   this software without specific prior written permission. 
18  # 
19  # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 
20  # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
21  # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 
22  # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 
23  # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 
24  # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 
25  # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
26  # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 
27  # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 
28  # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
29  #=============================================================================== 
30  from . import HierarchyError 
31  from .pickleable import Pickleable 
32  from .parentable import Parentable 
33   
class Gradcheckable(Pickleable, Parentable):
    """
    Mixin that makes an object gradient-checkable.

    Currently this is only a thin wrapper: the actual check is performed by
    the highest parent in the object hierarchy.
    TODO: Can be done better, by only changing parameters of the current
    parameter handle, such that the object hierarchy only has to change for
    those.
    """
    def __init__(self, *a, **kw):
        # Plain cooperative-inheritance pass-through; no state of its own.
        super(Gradcheckable, self).__init__(*a, **kw)

    def checkgrad(self, verbose=0, step=1e-6, tolerance=1e-3, df_tolerance=1e-12):
        """
        Check the gradient of this parameter with respect to the highest
        parent's objective function.

        A three-point numerical estimate of the gradient is computed by
        wiggling the parameters with stepsize `step`. The check passes if
        either the ratio or the difference between the numerical and the
        analytical gradient is smaller than `tolerance`.

        :param bool verbose: whether each parameter shall be checked individually.
        :param float step: the stepsize for the numerical three point gradient estimate.
        :param float tolerance: the tolerance for the gradient ratio or difference.
        :param float df_tolerance: the tolerance for df_tolerance

        .. note::
            The *dF_ratio* indicates the limit of accuracy of numerical gradients.
            If it is too small, e.g., smaller than 1e-12, the numerical gradients
            are usually not accurate enough for the tests (shown with blue).
        """
        # Always run the gradcheck on the highest parent: the hierarchy's
        # root is assumed to hold the fixes, and _checkgrad relies on that
        # assumption, so the delegation target is chosen here.
        owner = self._highest_parent_ if self.has_parent() else self
        return owner._checkgrad(self, verbose=verbose, step=step,
                                tolerance=tolerance, df_tolerance=df_tolerance)

    def _checkgrad(self, param, verbose=0, step=1e-6, tolerance=1e-3, df_tolerance=1e-12):
        """
        Perform the checkgrad on the model.
        TODO: this can be done more efficiently, when doing it inside here

        Base implementation: there is no model/objective available at this
        level of the hierarchy, so gradient checking is impossible.

        :raises HierarchyError: always, unless overridden by a model class.
        """
        raise HierarchyError("This parameter is not in a model with a likelihood, and, therefore, cannot be gradient checked!")