1 '''
2 Created on 16 Oct 2015
3
4 @author: Max Zwiessele
5 '''
6 import numpy as np
7 from ..model import Model
8 from ..core.observable_array import ObsAr
9 from ..param import Param
10 from ..caching import Cacher
11 from ..parameterized import Parameterized
14 '''
15 Ridge regression with regularization.
16
For any regularization to work we need gradient based optimization.
18 '''
def __init__(self, X, Y, regularizer=None, basis=None, name='ridge_regression'):
    '''
    Ridge regression model with a pluggable basis and regularizer.

    :param array-like X: the inputs X of the regression problem
    :param array-like Y: the outputs Y
    :param :py:class:`paramz.examples.ridge_regression.Regularizer` regularizer: the regularizer to use [default: Ridge(1)]
    :param :py:class:`paramz.examples.ridge_regression.Basis` basis: basis used to build the design matrix [default: Polynomial(1)]
    :param str name: the name of this regression object
    '''
    super(RidgeRegression, self).__init__(name=name)
    assert X.ndim == 2, 'inputs need to be at least a column vector'
    # fix: the original message said 'inputs' here too (copy-paste); Y are the outputs
    assert Y.ndim == 2, 'outputs need to be at least a column vector'

    # ObsAr wraps the arrays as observable, so the model can react to changes
    self.X = ObsAr(X)
    self.Y = ObsAr(Y)

    if basis is None:
        basis = Polynomial(1)
    self.basis = basis

    if regularizer is None:
        regularizer = Ridge(1)
    self.regularizer = regularizer

    # the regularizer owns the weights; size them from the basis and input dim
    self.regularizer.init(basis, X.shape[1])

    self.link_parameters(self.regularizer, self.basis)
44
45 @property
48
49 @property
52
53 @property
55 return self.basis._basis
56
def phi(self, Xpred, degrees=None):
    """
    Build the weighted design matrix phi(Xpred) for this model.

    Each requested degree contributes one slice: the basis evaluated at
    Xpred for that degree, scaled elementwise by the matching weight row.

    :param array-like Xpred: inputs to compute the design matrix for
    :param array-like degrees: array of degrees to use [default=range(self.degree+1)]
    :returns array-like phi: The design matrix [degree x #samples x #dimensions]
    """
    assert Xpred.shape[1] == self.X.shape[1], "Need to predict with same shape as training data."
    if degrees is None:
        degrees = range(self.basis.degree+1)
    design = np.empty((len(degrees), Xpred.shape[0], Xpred.shape[1]))
    for row, deg in enumerate(degrees):
        # indexing the weights with [[deg], :] keeps a 2d (1 x dim) row,
        # which broadcasts over the samples of the basis slice
        design[row] = self._phi(Xpred, deg) * self.weights[[deg], :]
    return design
76
# NOTE(review): the enclosing `def` line for this block is missing from this
# view (presumably Model's parameters_changed / objective update — confirm
# against the full file); code kept byte-identical.
# Forward pass: accumulate f(X) = sum_i phi_i(X) . W_i over all degrees.
tmp_outer = 0.
for i in range(self.degree+1):

# cached basis slice for degree i
tmp_X = self._phi(self.X, i)
target_f = tmp_X.dot(self.weights[[i], :].T)
tmp_outer += target_f

# residual Y - f(X)
tmp_outer = (self.Y-tmp_outer)
# Backward pass: per-degree gradient of the squared-error term.
for i in range(self.degree+1):
tmp_X = self._phi(self.X, i)



# d/dW_i sum((Y - f)^2) = -2 * sum(residual * phi_i) over samples
self.weights.gradient[i] -= 2*(tmp_outer*tmp_X).sum(0)
# objective: squared error plus the regularizer penalty
self._obj = (((tmp_outer)**2).sum() + self.regularizer.error.sum())
94
97
# NOTE(review): the enclosing `def` line is missing from this view — this
# reads like the predict path: f(Xnew) = sum_i phi_i(Xnew) . W_i. Confirm
# signature (takes Xnew) against the full file; code kept byte-identical.
tmp_outer = 0.
for i in range(self.degree+1):

# basis slice of Xnew for degree i, projected through weight row i
tmp_X = self._phi(Xnew, i)
tmp_outer += tmp_X.dot(self.weights[[i], :].T)
return tmp_outer
105
106
107
108 -class Basis(Parameterized):
def __init__(self, degree, name='basis'):
    """
    Design-matrix builder: evaluates the basis expansion phi(X).

    The weights are held in the regularizer, so this object only
    represents (and caches) the design matrix itself.

    :param int degree: highest degree this basis is evaluated at
    :param str name: name of this parameterized object
    """
    super(Basis, self).__init__(name=name)
    self.degree = degree
    # one cache entry per degree (0..degree), so repeated evaluations of
    # the same degree during optimization are not recomputed
    self._basis = Cacher(self.basis, degree + 1, [], [])
119
120
121
122
123
125 """
126 Return the ith basis dimension.
127 In the polynomial case, this is X**index.
128 You can write your own basis function here, inheriting from this class
129 and the gradients will still check.
130
131 Note: i will be zero for the first degree. This means we
132 have also a bias in the model, which makes the problem of having an explicit
133 bias obsolete.
134 """
135 raise NotImplementedError('Implement the basis you want to optimize over.')
136
138 - def __init__(self, degree, name='polynomial'):
140
143
147 - def __init__(self, lambda_, name='regularizer'):
151
152 - def init(self, basis, input_dim):
169
173
175 raise NotImplementedError('Set the error `error` and gradient of weights in here')
176
177 -class Lasso(Regularizer):
178 - def __init__(self, lambda_, name='Lasso'):
180
184
185 -class Ridge(Regularizer):
186 - def __init__(self, lambda_, name='Ridge'):
188
192