Skip to content

Commit cb5fa6a

Browse files
authored
Merge pull request #455 from SheffieldML/devel
1.5.6
2 parents a94d26d + daa28f5 commit cb5fa6a

File tree

15 files changed

+363
-36
lines changed

15 files changed

+363
-36
lines changed

.gitchangelog.rc

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ ignore_regexps = [
7676
##
7777
section_regexps = [
7878
('New', [
79-
r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
79+
r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
8080
]),
8181
('Changes', [
8282
r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
@@ -87,7 +87,6 @@ section_regexps = [
8787

8888
('Other', None ## Match all lines
8989
),
90-
9190
]
9291

9392

@@ -147,7 +146,7 @@ tag_filter_regexp = r'^v[0-9]+\.[0-9]+(\.[0-9]+)?$'
147146
##
148147
## This label will be used as the changelog Title of the last set of changes
149148
## between last valid tag and HEAD if any.
150-
unreleased_version_label = "%%__version__%% (unreleased)"
149+
unreleased_version_label = "Unreleased"
151150

152151

153152
## ``output_engine`` is a callable
@@ -178,7 +177,6 @@ unreleased_version_label = "%%__version__%% (unreleased)"
178177
## Examples:
179178
## - makotemplate("restructuredtext")
180179
##
181-
182180
#output_engine = rest_py
183181
#output_engine = mustache("restructuredtext")
184182
output_engine = mustache("markdown")
@@ -189,4 +187,4 @@ output_engine = mustache("markdown")
189187
##
190188
## This option tells git-log whether to include merge commits in the log.
191189
## The default is to include them.
192-
include_merge = True
190+
include_merge = True

.travis.yml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,10 @@ before_install:
2828
install:
2929
- echo $PATH
3030
- source install_retry.sh
31+
- if [[ "$TRAVIS_OS_NAME" == "osx" ]];
32+
then
33+
conda install --yes pandoc;
34+
fi;
3135
- pip install codecov
3236
- pip install coveralls
3337
- pip install pypandoc

CHANGELOG.md

Lines changed: 134 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,34 +1,97 @@
11
# Changelog
22

3-
## v1.5.5 (2016-10-03)
3+
## v1.5.6 (2016-11-07)
4+
5+
### New
6+
7+
* Added poly basis kernel tests and import. [mzwiessele]
8+
9+
* Gitchangelogrc. [mzwiessele]
10+
11+
### Changes
12+
13+
* Added polynomial basis func kernel. [mzwiessele]
14+
15+
### Fix
16+
17+
* Installation #451. [Max Zwiessele]
18+
19+
* Pandoc install under travis osx. [mzwiessele]
20+
21+
* Pandoc install under travis osx. [mzwiessele]
22+
23+
* Pypi changing to pypi.org. [mzwiessele]
424

525
### Other
626

7-
* Bump version: 1.5.4 → 1.5.5. [Max Zwiessele]
27+
* Bump version: 1.5.5 → 1.5.6. [mzwiessele]
828

29+
* Merge pull request #448 from thangbui/devel. [Max Zwiessele]
930

10-
## v1.5.4 (2016-10-03)
31+
Added pep.py -- Sparse Gaussian processes using Power Expectation Propagation
1132

12-
### New
33+
* Renamed pep test scripts. [Thang Bui]
1334

14-
* Added deployment pull request instructions for developers. [mzwiessele]
35+
* Fixed seed in pep test script #448. [Thang Bui]
1536

16-
* Using gitchangelog to keep track of changes and log new features. [mzwiessele]
37+
* Added tests. [Thang Bui]
38+
39+
* Added pep.py -- Sparse Gaussian processes using Power Expectation Propagation. [Thang Bui]
40+
41+
This allows interpolation between FITC (EP or alpha = 1), and Titsias's variational (VarDTC, VFE when alpha = 0).
42+
43+
* Merge pull request #452 from SheffieldML/setupreq. [Max Zwiessele]
44+
45+
fix: Installation #451
46+
47+
* Merge pull request #447 from SheffieldML/polinomial. [Max Zwiessele]
48+
49+
Polynomial
50+
51+
* Merge branch 'devel' into polinomial. [mzwiessele]
52+
53+
* Merge pull request #449 from SheffieldML/deploy. [Max Zwiessele]
54+
55+
Deploy
56+
57+
* Update setup.py. [Mike Croucher]
58+
59+
* Merge pull request #446 from SheffieldML/devel. [Max Zwiessele]
60+
61+
newest patch fixing some issues
62+
63+
* Merge branch 'devel' of github.com:SheffieldML/GPy into devel. [mzwiessele]
64+
65+
* Merge branch 'deploy' into devel. [Max Zwiessele]
66+
67+
* Merge pull request #442 from SheffieldML/devel. [Max Zwiessele]
68+
69+
New Major for GPy
70+
71+
* Merge pull request #426 from SheffieldML/devel. [Max Zwiessele]
72+
73+
some fixes from issues and beckdaniels warped gp improvements
74+
75+
76+
## v1.5.5 (2016-10-03)
77+
78+
### Other
79+
80+
* Bump version: 1.5.4 → 1.5.5. [Max Zwiessele]
81+
82+
83+
## v1.5.4 (2016-10-03)
1784

1885
### Changes
1986

2087
* Version update on paramz. [Max Zwiessele]
2188

2289
* Fixed naming in variational priors : [Max Zwiessele]
2390

24-
* Changelog update. [mzwiessele]
25-
2691
### Fix
2792

2893
* Bug in dataset (in fn download_url) which wrongly interprets the Content-Length meta data, and just takes first character. [Michael T Smith]
2994

30-
* What's new update fix #425 in changelog. [mzwiessele]
31-
3295
### Other
3396

3497
* Bump version: 1.5.3 → 1.5.4. [Max Zwiessele]
@@ -39,25 +102,14 @@
39102

40103
* Merge branch 'kurtCutajar-devel' into devel. [mzwiessele]
41104

42-
* Bump version: 1.5.2 → 1.5.3. [mzwiessele]
43-
44-
* Merge branch 'devel' into kurtCutajar-devel. [mzwiessele]
45-
46-
* Bump version: 1.5.1 → 1.5.2. [mzwiessele]
47-
48-
* Minor readme changes. [mzwiessele]
49-
50-
* Bump version: 1.5.0 → 1.5.1. [mzwiessele]
51-
52-
* Bump version: 1.4.3 → 1.5.0. [mzwiessele]
53105

54-
* Bump version: 1.4.2 → 1.4.3. [mzwiessele]
106+
## v1.5.3 (2016-09-06)
55107

56-
* Bump version: 1.4.1 → 1.4.2. [mzwiessele]
108+
### Other
57109

58-
* Merge branch 'devel' of github.com:SheffieldML/GPy into devel. [mzwiessele]
110+
* Bump version: 1.5.2 → 1.5.3. [mzwiessele]
59111

60-
* [kern] fix #440. [mzwiessele]
112+
* Merge branch 'devel' into kurtCutajar-devel. [mzwiessele]
61113

62114
* [doc] cleanup. [mzwiessele]
63115

@@ -92,6 +144,63 @@
92144
* Added core code for GpSSM and GpGrid. [kcutajar]
93145

94146

147+
## v1.5.2 (2016-09-06)
148+
149+
### New
150+
151+
* Added deployment pull request instructions for developers. [mzwiessele]
152+
153+
### Other
154+
155+
* Bump version: 1.5.1 → 1.5.2. [mzwiessele]
156+
157+
* Minor readme changes. [mzwiessele]
158+
159+
160+
## v1.5.1 (2016-09-06)
161+
162+
### Fix
163+
164+
* What's new update fix #425 in changelog. [mzwiessele]
165+
166+
### Other
167+
168+
* Bump version: 1.5.0 → 1.5.1. [mzwiessele]
169+
170+
171+
## v1.5.0 (2016-09-06)
172+
173+
### New
174+
175+
* Using gitchangelog to keep track of changes and log new features. [mzwiessele]
176+
177+
### Other
178+
179+
* Bump version: 1.4.3 → 1.5.0. [mzwiessele]
180+
181+
182+
## v1.4.3 (2016-09-06)
183+
184+
### Changes
185+
186+
* Changelog update. [mzwiessele]
187+
188+
### Other
189+
190+
* Bump version: 1.4.2 → 1.4.3. [mzwiessele]
191+
192+
193+
## v1.4.2 (2016-09-06)
194+
195+
### Other
196+
197+
* Bump version: 1.4.1 → 1.4.2. [mzwiessele]
198+
199+
* Merge branch 'devel' of github.com:SheffieldML/GPy into devel. [mzwiessele]
200+
201+
* [kern] fix #440. [mzwiessele]
202+
203+
95204
## v1.4.1 (2016-09-06)
96205

97206
### Other

GPy/__version__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "1.5.5"
1+
__version__ = "1.5.6"

GPy/inference/latent_function_inference/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ def __setstate__(self, state):
6767
from .expectation_propagation import EP, EPDTC
6868
from .dtc import DTC
6969
from .fitc import FITC
70+
from .pep import PEP
7071
from .var_dtc_parallel import VarDTC_minibatch
7172
from .var_gauss import VarGauss
7273
from .gaussian_grid_inference import GaussianGridInference
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
from .posterior import Posterior
2+
from ...util.linalg import jitchol, tdot, dtrtrs, dtrtri, pdinv
3+
from ...util import diag
4+
import numpy as np
5+
from . import LatentFunctionInference
6+
log_2_pi = np.log(2*np.pi)
7+
8+
class PEP(LatentFunctionInference):
    r"""
    Sparse Gaussian process regression using Power Expectation Propagation (PEP).

    The power parameter ``alpha`` interpolates between known sparse
    approximations: alpha \approx 0 recovers Titsias's variational solution
    (VarDTC / VFE) and alpha = 1 recovers FITC (EP).

    Reference: "A Unifying Framework for Sparse Gaussian Process Approximation
    using Power Expectation Propagation", https://arxiv.org/abs/1605.07066

    NOTE: the docstring is a raw string on purpose — in a plain string the
    ``\a`` in ``\approx`` would be interpreted as the BEL escape character.
    """
    # Jitter added to the diagonal of Kmm before factorization, for numerical
    # stability of the Cholesky decomposition.
    const_jitter = 1e-6

    def __init__(self, alpha):
        """
        :param alpha: the power in Power EP; controls the interpolation
            between VarDTC (alpha -> 0) and FITC (alpha = 1).
        """
        super(PEP, self).__init__()
        self.alpha = alpha

    def inference(self, kern, X, Z, likelihood, Y, mean_function=None, Y_metadata=None):
        """
        Compute the PEP approximate posterior, log marginal likelihood and
        gradients.

        :param kern: covariance function object with K and Kdiag methods
        :param X: input locations, shape (num_data, input_dim)
        :param Z: inducing input locations, shape (num_inducing, input_dim)
        :param likelihood: Gaussian likelihood; must be homoscedastic
            (a single noise variance)
        :param Y: observed outputs, shape (num_data, output_dim)
        :param mean_function: not supported; must be None
        :param Y_metadata: metadata forwarded to the likelihood
        :returns: (posterior, log_marginal, grad_dict) where grad_dict holds
            'dL_dKmm', 'dL_dKdiag', 'dL_dKnm' and 'dL_dthetaL'
        :raises NotImplementedError: if the likelihood noise is heteroscedastic
        """
        assert mean_function is None, "inference with a mean function not implemented"

        num_inducing, _ = Z.shape
        num_data, output_dim = Y.shape

        # make sure the noise is not heteroscedastic
        sigma_n = likelihood.gaussian_variance(Y_metadata)
        if sigma_n.size > 1:
            raise NotImplementedError("no hetero noise with this implementation of PEP")

        Kmm = kern.K(Z)
        Knn = kern.Kdiag(X)
        Knm = kern.K(X, Z)
        U = Knm

        # factor Kmm (jitter keeps the inverse/Cholesky well conditioned)
        diag.add(Kmm, self.const_jitter)
        Kmmi, L, Li, _ = pdinv(Kmm)

        # compute beta_star, the effective (per-point) noise precision:
        # the alpha-weighted FITC-style correction shrinks the precision by
        # the marginal posterior variance not explained by the inducing points
        LiUT = np.dot(Li, U.T)
        sigma_star = sigma_n + self.alpha * (Knn - np.sum(np.square(LiUT), 0))
        beta_star = 1./sigma_star

        # Compute and factor A = I + Li U' B U Li'
        A = tdot(LiUT*np.sqrt(beta_star)) + np.eye(num_inducing)
        LA = jitchol(A)

        # back substitute to get b, P, v
        URiy = np.dot(U.T*beta_star, Y)
        tmp, _ = dtrtrs(L, URiy, lower=1)
        b, _ = dtrtrs(LA, tmp, lower=1)
        tmp, _ = dtrtrs(LA, b, lower=1, trans=1)
        v, _ = dtrtrs(L, tmp, lower=1, trans=1)
        tmp, _ = dtrtrs(LA, Li, lower=1, trans=0)
        P = tdot(tmp.T)

        # (1 - alpha)/alpha appears in the PEP free-energy correction terms
        alpha_const_term = (1.0-self.alpha) / self.alpha

        # compute log marginal (uses the module-level constant log_2_pi
        # instead of recomputing np.log(2*np.pi))
        log_marginal = -0.5*num_data*output_dim*log_2_pi + \
                        -np.sum(np.log(np.diag(LA)))*output_dim + \
                        0.5*output_dim*(1+alpha_const_term)*np.sum(np.log(beta_star)) + \
                        -0.5*np.sum(np.square(Y.T*np.sqrt(beta_star))) + \
                        0.5*np.sum(np.square(b)) + 0.5*alpha_const_term*num_data*np.log(sigma_n)

        # compute dL_dR, the gradient w.r.t. the per-point noise variance
        Uv = np.dot(U, v)
        dL_dR = 0.5*(np.sum(U*np.dot(U, P), 1) - (1.0+alpha_const_term)/beta_star + np.sum(np.square(Y), 1) - 2.*np.sum(Uv*Y, 1) \
            + np.sum(np.square(Uv), 1))*beta_star**2

        # Compute dL_dKmm
        vvT_P = tdot(v.reshape(-1, 1)) + P
        dL_dK = 0.5*(Kmmi - vvT_P)
        KiU = np.dot(Kmmi, U.T)
        dL_dK += self.alpha * np.dot(KiU*dL_dR, KiU.T)

        # Compute dL_dU (gradient w.r.t. the cross-covariance Knm, transposed)
        vY = np.dot(v.reshape(-1, 1), Y.T)
        dL_dU = vY - np.dot(vvT_P, U.T)
        dL_dU *= beta_star
        dL_dU -= self.alpha * 2.*KiU*dL_dR

        dL_dthetaL = likelihood.exact_inference_gradients(dL_dR)
        dL_dthetaL += 0.5*alpha_const_term*num_data / sigma_n
        grad_dict = {'dL_dKmm': dL_dK, 'dL_dKdiag':dL_dR * self.alpha, 'dL_dKnm':dL_dU.T, 'dL_dthetaL':dL_dthetaL}

        # construct a posterior object
        post = Posterior(woodbury_inv=Kmmi-P, woodbury_vector=v, K=Kmm, mean=None, cov=None, K_chol=L)

        return post, log_marginal, grad_dict

GPy/kern/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
from .src.splitKern import SplitKern,DEtime
3333
from .src.splitKern import DEtime as DiffGenomeKern
3434
from .src.spline import Spline
35-
from .src.basis_funcs import LogisticBasisFuncKernel, LinearSlopeBasisFuncKernel, BasisFuncKernel, ChangePointBasisFuncKernel, DomainKernel
35+
from .src.basis_funcs import LogisticBasisFuncKernel, LinearSlopeBasisFuncKernel, BasisFuncKernel, ChangePointBasisFuncKernel, DomainKernel, PolynomialBasisFuncKernel
3636
from .src.grid_kerns import GridRBF
3737

3838
from .src.sde_matern import sde_Matern32

GPy/kern/src/basis_funcs.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,26 @@ def _K(self, X, X2):
102102
phi2 = phi2[:, None]
103103
return phi1.dot(phi2.T)
104104

105+
class PolynomialBasisFuncKernel(BasisFuncKernel):
    def __init__(self, input_dim, degree, variance=1., active_dims=None, ARD=True, name='polynomial_basis'):
        """
        A polynomial basis function kernel.

        The feature map stacks the monomials of the input up to (and
        including) the given degree, phi(x) = [1, x, x**2, ..., x**degree],
        so the resulting kernel corresponds to Bayesian polynomial
        regression over those basis functions.

        :param input_dim: dimensionality of the input space
        :param degree: highest monomial degree; produces degree+1 basis
            functions (including the constant term x**0)
        :param variance: prior variance of the basis-function weights
        :param active_dims: which input dimensions the kernel acts on
        :param ARD: one variance per basis function (True) or shared (False)
        :param name: name of the kernel

        (The previous docstring here was copy-pasted from
        LinearSlopeBasisFuncKernel and described start/stop linear segments,
        which does not apply to this kernel.)
        """
        self.degree = degree
        super(PolynomialBasisFuncKernel, self).__init__(input_dim, variance, active_dims, ARD, name)

    @Cache_this(limit=3, ignore_args=())
    def _phi(self, X):
        # Build the (num_data, degree+1) design matrix of monomials,
        # one column per power of X from 0 to degree.
        phi = np.empty((X.shape[0], self.degree+1))
        for i in range(self.degree+1):
            phi[:, [i]] = X**i
        return phi
105125

106126
class LinearSlopeBasisFuncKernel(BasisFuncKernel):
107127
def __init__(self, input_dim, start, stop, variance=1., active_dims=None, ARD=False, name='linear_segment'):

0 commit comments

Comments
 (0)