6 from GPy.util.linalg
import jitchol, backsub_both_sides, tdot, dtrtrs, dtrtri,pdinv
7 from GPy.util
import diag
8 from GPy.core.parameterization.variational
import VariationalPosterior
10 from GPy.inference.latent_function_inference
import LatentFunctionInference
11 from GPy.inference.latent_function_inference.posterior
import Posterior
# Defect fixed: the extraction fused the original file's line number ("12 ")
# into the statement, making it a syntax error.  Restored the plain constant.
# log(2*pi), precomputed once at import time; reused by the Gaussian
# log-marginal-likelihood terms in the inference code below.
log_2_pi = np.log(2*np.pi)
15 from mpi4py
import MPI
22 Inference the marginal likelihood through \frac{p(y,y*)}{p(y)} 30 return np.sum(np.square(Y))
35 return Y.view(np.ndarray)
37 return jitchol(tdot(Y))
# Compute the psi-statistics of the kernel (scaled by the noise precision
# `beta`) needed by the SVI-VarDTC variational lower bound.
#
# NOTE(review): this block is a garbled extraction.  The original file's line
# numbers are fused into the code, statements are split mid-expression, and
# several control-flow lines (the `if uncertain_inputs:` / `else:` guards and
# the YRY assignments around original lines 40-43, 47-50, 52, 54-55, 61,
# 63-64, 66, 68-69, 71) are missing from this view.  Comments below describe
# only what is visible; recover the full body from version control before
# editing the logic.
39 def gatherPsiStat(self, kern, X, Z, Y, beta, uncertain_inputs, D, missing_data):
# Presumably the `uncertain_inputs` branch: expected psi statistics over the
# variational input distribution X (TODO confirm against the missing guard).
44 psi0 = kern.psi0(Z, X)
45 psi1 = kern.psi1(Z, X)*beta
# With missing data the per-datapoint psi2 (psi2n) is kept, since each row
# then contributes with its own observed-output count.
46 psi2 = kern.psi2(Z, X)*beta
if not missing_data
else kern.psi2n(Z, X)*beta
# Presumably the deterministic-input branch: psi2 built from per-row outer
# products of psi1 when data are missing ...
51 psi2 = psi1[:,
None,:]*psi1[:,:,
None]*beta
# ... or collapsed to a single Gram matrix psi1^T psi1 otherwise.
53 psi2 = tdot(psi1.T)*beta
# Uncertain outputs: Y is itself a variational posterior with mean m and
# variance s; YRY accumulates E[tr(Y R Y^T)] with R = beta*I.
56 if isinstance(Y, VariationalPosterior):
57 m, s = Y.mean, Y.variance
58 psi1Y = np.dot(m.T,psi1)
59 YRY = (np.square(m).sum()+s.sum())*beta
60 psi0 = (D*psi0).sum()*beta
# Plain-ndarray outputs, presumably the missing-data path (Y expected to be
# pre-masked by the caller; the matching YRY assignment is not visible here).
62 psi1Y = np.dot((Y).T,psi1)
65 psi0 = (psi0*D).sum()*beta
# Plain-ndarray outputs, fully observed path.
67 psi1Y = np.dot(Y.T,psi1)
70 psi0 = (psi0*D).sum()*beta
# Returns (psi0, psi2, YRY, psi1, psi1Y); psi0 and YRY are reduced to
# scalars above, the others stay as arrays.
72 return psi0, psi2, YRY, psi1, psi1Y
# SVI-VarDTC inference: compute the variational lower bound logL on the
# marginal likelihood and the gradient dictionary, given a variational
# posterior qU over the inducing outputs (mean mu, covariance S).
#
# NOTE(review): garbled extraction — original line numbers are fused into the
# code and many lines are missing from this view, including the assignment of
# the `missing_data` flag, the Cholesky factorization of Kmm (Lm, LmInv), the
# definition of ND, the `else:` guards separating the missing-data and dense
# paths, and the dL_dKmm / dL_dthetaL computations referenced by grad_dict.
# Comments describe only what is visible; restore the full body from version
# control before changing logic.
74 def inference(self, kern, X, Z, likelihood, Y, qU):
# Missing-data detection: any NaN entry in a plain ndarray Y triggers the
# masked path (presumably setting missing_data=True on a line not visible).
76 The SVI-VarDTC inference 79 if isinstance(Y, np.ndarray)
and np.any(np.isnan(Y)):
81 N, M, Q = Y.shape[0], Z.shape[0], Z.shape[1]
# Ds: number of observed outputs per data row (NaNs excluded).
82 Ds = Y.shape[1] - (np.isnan(Y)*1).sum(1)
# Ymask: 1 where observed, 0 where missing.
83 Ymask = 1-np.isnan(Y)*1
# Y_masked: NaNs replaced by zeros so downstream products ignore them.
84 Y_masked = np.zeros_like(Y)
85 Y_masked[Ymask==1] = Y[Ymask==1]
# Fully observed path: static dimensions N, D, M, Q.
89 N, D, M, Q = Y.shape[0], Y.shape[1], Z.shape[0], Z.shape[1]
92 uncertain_inputs = isinstance(X, VariationalPosterior)
93 uncertain_outputs = isinstance(Y, VariationalPosterior)
# Noise precision, floored at 1e-6 to avoid division blow-up.
95 beta = 1./np.fmax(likelihood.variance, 1e-6)
# Gather psi statistics; masked Y and per-row Ds are substituted when data
# are missing.
97 psi0, psi2, YRY, psi1, psi1Y = self.
gatherPsiStat(kern, X, Z, Y
if not missing_data
else Y_masked, beta, uncertain_inputs, D
if not missing_data
else Ds, missing_data)
# ======= Computation of the log-likelihood bound =======
103 mu, S = qU.mean, qU.covariance
104 mupsi1Y = mu.dot(psi1Y)
# Kmm is copied because jitter is presumably added in-place on a line not
# visible here (diag.add), before the Cholesky Lm / LmInv are formed.
106 Kmm = kern.K(Z).copy()
# Missing-data path: S_mu[d] = S + mu[:,d] mu[:,d]^T, one MxM matrix per
# output dimension d.
111 S_mu = S[
None,:,:]+mu.T[:,:,
None]*mu.T[:,
None,:]
# NS_mu: S_mu weighted by the observation mask, accumulated per datapoint.
112 NS_mu = S_mu.T.dot(Ymask.T).T
# Whiten psi2 and NS_mu by LmInv on both sides (batched over axis 0).
115 LmInvPsi2LmInvT = np.swapaxes(psi2.dot(LmInv.T),1,2).dot(LmInv.T)
116 LmInvSmuLmInvT = np.swapaxes(NS_mu.dot(LmInv.T),1,2).dot(LmInv.T)
# B collects the data/mean cross terms plus the Ds-weighted psi2 sum.
118 B = mupsi1Y+ mupsi1Y.T +(Ds[:,
None,
None]*psi2).sum(0)
119 tmp = backsub_both_sides(Lm, B,
'right')
# Bound for the missing-data path.  ND is presumably the total observed
# count (Ds.sum()) — its definition is not visible here; TODO confirm.
121 logL = -ND*log_2_pi/2. +ND*np.log(beta)/2. - psi0/2. - YRY/2. \
122 -(LmInvSmuLmInvT*LmInvPsi2LmInvT).sum()/2. +np.trace(tmp)/2.
# Dense path: whitened psi2, either directly (uncertain inputs) ...
126 LmInvPsi2LmInvT = backsub_both_sides(Lm, psi2,
'right')
# ... or reconstructed from psi1 (deterministic inputs; /beta undoes the
# double beta scaling baked into psi1).
128 LmInvPsi2LmInvT = tdot(dtrtrs(Lm, psi1.T)[0])/beta
129 LmInvSmuLmInvT = backsub_both_sides(Lm, S_mu,
'right')
131 B = mupsi1Y+ mupsi1Y.T +D*psi2
132 tmp = backsub_both_sides(Lm, B,
'right')
# Bound for the dense path (here ND presumably equals N*D — TODO confirm).
134 logL = -ND*log_2_pi/2. +ND*np.log(beta)/2. - psi0/2. - YRY/2. \
135 -(LmInvSmuLmInvT*LmInvPsi2LmInvT).sum()/2. +np.trace(tmp)/2.
# ======= Gradients w.r.t. the psi statistics =======
# dL_dpsi0: per-row in the missing-data path (Ds), constant in the dense one.
154 dL_dpsi0 = -Ds * (beta * np.ones((N,)))/2.
156 dL_dpsi0 = -D * (beta * np.ones((N,)))/2.
158 if uncertain_outputs:
159 Ym,Ys = Y.mean, Y.variance
# Kmm^{-1} mu Y^T via two triangular solves against Lm.
160 dL_dpsi1 = dtrtrs(Lm, dtrtrs(Lm, Ym.dot(mu.T).T)[0], trans=1)[0].T*beta
# Masked-Y and plain-Y variants of the same solve.
163 dL_dpsi1 = dtrtrs(Lm, dtrtrs(Lm, (Y_masked).dot(mu.T).T)[0], trans=1)[0].T*beta
165 dL_dpsi1 = dtrtrs(Lm, dtrtrs(Lm, Y.dot(mu.T).T)[0], trans=1)[0].T*beta
# dL_dpsi2, per-datapoint (batched) in the missing-data path ...
169 dL_dpsi2 = np.swapaxes((Ds[:,
None,
None]*np.eye(M)[
None,:,:]-LmInvSmuLmInvT).dot(LmInv),1,2).dot(LmInv)*beta/2.
# ... and a single MxM matrix in the dense path.
171 dL_dpsi2 = beta*backsub_both_sides(Lm, D*np.eye(M)-LmInvSmuLmInvT,
'left')/2.
# Deterministic inputs fold the psi2 gradient back into psi1's gradient.
173 dL_dpsi1 += beta*psi1.dot(dL_dpsi2+dL_dpsi2.T)
# Gradient dictionary: psi-statistic keys for uncertain inputs, Kdiag/K keys
# otherwise.  dL_dKmm and dL_dthetaL are computed on lines not visible here.
177 grad_dict = {
'dL_dKmm': dL_dKmm,
181 'dL_dthetaL':dL_dthetaL}
183 grad_dict = {
'dL_dKmm': dL_dKmm,
184 'dL_dKdiag':dL_dpsi0,
186 'dL_dthetaL':dL_dthetaL}
# Extra gradients w.r.t. the variational output distribution.
188 if uncertain_outputs:
190 grad_dict[
'dL_dYmean'] = -Ym*beta+ dtrtrs(Lm,psi1.T)[0].T.dot(dtrtrs(Lm,mu)[0])
191 grad_dict[
'dL_dYvar'] = beta/-2.
193 return logL, grad_dict
def gatherPsiStat(self, kern, X, Z, Y, beta, uncertain_inputs, D, missing_data)
Infer the marginal likelihood through \frac{p(y, y^*)}{p(y)}.
def inference(self, kern, X, Z, likelihood, Y, qU)
The SVI-VarDTC inference.
def get_YYTfactor(self, Y)
def __init__(self, mpi_comm=None)