
Commit 766d3b1: run precommit

1 parent: 1a63282

2 files changed: +12 −12 lines


pymc/gp/gp.py (1 addition, 1 deletion; the change is whitespace-only, pre-commit stripping trailing whitespace from the blank line at 766)

@@ -763,7 +763,7 @@ def marginal_likelihood(
             raise ValueError("noise argument must be specified")
         else:
             self.sigma = noise
-
+
         approx_logp = self._build_marginal_likelihood_logp(y, X, Xu, noise, JITTER_DEFAULT)
         pm.Potential("marginalapprox_logp_" + name, approx_logp)
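For context, the code path touched above registers the sparse approximation's log-likelihood as a model Potential. A minimal usage sketch of that path, patterned on the test changes below; the toy data and hyperparameters are illustrative, and approx="FITC" is an assumption, not something this commit pins down:

import numpy as np
import pymc as pm

# Toy data, mirroring the shapes used in the test suite below.
x = np.linspace(-5, 5, 30)[:, None]
y = 0.25 * x.ravel() + 0.1 * np.random.randn(30)

with pm.Model() as model:
    cov_func = pm.gp.cov.Linear(1, c=0.0)
    # approx="FITC" is an illustrative choice of sparse approximation.
    gp = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")
    sigma = pm.HalfNormal("sigma", sigma=100)
    # Positional args as in the test: name, X, Xu (inducing points), y, noise.
    # This call adds the Potential named "marginalapprox_logp_lik" to the
    # model, i.e. the pm.Potential line visible in the hunk above.
    gp.marginal_likelihood("lik", x, x, y, sigma)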

pymc/tests/test_gp.py (11 additions, 11 deletions)

@@ -843,30 +843,31 @@ def testLatent2(self):
         latent_logp = model.compile_logp()({"f_rotated_": y_rotated, "p": self.pnew})
         npt.assert_allclose(latent_logp, self.logp, atol=5)
 
-
+
 class TestMarginalVsMarginalApprox:
     R"""
     Compare test fits of models Marginal and MarginalApprox.
     """
+
     def setup_method(self):
         self.sigma = 0.1
         self.x = np.linspace(-5, 5, 30)
-        self.y = 0.25 * self.x + self.sigma*np.random.randn(len(self.x))
+        self.y = 0.25 * self.x + self.sigma * np.random.randn(len(self.x))
         with pm.Model() as model:
             cov_func = pm.gp.cov.Linear(1, c=0.0)
-            c = pm.Normal("c", mu=20.0, sigma=100.0) # far from true value
+            c = pm.Normal("c", mu=20.0, sigma=100.0)  # far from true value
             mean_func = pm.gp.mean.Constant(c)
             self.gp = pm.gp.Marginal(mean_func=mean_func, cov_func=cov_func)
             sigma = pm.HalfNormal("sigma", sigma=100)
             self.gp.marginal_likelihood("lik", self.x[:, None], self.y, sigma)
-            self.map_full = pm.find_MAP(method="bfgs") # bfgs seems to work much better than lbfgsb
-
+            self.map_full = pm.find_MAP(method="bfgs")  # bfgs seems to work much better than lbfgsb
+
         self.x_new = np.linspace(-6, 6, 20)
         with model:
             self.pred_mu, self.pred_var = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=True, diag=True
             )
-
+
         with model:
             self.pred_mu, self.pred_covar = self.gp.predict(
                 self.x_new[:, None], point=self.map_full, pred_noise=False, diag=False

@@ -883,29 +884,28 @@ def test_fits_and_preds(self, approx):
             sigma = pm.HalfNormal("sigma", sigma=100, initval=50.0)
             gp.marginal_likelihood("lik", self.x[:, None], self.x[:, None], self.y, sigma)
             map_approx = pm.find_MAP(method="bfgs")
-
+
         # use wide tolerances (but narrow relative to initial values of unknown parameters) because
         # test is likely flakey
         npt.assert_allclose(self.map_full["c"], map_approx["c"], atol=0.01, rtol=0.1)
         npt.assert_allclose(self.map_full["sigma"], map_approx["sigma"], atol=0.01, rtol=0.1)
-
+
         # check that predict (and conditional) work, include noise, with diagonal non-full pred var
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )
         npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
         npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
-
+
         # check that predict (and conditional) work, no noise, full pred covariance
         with model:
             pred_mu_approx, pred_var_approx = gp.predict(
                 self.x_new[:, None], point=map_approx, pred_noise=True, diag=True
             )
         npt.assert_allclose(self.pred_mu, pred_mu_approx, atol=0.0, rtol=0.1)
         npt.assert_allclose(self.pred_var, pred_var_approx, atol=0.0, rtol=0.1)
-
-
+
 
 class TestGPAdditive:
     def setup_method(self):
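Condensed from the setup_method shown above, the end-to-end pattern this comparison test relies on looks roughly like the sketch below; the data are the test's toy data, and map_point is a local name used here for illustration:

import numpy as np
import pymc as pm

x = np.linspace(-5, 5, 30)
y = 0.25 * x + 0.1 * np.random.randn(len(x))

with pm.Model() as model:
    c = pm.Normal("c", mu=20.0, sigma=100.0)  # deliberately far from the true value
    gp = pm.gp.Marginal(
        mean_func=pm.gp.mean.Constant(c), cov_func=pm.gp.cov.Linear(1, c=0.0)
    )
    sigma = pm.HalfNormal("sigma", sigma=100)
    gp.marginal_likelihood("lik", x[:, None], y, sigma)
    map_point = pm.find_MAP(method="bfgs")  # bfgs, per the test's own comment

x_new = np.linspace(-6, 6, 20)
with model:
    # Diagonal predictive variance with observation noise folded in,
    # matching the first predict call in the test.
    mu, var = gp.predict(x_new[:, None], point=map_point, pred_noise=True, diag=True)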
