Skip to content

Commit 01c28bd

Browse files
committed
minor cleanup and bugfixing
1 parent a633a95 commit 01c28bd

File tree

3 files changed

+21
-11
lines changed

3 files changed

+21
-11
lines changed

pymc/Node.py

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,25 @@ def logp_of_set(s):
2929
raise exc[0], exc[1], exc[2]
3030

3131
def logp_gradient_of_set(variable_set, calculation_set = None):
32-
33-
logp_gradient = {}
34-
for var in variable_set:
35-
logp_gradient[var] = var.logp_gradient_contribution(calculation_set)
36-
37-
return logp_gradient
32+
"""
33+
Calculates the gradient of the joint log posterior with respect to all the variables in variable_set.
34+
Calculation of the log posterior is restricted to the variables in calculation_set.
3835
36+
Returns a dictionary of the gradients.
37+
"""
38+
logp_gradients = {}
39+
for variable in variable_set:
40+
logp_gradients[variable] = logp_gradient(variable, calculation_set)
41+
42+
return logp_gradients
3943

44+
def logp_gradient(variable, calculation_set = None):
45+
"""
46+
Calculates the gradient of the joint log posterior with respect to variable.
47+
Calculation of the log posterior is restricted to the variables in calculation_set.
48+
"""
49+
return variable.logp_partial_gradient(variable, calculation_set) + sum([child.logp_partial_gradient(variable, calculation_set) for child in variable.children] )
50+
4051

4152
def batchsd(trace, batches=5):
4253
"""

pymc/distributions.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -305,7 +305,6 @@ def stochastic_from_dist(name, logp, random=None, logp_partial_gradients={}, dty
305305

306306
wrapped_logp_partial_gradients = {}
307307

308-
print logp_partial_gradients
309308
for parameter, func in logp_partial_gradients.iteritems():
310309
wrapped_logp_partial_gradients[parameter] = valuewrapper(logp_partial_gradients[parameter], arguments = distribution_arguments)
311310

@@ -1123,13 +1122,13 @@ def exponweib_like(x, alpha, k, loc=0, scale=1):
11231122
11241123
"""
11251124
return flib.exponweib(x,alpha,k,loc,scale)
1126-
1125+
"""
11271126
exponweib_grad_like = {'value' : flib.exponweib_gx,
11281127
'alpha' : flib.exponweib_ga,
11291128
'k' : flib.exponweib_gk,
11301129
'loc' : flib.exponweib_gl,
11311130
'scale' : flib.exponweib_gs}
1132-
1131+
"""
11331132
# Gamma----------------------------------------------
11341133
@randomwrap
11351134
def rgamma(alpha, beta, size=None):

pymc/tests/test_gradients.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -305,8 +305,8 @@ def test_gradients(self):
305305
negative_binomial = NegativeBinomial('negative_binomial', mu = c, alpha = d )
306306
self.check_gradients(negative_binomial)
307307

308-
exponweib = Exponweib('exponweib', alpha = c, k =d , loc = a, scale = e )
309-
self.check_gradients(exponweib)
308+
#exponweib = Exponweib('exponweib', alpha = c, k =d , loc = a, scale = e )
309+
#self.check_gradients(exponweib)
310310

311311

312312
def check_model_gradients(self, model):

0 commit comments

Comments
 (0)