Skip to content

Commit c23b72b

Browse files
author
Christopher Fonnesbeck
committed
Added Model verbosity argument
1 parent ca40cd3 commit c23b72b

File tree

3 files changed: +22 −12 lines changed

pymc3/model.py

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -158,16 +158,25 @@ def logpt(self):
158158
return t.sum(self.logp_elemwiset)
159159

160160
class Model(Context, Factor):
161-
"""Encapsulates the variables and likelihood factors of a model."""
161+
"""Encapsulates the variables and likelihood factors of a model.
162+
163+
Parameters
164+
----------
165+
verbose : int
166+
Model verbosity setting, determining how much feedback various operations
167+
provide. Normal verbosity is verbose=1 (default), silence is verbose=0, high
168+
is any value greater than 1.
169+
"""
162170

163-
def __init__(self):
171+
def __init__(self, verbose=1):
164172
self.named_vars = {}
165173
self.free_RVs = []
166174
self.observed_RVs = []
167175
self.deterministics = []
168176
self.potentials = []
169177
self.missing_values = []
170178
self.model = self
179+
self.verbose = verbose
171180

172181
@property
173182
@memoize
@@ -227,11 +236,12 @@ def Var(self, name, dist, data=None):
227236
self.free_RVs.append(var)
228237
else:
229238
var = TransformedRV(name=name, distribution=dist, model=self, transform=dist.transform)
230-
print('Applied {transform}-transform to {name}'
231-
' and added transformed {orig_name} to model.'.format(
232-
transform=dist.transform.name,
233-
name=name,
234-
orig_name='{}_{}'.format(name, dist.transform.name)))
239+
if model.verbose:
240+
print('Applied {transform}-transform to {name}'
241+
' and added transformed {orig_name} to model.'.format(
242+
transform=dist.transform.name,
243+
name=name,
244+
orig_name='{}_{}'.format(name, dist.transform.name)))
235245
self.deterministics.append(var)
236246
return var
237247
elif isinstance(data, dict):

pymc3/sampling.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,8 @@ def assign_step_methods(model, step=None,
6161

6262
selected = max(competences.keys(), key=(lambda k: competences[k]))
6363

64-
print('Assigned {0} to {1}'.format(selected.__name__, var))
64+
if model.verbose:
65+
print('Assigned {0} to {1}'.format(selected.__name__, var))
6566
selected_steps[selected].append(var)
6667

6768
# Instantiate all selected step methods

pymc3/tuning/starting.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616

1717
def find_MAP(start=None, vars=None, fmin=None, return_raw=False,
18-
disp=False, model=None, *args, **kwargs):
18+
model=None, *args, **kwargs):
1919
"""
2020
Sets state to the local maximum a posteriori point given a model.
2121
Current default of fmin_Hessian does not deal well with optimizing close
@@ -32,9 +32,6 @@ def find_MAP(start=None, vars=None, fmin=None, return_raw=False,
3232
`scipy.optimize.fmin_powell` which will perform better).
3333
return_raw : Bool
3434
Whether to return extra value returned by fmin (Defaults to `False`)
35-
disp : Bool
36-
Display helpful warnings, and verbose output of `fmin` (Defaults to
37-
`False`)
3835
model : Model (optional if in `with` context)
3936
*args, **kwargs
4037
Extra args passed to fmin
@@ -48,6 +45,8 @@ def find_MAP(start=None, vars=None, fmin=None, return_raw=False,
4845
vars = inputvars(vars)
4946

5047
disc_vars = list(typefilter(vars, discrete_types))
48+
49+
disp = model.verbose > 1
5150

5251
if disc_vars and disp:
5352
print("Warning: vars contains discrete variables. MAP " +

0 commit comments

Comments (0)