forked from pymc-devs/pymc
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy patharraystep.py
252 lines (206 loc) · 7.9 KB
/
arraystep.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
from .compound import CompoundStep
from ..model import modelcontext
from ..theanof import inputvars
from ..blocking import ArrayOrdering, DictToArrayBijection
import numpy as np
from numpy.random import uniform
from enum import IntEnum, unique
# Public names exported by this module.
__all__ = [
    'ArrayStep', 'ArrayStepShared', 'metrop_select', 'Competence']
@unique
class Competence(IntEnum):
    """Ordered competence levels a step method can claim for a variable.

    From lowest to highest:

    0: INCOMPATIBLE -- the step method cannot sample the variable.
    1: COMPATIBLE   -- it can, but other methods may suit it better.
    2: PREFERRED    -- it is a good choice for the variable.
    3: IDEAL        -- it is the best possible choice.
    """
    INCOMPATIBLE = 0
    COMPATIBLE = 1
    PREFERRED = 2
    IDEAL = 3
class BlockedStep(object):
    """Base class for (blocked) step methods.

    ``__new__`` inspects the ``blocked`` setting and either returns one
    instance that samples all requested variables together, or a
    ``CompoundStep`` wrapping one instance per variable.
    """

    # Checked by the ``step`` implementations of subclasses: when True,
    # ``astep`` is expected to return ``(new_point, stats)``.
    generates_stats = False

    def __new__(cls, *args, **kwargs):
        blocked = kwargs.get('blocked')
        if blocked is None:
            # Try to look up default value from class
            blocked = getattr(cls, 'default_blocked', True)
            kwargs['blocked'] = blocked

        # Resolve the model from the kwarg (or the enclosing model context
        # when None) and pass it on to __init__.
        model = modelcontext(kwargs.get('model'))
        kwargs.update({'model':model})

        # vars can either be first arg or a kwarg
        if 'vars' not in kwargs and len(args) >= 1:
            vars = args[0]
            args = args[1:]
        elif 'vars' in kwargs:
            vars = kwargs.pop('vars')
        else: # Assume all model variables
            vars = model.vars

        # get the actual inputs from the vars
        vars = inputvars(vars)

        if len(vars) == 0:
            raise ValueError('No free random variables to sample.')

        if not blocked and len(vars) > 1:
            # In this case we create a separate sampler for each var
            # and append them to a CompoundStep
            steps = []
            for var in vars:
                step = super(BlockedStep, cls).__new__(cls)
                # If we don't return the instance we have to manually
                # call __init__
                step.__init__([var], *args, **kwargs)
                # Hack for creating the class correctly when unpickling.
                step.__newargs = ([var], ) + args, kwargs
                steps.append(step)
            return CompoundStep(steps)
        else:
            step = super(BlockedStep, cls).__new__(cls)
            # Hack for creating the class correctly when unpickling.
            step.__newargs = (vars, ) + args, kwargs
            return step

    # Hack for creating the class correctly when unpickling.
    def __getnewargs_ex__(self):
        # Returns the (args, kwargs) tuple stashed by __new__, so pickle can
        # re-run __new__ with the same arguments on load.
        return self.__newargs

    @staticmethod
    def competence(var, has_grad):
        """Competence of this step method for sampling ``var``.

        Subclasses override this; the base class declares itself
        incompatible with every variable.
        """
        return Competence.INCOMPATIBLE

    @classmethod
    def _competence(cls, vars, have_grad):
        """Return a list with the competence of ``cls`` for each variable.

        Falls back to the legacy one-argument ``competence(var)`` signature
        if the two-argument call raises ``TypeError``.
        """
        vars = np.atleast_1d(vars)
        have_grad = np.atleast_1d(have_grad)
        competences = []
        for var,has_grad in zip(vars, have_grad):
            try:
                competences.append(cls.competence(var, has_grad))
            except TypeError:
                competences.append(cls.competence(var))
        return competences
class ArrayStep(BlockedStep):
    """
    Blocked step method that is generalized to accept vectors of variables.

    Parameters
    ----------
    vars : list
        List of variables for sampler.
    fs : list of logp theano functions
    allvars : Boolean (default False)
        If True, the full point dict is appended to the arguments passed
        to ``astep``.
    blocked : Boolean (default True)
    """

    def __init__(self, vars, fs, allvars=False, blocked=True):
        self.vars = vars
        self.ordering = ArrayOrdering(vars)
        self.fs = fs
        self.allvars = allvars
        self.blocked = blocked

    def step(self, point):
        """Take one step from ``point`` (dict of variable name -> value)."""
        bijection = DictToArrayBijection(self.ordering, point)

        # Adapt each function to the flat-array representation via the
        # bijection; optionally also hand astep the raw point dict.
        extra = []
        for fn in self.fs:
            extra.append(bijection.mapf(fn))
        if self.allvars:
            extra.append(point)

        if not self.generates_stats:
            new_flat = self.astep(bijection.map(point), *extra)
            return bijection.rmap(new_flat)

        new_flat, stats = self.astep(bijection.map(point), *extra)
        return bijection.rmap(new_flat), stats
class ArrayStepShared(BlockedStep):
    """Faster version of ArrayStep whose ``astep`` method does not need
    wrapped functions.

    Works by setting shared variables before using the step. This eliminates
    the mapping and unmapping overhead as well as moving fewer variables
    around.
    """

    def __init__(self, vars, shared, blocked=True):
        """
        Parameters
        ----------
        vars : list of sampling variables
        shared : dict of theano variable -> shared variable
        blocked : Boolean (default True)
        """
        self.vars = vars
        self.ordering = ArrayOrdering(vars)
        # Re-key by variable name so values can be looked up in point dicts.
        self.shared = {str(name): value for name, value in shared.items()}
        self.blocked = blocked
        self.bij = None

    def step(self, point):
        # Refresh the shared variables from the current point before stepping.
        for name, shared_var in self.shared.items():
            shared_var.set_value(point[name])

        self.bij = DictToArrayBijection(self.ordering, point)
        flat = self.bij.map(point)

        if not self.generates_stats:
            return self.bij.rmap(self.astep(flat))

        new_flat, stats = self.astep(flat)
        return self.bij.rmap(new_flat), stats
class PopulationArrayStepShared(ArrayStepShared):
    """Version of ArrayStepShared that allows samplers to access the states
    of other chains in the population.

    Works by linking a list of Points that is updated as the chains are
    iterated.
    """

    def __init__(self, vars, shared, blocked=True):
        """
        Parameters
        ----------
        vars : list of sampling variables
        shared : dict of theano variable -> shared variable
        blocked : Boolean (default True)
        """
        # Populated later by link_population(); None until the sampler is
        # attached to a population.
        self.population = None
        self.this_chain = None
        self.other_chains = None
        # Fix: do not `return` the super().__init__ result — __init__ must
        # return None. (The old `return` happened to return None anyway.)
        super(PopulationArrayStepShared, self).__init__(vars, shared, blocked)

    def link_population(self, population, chain_index):
        """Links the sampler to the population.

        Parameters
        ----------
        population : list of Points. (The elements of this list must be
            replaced with current chain states in every iteration.)
        chain_index : int of the index of this sampler in the population

        Raises
        ------
        ValueError
            If the population leaves fewer than two other chains for this
            sampler to draw states from.
        """
        self.population = population
        self.this_chain = chain_index
        self.other_chains = [c for c in range(len(population)) if c != chain_index]
        # Need at least two *other* chains, i.e. a population of three or more.
        if len(self.other_chains) < 2:
            raise ValueError('Population is just {} + {}. This is too small. You should '
                             'increase the number of chains.'.format(self.this_chain, self.other_chains))
class GradientSharedStep(BlockedStep):
    """Blocked step method driven by the model's compiled logp/dlogp
    function, which converts between point dicts and flat arrays."""

    def __init__(self, vars, model=None, blocked=True,
                 dtype=None, **theano_kwargs):
        """
        Parameters
        ----------
        vars : list of sampling variables
        model : Model (default None -- taken from the model context)
        blocked : Boolean (default True)
        dtype : dtype passed through to ``logp_dlogp_function``
        **theano_kwargs : forwarded to ``logp_dlogp_function``
        """
        model = modelcontext(model)
        self.vars = vars
        self.blocked = blocked
        self._logp_dlogp_func = model.logp_dlogp_function(
            vars, dtype=dtype, **theano_kwargs)

    def step(self, point):
        """Take one step from ``point`` (dict of variable name -> value)."""
        func = self._logp_dlogp_func
        func.set_extra_values(point)
        q0 = func.dict_to_array(point)

        if not self.generates_stats:
            return func.array_to_full_dict(self.astep(q0))

        q_new, stats = self.astep(q0)
        return func.array_to_full_dict(q_new), stats
def metrop_select(mr, q, q0):
    """Perform rejection/acceptance step for Metropolis class samplers.

    Returns the new sample ``q`` if the log of a uniform random number is
    less than the Metropolis acceptance rate (``mr``), and the old sample
    otherwise, along with a boolean indicating whether the sample was
    accepted.

    Parameters
    ----------
    mr : float, Metropolis acceptance rate
    q : proposed sample
    q0 : current sample

    Returns
    -------
    (q, True) if accepted, otherwise (q0, False)
    """
    # A non-finite acceptance rate is rejected outright; the short-circuit
    # means the uniform draw only happens when `mr` is finite.
    accepted = np.isfinite(mr) and np.log(uniform()) < mr
    if accepted:
        return q, True
    return q0, False