@@ -122,6 +122,97 @@ def R2D2M2CP(
- ``(R2D2M2)``CP is taken from https://arxiv.org/abs/2208.07132
- R2D2M2``(CP)`` (Correlation Probability) is proposed and implemented by Max Kochurov (@ferrine)
+
+ Examples
+ --------
+ Here the arguments are explained using a synthetic example
+
+ >>> import pymc_experimental as pmx
+ >>> import pymc as pm
+ >>> import numpy as np
+ >>> X = np.random.randn(10, 3)
+ >>> b = np.random.randn(3)
+ >>> y = X @ b + np.random.randn(10) * 0.04 + 5
+ >>> with pm.Model(coords=dict(variables=["a", "b", "c"])) as model:
+ ... eps, beta = pmx.distributions.R2D2M2CP(
+ ... "beta",
+ ... y.std(),
+ ... X.std(0),
+ ... dims="variables",
+ ... # NOTE: global shrinkage
+ ... r2=0.8,
+ ... # NOTE: if you are unsure about r2
+ ... r2_std=0.2,
+ ... # NOTE: if you know where a variable should go
+ ... # if you do not know, leave as 0.5
+ ... positive_probs=[0.8, 0.5, 0.1],
+ ... # NOTE: if you are uncertain about
+ ... # where a variable should go.
+ ... # NOTE: if you put 0.5 above,
+ ... # a std of about 0.1 is a good choice,
+ ... # but other sigmas work fine too
+ ... positive_probs_std=[0.3, 0.1, 0.2],
+ ... # NOTE: variable importances are relative to each other,
+ ... # but larger numbers put "more" weight in the relation
+ ... # use
+ ... # * 1-10 for small confidence
+ ... # * 10-30 for moderate confidence
+ ... # * 30+ for high confidence
+ ... # EXAMPLE:
+ ... # "a" - is likely to be useful
+ ... # "b" - no idea if it is useful
+ ... # "c" - a must-have in the relation
+ ... variables_importance=[10, 1, 34],
+ ... # NOTE: try both
+ ... centered=True
+ ... )
+ ... intercept = y.mean()
+ ... obs = pm.Normal("obs", intercept + X @ beta, eps, observed=y)
+
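+ To sanity-check what these choices imply before conditioning on data,
+ one can draw from the prior of the model defined above, for example
+
+ >>> with model:
+ ... # draw prior samples to inspect the implied prior over beta and eps
+ ... prior = pm.sample_prior_predictive()
+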
+ Some special cases can be obtained by choosing a specific set of arguments
+
+ Here the prior distribution of beta is ``Normal(0, y.std() * r2 ** .5)``
+
+ >>> with pm.Model(coords=dict(variables=["a", "b", "c"])) as model:
+ ... eps, beta = pmx.distributions.R2D2M2CP(
+ ... "beta",
+ ... y.std(),
+ ... X.std(0),
+ ... dims="variables",
+ ... # NOTE: global shrinkage
+ ... r2=0.8,
+ ... # NOTE: if you are unsure about r2
+ ... r2_std=0.2,
+ ... # NOTE: try both
+ ... centered=False
+ ... )
+ ... intercept = y.mean()
+ ... obs = pm.Normal("obs", intercept + X @ beta, eps, observed=y)
+
+
+ It is fine to leave some of the ``_std`` arguments unspecified.
+ You can also specify only ``positive_probs``, and all
+ the variables are then assumed to explain the same amount of variance (same importance)
+
+ >>> with pm.Model(coords=dict(variables=["a", "b", "c"])) as model:
+ ... eps, beta = pmx.distributions.R2D2M2CP(
+ ... "beta",
+ ... y.std(),
+ ... X.std(0),
+ ... dims="variables",
+ ... # NOTE: global shrinkage
+ ... r2=0.8,
+ ... # NOTE: if you are unsure about r2
+ ... r2_std=0.2,
+ ... # NOTE: if you know where a variable should go
+ ... # if you do not know, leave as 0.5
+ ... positive_probs=[0.8, 0.5, 0.1],
+ ... # NOTE: try both
+ ... centered=True
+ ... )
+ ... intercept = y.mean()
+ ... obs = pm.Normal("obs", intercept + X @ beta, eps, observed=y)
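+
+ In all of the examples above ``eps`` is used as the residual standard
+ deviation and ``beta`` as the regression coefficients, so once the model
+ is specified the posterior is sampled as usual; a minimal sketch
+
+ >>> with model:
+ ... # run MCMC on the model from the last example
+ ... idata = pm.sample()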
"""
if not isinstance(dims, (list, tuple)):
    dims = (dims,)