
Commit ff67bea

twiecki and OriolAbril authored

Replace at -> pt. (pymc-devs#485)

* Replace at -> pt.
* Rename two remaining at->pt.
* Add pytensor to intersphinx.
* Add pytensor to pre-commit.
* remove aesara from intersphinx mapping

Co-authored-by: Oriol (ZBook) <[email protected]>

1 parent 5238dc3 commit ff67bea
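
The rename itself is mechanical: the notebooks already import from pytensor, and this commit only standardizes the alias from `at` (a holdover from the `aesara.tensor` days) to `pt`. A minimal sketch of the convention; the model body here is illustrative, not taken from any one notebook:

```python
import pymc as pm

# before this commit: import pytensor.tensor as at
import pytensor.tensor as pt  # standardized alias after this commit

with pm.Model() as model:
    mu = pm.Normal("mu")
    sigma = pm.HalfNormal("sigma", 10)
    # tensor-level ops are referenced through the pt alias,
    # e.g. pt.extra_ops.diff as used throughout the binning notebook
    edges = pt.as_tensor_variable([0.0, 0.5, 1.0])
    widths = pt.extra_ops.diff(edges)
```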


48 files changed (+284 -285 lines)

.pre-commit-config.yaml (+1 -2)

@@ -83,15 +83,14 @@ repos:
       examples/howto/custom_distribution.ipynb)
     entry: >
       (?x)(arviz-devs.github.io|
-      aesara.readthedocs.io|
-      aeppl.readthedocs.io|
       pymc-experimental.readthedocs.io|
       docs.pymc.io|
       numpy.org/doc|
       pymc-examples.readthedocs.io|
       docs.python.org|
       xarray.pydata.org
       python.arviz.org|
+      pytensor.readthedocs.io|
       docs.xarray.dev|
       www.pymc.io|
       docs.scipy.org/doc)
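
For context, the `entry` value above is a Python verbose regex (the `(?x)` flag) listing documentation domains; given that this commit also adds pytensor to intersphinx, the hook presumably flags hardcoded links to these domains that should be sphinx cross-references instead. A rough sketch of how such a pattern matches, using a trimmed-down stand-in for the domain list:

```python
import re

# Trimmed stand-in for the hook's domain alternation; (?x) enables
# verbose mode, so whitespace inside the pattern is ignored
pattern = re.compile(
    r"""(?x)
    (pytensor.readthedocs.io
    |docs.pymc.io
    |www.pymc.io)
    """
)

# The newly added pytensor docs domain is caught...
assert pattern.search("see https://pytensor.readthedocs.io/en/latest/")
# ...while the removed aesara domain no longer matches
assert pattern.search("https://aesara.readthedocs.io/") is None
```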

examples/case_studies/GEV.ipynb (+1 -1)

@@ -50,7 +50,7 @@
 "import numpy as np\n",
 "import pymc as pm\n",
 "import pymc_experimental.distributions as pmx\n",
-"import pytensor.tensor as at\n",
+"import pytensor.tensor as pt\n",
 "\n",
 "from arviz.plots import plot_utils as azpu"
 ]

examples/case_studies/GEV.myst.md (+1 -1)

@@ -45,7 +45,7 @@ import matplotlib.pyplot as plt
 import numpy as np
 import pymc as pm
 import pymc_experimental.distributions as pmx
-import pytensor.tensor as at
+import pytensor.tensor as pt
 
 from arviz.plots import plot_utils as azpu
 ```

examples/case_studies/binning.ipynb (+17 -17)

@@ -72,7 +72,7 @@
 "We are now in a position to sketch out a generative PyMC model:\n",
 "\n",
 "```python\n",
-"import pytensor.tensor as at\n",
+"import pytensor.tensor as pt\n",
 "\n",
 "with pm.Model() as model:\n",
 "    # priors\n",
@@ -81,7 +81,7 @@
 "    # generative process\n",
 "    probs = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), cutpoints))\n",
 "    probs = pm.math.concatenate([[0], probs, [1]])\n",
-"    probs = at.extra_ops.diff(probs)\n",
+"    probs = pt.extra_ops.diff(probs)\n",
 "    # likelihood\n",
 "    pm.Multinomial(\"counts\", p=probs, n=sum(counts), observed=counts)\n",
 "```\n",
@@ -98,7 +98,7 @@
 "simply concatenates the cumulative density at $-\infty$ (which is zero) and at $\infty$ (which is 1).\n",
 "The third line\n",
 "```python\n",
-"probs = at.extra_ops.diff(probs)\n",
+"probs = pt.extra_ops.diff(probs)\n",
 "```\n",
 "calculates the difference between consecutive cumulative densities to give the actual probability of a datum falling in any given bin.\n",
 "\n",
@@ -125,7 +125,7 @@
 "import numpy as np\n",
 "import pandas as pd\n",
 "import pymc as pm\n",
-"import pytensor.tensor as at\n",
+"import pytensor.tensor as pt\n",
 "import seaborn as sns\n",
 "\n",
 "warnings.filterwarnings(action=\"ignore\", category=UserWarning)"
@@ -320,7 +320,7 @@
 "    mu = pm.Normal(\"mu\")\n",
 "\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([[0], probs1, [1]]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([[0], probs1, [1]]))\n",
 "    pm.Multinomial(\"counts1\", p=probs1, n=c1.sum(), observed=c1.values)"
 ]
 },
@@ -841,7 +841,7 @@
 "    mu = pm.Normal(\"mu\")\n",
 "\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([[0], probs2, [1]]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([[0], probs2, [1]]))\n",
 "    pm.Multinomial(\"counts2\", p=probs2, n=c2.sum(), observed=c2.values)"
 ]
 },
@@ -1238,11 +1238,11 @@
 "    mu = pm.Normal(\"mu\")\n",
 "\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"normal1_cdf\", probs1)\n",
 "\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
 "    probs2 = pm.Deterministic(\"normal2_cdf\", probs2)\n",
 "\n",
 "    pm.Multinomial(\"counts1\", p=probs1, n=c1.sum(), observed=c1.values)\n",
@@ -1719,7 +1719,7 @@
 "    mu = pm.Normal(\"mu\")\n",
 "    # study 1\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"normal1_cdf\", probs1)\n",
 "    pm.Multinomial(\"counts1\", p=probs1, n=c1.sum(), observed=c1.values)\n",
 "    # study 2\n",
@@ -2149,12 +2149,12 @@
 "\n",
 "    # Study 1\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"normal1_cdf\", probs1, dims=\"bin1\")\n",
 "\n",
 "    # Study 2\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
 "    probs2 = pm.Deterministic(\"normal2_cdf\", probs2, dims=\"bin2\")\n",
 "\n",
 "    # Likelihood\n",
@@ -2392,12 +2392,12 @@
 "\n",
 "    # Study 1\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"normal1_cdf\", probs1, dims=\"bin1\")\n",
 "\n",
 "    # Study 2\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
 "    probs2 = pm.Deterministic(\"normal2_cdf\", probs2, dims=\"bin2\")\n",
 "\n",
 "    # Likelihood\n",
@@ -2927,12 +2927,12 @@
 "    \n",
 "    # Study 1\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"normal1_cdf\", probs1, dims='bin1')\n",
 "\n",
 "    # Study 2\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
 "    probs2 = pm.Deterministic(\"normal2_cdf\", probs2, dims='bin2')\n",
 "\n",
 "    # Likelihood\n",
@@ -3091,11 +3091,11 @@
 "    beta = pm.HalfNormal(\"beta\", 10)\n",
 "\n",
 "    probs1 = pm.math.exp(pm.logcdf(pm.Gumbel.dist(mu=mu, beta=beta), d1))\n",
-"    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
+"    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))\n",
 "    probs1 = pm.Deterministic(\"gumbel_cdf1\", probs1)\n",
 "\n",
 "    probs2 = pm.math.exp(pm.logcdf(pm.Gumbel.dist(mu=mu, beta=beta), d2))\n",
-"    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
+"    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))\n",
 "    probs2 = pm.Deterministic(\"gumbel_cdf2\", probs2)\n",
 "\n",
 "    pm.Multinomial(\"counts1\", p=probs1, n=c1.sum(), observed=c1.values)\n",

examples/case_studies/binning.myst.md (+17 -17)

@@ -69,7 +69,7 @@ In ordinal regression, the cutpoints are treated as latent variables and the par
 We are now in a position to sketch out a generative PyMC model:
 
 ```python
-import pytensor.tensor as at
+import pytensor.tensor as pt
 
 with pm.Model() as model:
     # priors
@@ -78,7 +78,7 @@ with pm.Model() as model:
     # generative process
     probs = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), cutpoints))
     probs = pm.math.concatenate([[0], probs, [1]])
-    probs = at.extra_ops.diff(probs)
+    probs = pt.extra_ops.diff(probs)
     # likelihood
     pm.Multinomial("counts", p=probs, n=sum(counts), observed=counts)
 ```
@@ -95,7 +95,7 @@ probs = pm.math.concatenate([[0], probs, [1]])
 simply concatenates the cumulative density at $-\infty$ (which is zero) and at $\infty$ (which is 1).
 The third line
 ```python
-probs = at.extra_ops.diff(probs)
+probs = pt.extra_ops.diff(probs)
 ```
 calculates the difference between consecutive cumulative densities to give the actual probability of a datum falling in any given bin.
 
@@ -115,7 +115,7 @@ import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
 import pymc as pm
-import pytensor.tensor as at
+import pytensor.tensor as pt
 import seaborn as sns
 
 warnings.filterwarnings(action="ignore", category=UserWarning)
@@ -226,7 +226,7 @@ with pm.Model() as model1:
     mu = pm.Normal("mu")
 
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([[0], probs1, [1]]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([[0], probs1, [1]]))
     pm.Multinomial("counts1", p=probs1, n=c1.sum(), observed=c1.values)
 ```
 
@@ -331,7 +331,7 @@ with pm.Model() as model2:
     mu = pm.Normal("mu")
 
     probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([[0], probs2, [1]]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([[0], probs2, [1]]))
     pm.Multinomial("counts2", p=probs2, n=c2.sum(), observed=c2.values)
 ```
 
@@ -426,11 +426,11 @@ with pm.Model() as model3:
     mu = pm.Normal("mu")
 
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("normal1_cdf", probs1)
 
     probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
     probs2 = pm.Deterministic("normal2_cdf", probs2)
 
     pm.Multinomial("counts1", p=probs1, n=c1.sum(), observed=c1.values)
@@ -519,7 +519,7 @@ with pm.Model() as model4:
     mu = pm.Normal("mu")
     # study 1
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu, sigma=sigma), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("normal1_cdf", probs1)
     pm.Multinomial("counts1", p=probs1, n=c1.sum(), observed=c1.values)
     # study 2
@@ -612,12 +612,12 @@ with pm.Model(coords=coords) as model5:
 
     # Study 1
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("normal1_cdf", probs1, dims="bin1")
 
     # Study 2
     probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
     probs2 = pm.Deterministic("normal2_cdf", probs2, dims="bin2")
 
     # Likelihood
@@ -645,12 +645,12 @@ with pm.Model(coords=coords) as model5:
 
     # Study 1
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("normal1_cdf", probs1, dims="bin1")
 
     # Study 2
     probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
     probs2 = pm.Deterministic("normal2_cdf", probs2, dims="bin2")
 
     # Likelihood
@@ -748,12 +748,12 @@ with pm.Model(coords=coords) as model5:
 
     # Study 1
     probs1 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[0], sigma=sigma[0]), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("normal1_cdf", probs1, dims='bin1')
 
     # Study 2
     probs2 = pm.math.exp(pm.logcdf(pm.Normal.dist(mu=mu[1], sigma=sigma[1]), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
     probs2 = pm.Deterministic("normal2_cdf", probs2, dims='bin2')
 
     # Likelihood
@@ -855,11 +855,11 @@ with pm.Model() as model6:
     beta = pm.HalfNormal("beta", 10)
 
     probs1 = pm.math.exp(pm.logcdf(pm.Gumbel.dist(mu=mu, beta=beta), d1))
-    probs1 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
+    probs1 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs1, np.array([1])]))
     probs1 = pm.Deterministic("gumbel_cdf1", probs1)
 
     probs2 = pm.math.exp(pm.logcdf(pm.Gumbel.dist(mu=mu, beta=beta), d2))
-    probs2 = at.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
+    probs2 = pt.extra_ops.diff(pm.math.concatenate([np.array([0]), probs2, np.array([1])]))
     probs2 = pm.Deterministic("gumbel_cdf2", probs2)
 
     pm.Multinomial("counts1", p=probs1, n=c1.sum(), observed=c1.values)
