Skip to content

Commit 79fafb0

Browse files
Armavica authored and twiecki committed
Enable sphinx-lint pre-commit hook
1 parent af5ea5c commit 79fafb0

File tree

9 files changed

+4946
-4940
lines changed

9 files changed

+4946
-4940
lines changed

.pre-commit-config.yaml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,11 @@ repos:
2020
rev: 054bda51dbe278b3e86f27c890e3f3ac877d616c
2121
hooks:
2222
- id: validate-cff
23+
- repo: https://github.com/sphinx-contrib/sphinx-lint
24+
rev: v1.0.0
25+
hooks:
26+
- id: sphinx-lint
27+
args: ["."]
2328
- repo: https://github.com/lucianopaz/head_of_apache
2429
rev: "0.0.3"
2530
hooks:
@@ -31,7 +36,7 @@ repos:
3136
- --exclude=binder/
3237
- --exclude=versioneer.py
3338
- repo: https://github.com/astral-sh/ruff-pre-commit
34-
rev: v0.5.4
39+
rev: v0.6.5
3540
hooks:
3641
- id: ruff
3742
args: ["--fix", "--output-format=full"]

docs/source/guides/Gaussian_Processes.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ conditioned on.
151151
Calling the `prior` method will create a PyMC random variable that represents
152152
the latent function :math:`f(x) = \mathbf{f}`::
153153

154-
f = gp.prior("f", X)
154+
f = gp.prior("f", X)
155155

156156
:code:`f` is a random variable that can be used within a PyMC model like any
157157
other type of random variable. The first argument is the name of the random
@@ -166,7 +166,7 @@ Usually at this point, inference is performed on the model. The
166166
distribution over the latent function at arbitrary :math:`x_*` input points,
167167
:math:`f(x_*)`. To construct the conditional distribution we write::
168168

169-
f_star = gp.conditional("f_star", X_star)
169+
f_star = gp.conditional("f_star", X_star)
170170

171171
Additive GPs
172172
============

docs/source/learn/core_notebooks/GLM_linear.ipynb

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -67,9 +67,10 @@
6767
"import matplotlib.pyplot as plt\n",
6868
"import numpy as np\n",
6969
"import pandas as pd\n",
70-
"import pymc as pm\n",
7170
"import xarray as xr\n",
7271
"\n",
72+
"import pymc as pm\n",
73+
"\n",
7374
"from pymc import HalfCauchy, Model, Normal, sample\n",
7475
"\n",
7576
"print(f\"Running on PyMC v{pm.__version__}\")"
@@ -256,8 +257,6 @@
256257
"metadata": {},
257258
"outputs": [],
258259
"source": [
259-
"import sys\n",
260-
"\n",
261260
"try:\n",
262261
" import bambi as bmb\n",
263262
"except ImportError:\n",

docs/source/learn/core_notebooks/Gaussian_Processes.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ conditioned on.
148148
Calling the `prior` method will create a PyMC random variable that represents
149149
the latent function :math:`f(x) = \mathbf{f}`::
150150

151-
f = gp.prior("f", X)
151+
f = gp.prior("f", X)
152152

153153
:code:`f` is a random variable that can be used within a PyMC model like any
154154
other type of random variable. The first argument is the name of the random
@@ -163,7 +163,7 @@ Usually at this point, inference is performed on the model. The
163163
distribution over the latent function at arbitrary :math:`x_*` input points,
164164
:math:`f(x_*)`. To construct the conditional distribution we write::
165165

166-
f_star = gp.conditional("f_star", X_star)
166+
f_star = gp.conditional("f_star", X_star)
167167

168168
.. _additive_gp:
169169

docs/source/learn/core_notebooks/dimensionality.ipynb

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,9 +37,10 @@
3737
"source": [
3838
"from functools import partial\n",
3939
"\n",
40-
"import pymc as pm\n",
4140
"import numpy as np\n",
42-
"import pytensor.tensor as pt"
41+
"import pytensor.tensor as pt\n",
42+
"\n",
43+
"import pymc as pm"
4344
]
4445
},
4546
{

docs/source/learn/core_notebooks/model_comparison.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@
1515
],
1616
"source": [
1717
"import arviz as az\n",
18-
"import matplotlib.pyplot as plt\n",
1918
"import numpy as np\n",
19+
"\n",
2020
"import pymc as pm\n",
2121
"\n",
2222
"print(f\"Running on PyMC v{pm.__version__}\")"

docs/source/learn/core_notebooks/posterior_predictive.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,11 @@
3636
"import arviz as az\n",
3737
"import matplotlib.pyplot as plt\n",
3838
"import numpy as np\n",
39-
"import pymc as pm\n",
4039
"import xarray as xr\n",
4140
"\n",
4241
"from scipy.special import expit as logistic\n",
4342
"\n",
43+
"import pymc as pm\n",
4444
"\n",
4545
"print(f\"Running on PyMC v{pm.__version__}\")"
4646
]

docs/source/learn/core_notebooks/pymc_overview.ipynb

Lines changed: 4922 additions & 4922 deletions
Large diffs are not rendered by default.

docs/source/learn/core_notebooks/pymc_pytensor.ipynb

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,13 @@
3434
},
3535
"outputs": [],
3636
"source": [
37-
"import pytensor\n",
38-
"import pytensor.tensor as pt\n",
39-
"import pymc as pm\n",
4037
"import matplotlib.pyplot as plt\n",
4138
"import numpy as np\n",
42-
"import scipy.stats"
39+
"import pytensor\n",
40+
"import pytensor.tensor as pt\n",
41+
"import scipy.stats\n",
42+
"\n",
43+
"import pymc as pm"
4344
]
4445
},
4546
{
@@ -1838,7 +1839,7 @@
18381839
"text": [
18391840
"\n",
18401841
"mu_value -> -1.612085713764618\n",
1841-
"sigma_log_value -> -11.324403641427345 \n",
1842+
"sigma_log_value -> -11.324403641427345\n",
18421843
"x_value -> 9.081061466795328\n",
18431844
"\n"
18441845
]
@@ -1848,7 +1849,7 @@
18481849
"print(\n",
18491850
" f\"\"\"\n",
18501851
"mu_value -> {scipy.stats.norm.logpdf(x=0, loc=0, scale=2)}\n",
1851-
"sigma_log_value -> {- 10 + scipy.stats.halfnorm.logpdf(x=np.exp(-10), loc=0, scale=3)} \n",
1852+
"sigma_log_value -> {- 10 + scipy.stats.halfnorm.logpdf(x=np.exp(-10), loc=0, scale=3)}\n",
18521853
"x_value -> {scipy.stats.norm.logpdf(x=0, loc=0, scale=np.exp(-10))}\n",
18531854
"\"\"\"\n",
18541855
")"

0 commit comments

Comments (0)