diff --git a/db_dtypes/json.py b/db_dtypes/json.py
index 99e0c67..835d638 100644
--- a/db_dtypes/json.py
+++ b/db_dtypes/json.py
@@ -257,8 +257,10 @@ def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray:
 
 
 class JSONArrowScalar(pa.ExtensionScalar):
-    def as_py(self):
-        return JSONArray._deserialize_json(self.value.as_py() if self.value else None)
+    def as_py(self, **kwargs):
+        return JSONArray._deserialize_json(
+            self.value.as_py(**kwargs) if self.value else None
+        )
 
 
 class JSONArrowType(pa.ExtensionType):
diff --git a/noxfile.py b/noxfile.py
index bf5b3d2..c487cd7 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -199,10 +199,7 @@ def prerelease(session, tests_path):
         "--prefer-binary",
         "--pre",
         "--upgrade",
-        # Limit pyarrow to versions prior to 20.0.0.dev19 to prevent a RuntimeWarning
-        # during import. This workaround can be removed once the underlying issue
-        # in pyarrow is resolved (see: https://github.com/apache/arrow/issues/45380).
-        "pyarrow<=20.0.0.dev18",
+        "pyarrow",
     )
     # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer
     # prerelease comes out, this constraint can be removed. See