
ENH: control a random stream to be from either numpy or pytorch #135


Merged
merged 3 commits on May 10, 2023
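This PR adds a module-level USE_NUMPY_RANDOM flag to torch_np.random, so that the random functions can draw either from the default torch-backed stream or from numpy.random. A minimal usage sketch, using only the names visible in the diff below (return types follow whichever backend is active):

import torch_np as tnp

# Default: draw from the torch-backed stream.
tnp.random.seed(1234)
x_torch = tnp.random.uniform(0, 1, size=5)

# Flip the module-level flag to route the same calls through numpy.random.
tnp.random.USE_NUMPY_RANDOM = True
tnp.random.seed(1234)
x_numpy = tnp.random.uniform(0, 1, size=5)

# Restore the default torch stream.
tnp.random.USE_NUMPY_RANDOM = False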
30 changes: 30 additions & 0 deletions torch_np/random.py
@@ -7,6 +7,7 @@
"""
from __future__ import annotations

import functools
from math import sqrt
from typing import Optional

@@ -29,21 +30,44 @@
"randint",
"shuffle",
"uniform",
"USE_NUMPY_RANDOM",
]


USE_NUMPY_RANDOM = False


def deco_stream(func):
    """Wrap func to dispatch to numpy.random's same-named function when USE_NUMPY_RANDOM is True."""

    @functools.wraps(func)
def inner(*args, **kwds):
if USE_NUMPY_RANDOM is False:
return func(*args, **kwds)
elif USE_NUMPY_RANDOM is True:
from numpy import random as nr

f = getattr(nr, func.__name__)
return f(*args, **kwds)
else:
raise ValueError(f"USE_NUMPY_RANDOM={USE_NUMPY_RANDOM} not understood.")

return inner


@deco_stream
def seed(seed=None):
if seed is not None:
torch.random.manual_seed(seed)


@deco_stream
def random_sample(size=None):
if size is None:
size = ()
values = torch.empty(size, dtype=_default_dtype).uniform_()
return array_or_scalar(values, return_scalar=size is None)


@deco_stream
def rand(*size):
return random_sample(size)

@@ -52,32 +76,37 @@ def rand(*size):
random = random_sample


@deco_stream
def uniform(low=0.0, high=1.0, size=None):
if size is None:
size = ()
values = torch.empty(size, dtype=_default_dtype).uniform_(low, high)
return array_or_scalar(values, return_scalar=size is None)


@deco_stream
def randn(*size):
values = torch.randn(size, dtype=_default_dtype)
return array_or_scalar(values, return_scalar=size is None)


@deco_stream
def normal(loc=0.0, scale=1.0, size=None):
if size is None:
size = ()
values = torch.empty(size, dtype=_default_dtype).normal_(loc, scale)
return array_or_scalar(values, return_scalar=size is None)


@deco_stream
@normalizer
def shuffle(x: ArrayLike):
perm = torch.randperm(x.shape[0])
xp = x[perm]
x.copy_(xp)


@deco_stream
def randint(low, high=None, size=None):
if size is None:
size = ()
@@ -89,6 +118,7 @@ def randint(low, high=None, size=None):
return array_or_scalar(values, int, return_scalar=size is None)


@deco_stream
@normalizer
def choice(a: ArrayLike, size=None, replace=True, p: Optional[ArrayLike] = None):

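A note on the dispatch above (an illustration, not part of the diff): deco_stream resolves the numpy counterpart via the wrapped function's __name__, so the switch only works for torch_np functions whose names match numpy.random's:

import numpy as np

def uniform(low=0.0, high=1.0, size=None):
    ...  # torch-backed body elided

# This getattr-by-name lookup is what deco_stream performs when USE_NUMPY_RANDOM is True.
assert getattr(np.random, uniform.__name__) is np.random.uniform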
48 changes: 48 additions & 0 deletions torch_np/tests/test_random.py
@@ -0,0 +1,48 @@
"""Light smoke test switching between numpy to pytorch random streams.
"""
import pytest

import torch_np as tnp
from torch_np.testing import assert_equal


def test_uniform():
r = tnp.random.uniform(0, 1, size=10)


def test_shuffle():
x = tnp.arange(10)
tnp.random.shuffle(x)


def test_numpy_global():
tnp.random.USE_NUMPY_RANDOM = True
tnp.random.seed(12345)
x = tnp.random.uniform(0, 1, size=11)

# check that the stream is identical to numpy's
import numpy as _np

_np.random.seed(12345)
x_np = _np.random.uniform(0, 1, size=11)

assert_equal(x, tnp.asarray(x_np))

# switch to the pytorch stream, variates differ
tnp.random.USE_NUMPY_RANDOM = False
tnp.random.seed(12345)

x_1 = tnp.random.uniform(0, 1, size=11)
assert not (x_1 == x).all()


def test_wrong_global():
try:
oldstate = tnp.random.USE_NUMPY_RANDOM

tnp.random.USE_NUMPY_RANDOM = "oops"
with pytest.raises(ValueError):
tnp.random.rand()

finally:
tnp.random.USE_NUMPY_RANDOM = oldstate