Pull request #93: wrap returns out annotation
```diff
@@ -44,7 +44,7 @@ def wrapped(
         tensors = tuple(torch.broadcast_to(t, shape) for t in tensors)
 
         result = torch_func(*tensors)
-        return _helpers.result_or_out(result, out)
+        return result
 
     return wrapped
```
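This hunk is the heart of the change: the ufunc body no longer calls `_helpers.result_or_out` itself; wrapping the raw result into the `out=` array is delegated to the `normalizer` decorator, driven by the function's annotations. Below is a minimal self-contained sketch of that pattern; the `NDArray` stand-in and the `result_or_out` and `normalizer` bodies are illustrative assumptions, not the project's actual implementations.

```python
import functools
import inspect
from typing import Optional

import torch


class NDArray:
    """Illustrative stand-in for the project's ndarray wrapper (assumption)."""

    def __init__(self, tensor):
        self.tensor = tensor


def result_or_out(tensor, out):
    # If an out= array was supplied, write the result into it and return it;
    # otherwise wrap the freshly computed tensor in a new NDArray.
    if out is not None:
        out.tensor.copy_(tensor)
        return out
    return NDArray(tensor)


def normalizer(func):
    # Sketch: if the wrapped function annotates an `out` parameter, apply
    # result_or_out to whatever raw tensor the body returns.
    has_out = "out" in inspect.signature(func).parameters

    @functools.wraps(func)
    def wrapped(*args, out: Optional[NDArray] = None, **kwds):
        result = func(*args, **kwds)
        return result_or_out(result, out) if has_out else NDArray(result)

    return wrapped


@normalizer
def add(x1, x2, out: Optional[NDArray] = None):
    return torch.add(x1, x2)  # the body just returns the raw tensor


buf = NDArray(torch.empty(3))
print(add(torch.ones(3), torch.ones(3), out=buf).tensor)  # tensor([2., 2., 2.])
```

With this split, every ufunc body stays a one-liner and the out-handling policy lives in exactly one place.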
```diff
@@ -77,69 +77,57 @@ def matmul(
 
     # NB: do not broadcast input tensors against the out=... array
     result = _binary_ufuncs.matmul(*tensors)
-    return _helpers.result_or_out(result, out)
-
-
-#
-# For each torch ufunc implementation, decorate and attach the decorated name
-# to this module. Its contents are then exported to the public namespace in __init__.py
-#
-for name in __all__:
-    ufunc = getattr(_binary_ufuncs, name)
-    decorated = normalizer(deco_binary_ufunc(ufunc))
-
-    decorated.__qualname__ = name  # XXX: is this really correct?
-    decorated.__name__ = name
-    vars()[name] = decorated
+    return result
 
 
 # a stub implementation of divmod, should be improved after
 # https://github.com/pytorch/pytorch/issues/90820 is fixed in pytorch
-#
-# Implementation details: we just call two ufuncs which have been created
-# just above, for x1 // x2 and x1 % x2.
-# This means we are normalizing x1, x2 in each of the ufuncs --- note that
-# there is no @normalizer on divmod.
 def divmod(
-    x1,
-    x2,
+    x1: ArrayLike,
+    x2: ArrayLike,
+    out1: Optional[NDArray] = None,
+    out2: Optional[NDArray] = None,
     /,
-    out=None,
+    out: Optional[tuple[NDArray]] = (None, None),
     *,
     where=True,
     casting="same_kind",
     order="K",
-    dtype=None,
+    dtype: DTypeLike = None,
+    subok: SubokLike = False,
     signature=None,
     extobj=None,
 ):
-    out1, out2 = None, None
-    if out is not None:
+    num_outs = sum(x is not None for x in [out1, out2])
+    if num_outs == 1:
+        raise ValueError("both out1 and out2 need to be provided")
+    if num_outs != 0 and out != (None, None):
+        raise ValueError("Either provide out1 and out2, or out.")
+    if out != (None, None):
         out1, out2 = out
+        if out1.shape != out2.shape or out1.dtype != out2.dtype:
+            raise ValueError("out1, out2 must be compatible")
 
-    kwds = dict(
-        where=where,
-        casting=casting,
-        order=order,
-        dtype=dtype,
-        subok=subok,
-        signature=signature,
-        extobj=extobj,
-    )
+    tensors = _helpers.ufunc_preprocess(
+        (x1, x2), out, True, casting, order, dtype, subok, signature, extobj
+    )
 
-    # NB: use local names for quot, rem
-    quot = floor_divide(x1, x2, out=out1, **kwds)
-    rem = remainder(x1, x2, out=out2, **kwds)
-
-    quot = _helpers.result_or_out(quot.tensor, out1)
-    rem = _helpers.result_or_out(rem.tensor, out2)
-
-    return quot, rem
+    result = _binary_ufuncs.divmod(*tensors)
+
+    return result
 
 
+#
+# For each torch ufunc implementation, decorate and attach the decorated name
+# to this module. Its contents are then exported to the public namespace in __init__.py
+#
+for name in __all__:
+    ufunc = getattr(_binary_ufuncs, name)
+    decorated = normalizer(deco_binary_ufunc(ufunc))
+
+    decorated.__qualname__ = name  # XXX: is this really correct?
+    decorated.__name__ = name
+    vars()[name] = decorated
 
 
 def modf(x, /, *args, **kwds):
     quot, rem = divmod(x, 1, *args, **kwds)
     return rem, quot
```

An inline review thread is attached to the `_helpers.ufunc_preprocess(...)` call:

**Reviewer:** As discussed.

**Author:** It's still in the TODO pipeline. Will do after the current "stack" is done.
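The new `divmod` accepts its two output arrays either positionally (`out1`, `out2`) or bundled in the `out=` tuple, but not mixed, and not just one of the pair. Here is a standalone sketch of that validation under those assumed semantics; `check_outs` is a made-up name and strings stand in for arrays:

```python
def check_outs(out1=None, out2=None, out=(None, None)):
    # Count how many positional output arrays were supplied.
    num_outs = sum(x is not None for x in [out1, out2])
    if num_outs == 1:
        raise ValueError("both out1 and out2 need to be provided")
    if num_outs != 0 and out != (None, None):
        raise ValueError("Either provide out1 and out2, or out.")
    if out != (None, None):
        out1, out2 = out
    return out1, out2


print(check_outs())                            # (None, None): allocate fresh outputs
print(check_outs("a1", "a2"))                  # both positional outs: fine
print(check_outs(out=("a1", "a2")))            # out= tuple: fine
# check_outs("a1")                             # raises: only one positional out
# check_outs("a1", "a2", out=("b1", "b2"))     # raises: mixing both styles
```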
Review discussion on the `out: Optional[tuple[NDArray]] = (None, None)` annotation:

**Reviewer:** nit: `tuple[Optional[NDArray], Optional[NDArray]]`.

**Author:** Why? It's either `(None, None)` or `(NDArray, NDArray)`; can't have `(out1, None)` (I think).

**Reviewer:** `tuple`, as opposed to `list`, takes the number of arguments it accepts explicitly. As such, you are declaring there a tuple of length 1 holding one `NDArray`. Then, `Optional[tuple[NDArray, NDArray]]` accepts either a tuple of ndarrays (fine) or `None` (not `(None, None)`). What you want is a (partial) function that accepts `tuple[Optional[NDArray], Optional[NDArray]]` and errors out when just one of the elements of the tuple is `None`.

**Reviewer:** Feel free to patch this small correction in any other PR.

**Author:** Ouch. Python's typing meta-language is seriously weird.

**Reviewer:** Note that this is not specific to Python; the same would be true in C++. This would be a `std::tuple<std::optional<array>, std::optional<array>>` rather than a `std::optional<std::tuple<array, array>>` :)
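The distinction the thread is drawing can be verified with a static type checker such as mypy. In this illustration `array` is a hypothetical placeholder class, not the project's `NDArray`:

```python
from typing import Optional


class array:
    """Hypothetical placeholder for an ndarray type."""


def f(out: Optional[tuple[array, array]]) -> None:
    ...


def g(out: tuple[Optional[array], Optional[array]]) -> None:
    ...


f(None)                # OK: the whole tuple may be None
f((array(), array()))  # OK: a tuple of two arrays
f((None, None))        # mypy error: tuple elements must be `array`, not None

g((None, None))        # OK: each element may independently be None
g((array(), None))     # passes the type check; divmod's own runtime
                       # validation is what rejects a half-provided pair
g(None)                # mypy error: the tuple itself may not be None
```

In other words, `Optional` distributes over the elements in the second form but over the whole tuple in the first, which is why the default value `(None, None)` only type-checks against `tuple[Optional[NDArray], Optional[NDArray]]`.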