Commit 6dbed4b

MAINT: lint

1 parent 95047a2 commit 6dbed4b

File tree

4 files changed: +15 −15 lines changed

torch_np/_detail/implementations.py

Lines changed: 1 addition & 0 deletions
@@ -618,6 +618,7 @@ def _flatten(tensor, order="C"):
 
 # ### swap/move/roll axis ###
 
+
 def moveaxis(tensor, source, destination):
     source = _util.normalize_axis_tuple(source, tensor.ndim, "source")
     destination = _util.normalize_axis_tuple(destination, tensor.ndim, "destination")
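For context, the function this hunk touches (only a blank line is added) has the same semantics as numpy.moveaxis, and plain PyTorch ships an equivalent, torch.movedim, which is a convenient way to sanity-check the expected behavior. A minimal, standalone sketch, not part of this commit:

import torch

# moveaxis semantics: the axis at `source` ends up at `destination`;
# the remaining axes keep their relative order.
x = torch.zeros(2, 3, 4)
y = torch.movedim(x, 0, -1)   # move axis 0 to the last position
print(y.shape)                # torch.Size([3, 4, 2])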

torch_np/_funcs.py

Lines changed: 12 additions & 14 deletions
@@ -1,7 +1,7 @@
 import torch
 
 from . import _decorators, _helpers
-from ._detail import _util, _flips
+from ._detail import _flips, _util
 from ._detail import implementations as _impl
 
 
@@ -120,54 +120,52 @@ def searchsorted(a, v, side="left", sorter=None):
 
 
 def moveaxis(a, source, destination):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl.moveaxis(tensor, source, destination)
     return _helpers.array_from(result)
 
 
 def swapaxes(a, axis1, axis2):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _flips.swapaxes(tensor, axis1, axis2)
     return _helpers.array_from(result)
 
 
 def rollaxis(a, axis, start=0):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _flips.rollaxis(a, axis, start)
     return _helpers.array_from(result)
 
 
 # ### shape manipulations ###
 
+
 def squeeze(a, axis=None):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl.squeeze(tensor, axis)
     return _helpers.array_from(result, a)
 
 
 def reshape(a, newshape, order="C"):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl.reshape(tensor, newshape, order=order)
     return _helpers.array_from(result, a)
 
 
 def transpose(a, axes=None):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl.transpose(tensor, axes)
     return _helpers.array_from(result, a)
 
 
 def ravel(a, order="C"):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl.ravel(tensor)
-    return _helpers.array_from(result, a)
+    return _helpers.array_from(result, a)
 
 
 # leading underscore since arr.flatten exists but np.flatten does not
 def _flatten(a, order="C"):
-    tensor, = _helpers.to_tensors(a)
+    (tensor,) = _helpers.to_tensors(a)
     result = _impl._flatten(tensor)
-    return _helpers.array_from(result, a)
-
-
-
+    return _helpers.array_from(result, a)
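The pattern in this file is unchanged: unwrap the single input, call the implementation, re-wrap the result. The edits are formatting only, notably `tensor, = ...` becoming `(tensor,) = ...`; both forms unpack a one-element tuple, the parenthesized spelling is simply the black-preferred style. A tiny standalone illustration (unwrap_one below is a hypothetical stand-in for _helpers.to_tensors, which returns a tuple):

def unwrap_one(*args):
    # hypothetical stand-in for _helpers.to_tensors: returns its inputs as a tuple
    return tuple(args)


# old spelling, still valid Python:
tensor, = unwrap_one("x")

# black-formatted spelling used after this commit, identical behavior:
(tensor,) = unwrap_one("x")

print(tensor)  # x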

torch_np/_helpers.py

Lines changed: 1 addition & 0 deletions
@@ -75,6 +75,7 @@ def result_or_out(result_tensor, out_array=None, promote_scalar=False):
 
 def array_from(tensor, base=None):
     from ._ndarray import ndarray
+
     base = base if isinstance(base, ndarray) else None
     return ndarray._from_tensor_and_base(tensor, base)  # XXX: nuke .base
 
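array_from is only touched cosmetically here. As a rough sketch of the wrapping pattern it implements, with a purely hypothetical MiniNDArray standing in for torch_np's ndarray (the real _from_tensor_and_base is not shown in this diff):

import torch


class MiniNDArray:
    # hypothetical stand-in for torch_np's ndarray wrapper
    def __init__(self, tensor, base=None):
        self.tensor = tensor
        self.base = base  # the array this one is a view of, if any


def array_from_like(tensor, base=None):
    # keep `base` only if it is already a wrapper object, mirroring the
    # `isinstance(base, ndarray)` guard in the hunk above
    base = base if isinstance(base, MiniNDArray) else None
    return MiniNDArray(tensor, base)


arr = array_from_like(torch.arange(3))
print(arr.tensor, arr.base)  # tensor([0, 1, 2]) None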

torch_np/_wrapper.py

Lines changed: 1 addition & 1 deletion
@@ -110,8 +110,8 @@ def _concat_check(tup, dtype, out):
 
 @_decorators.dtype_to_torch
 def concatenate(ar_tuple, axis=0, out=None, dtype=None, casting="same_kind"):
-    _concat_check(ar_tuple, dtype, out=out)
     tensors = _helpers.to_tensors(*ar_tuple)
+    _concat_check(tensors, dtype, out=out)
     result = _impl.concatenate(tensors, axis, out, dtype, casting)
     return _helpers.result_or_out(result, out)
 
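This is the only hunk that changes evaluation order: the inputs are converted to tensors first, and the sanity check then runs on the converted tensors rather than on the raw argument tuple. A minimal sketch of that convert-then-validate pattern, with hypothetical stand-ins for to_tensors and _concat_check (the real check's contents are not shown in this diff):

import torch


def _to_tensors(*seq):
    # stand-in for _helpers.to_tensors: coerce each input to a torch.Tensor
    return tuple(torch.as_tensor(x) for x in seq)


def _concat_check_stub(tensors, dtype, out):
    # stand-in for _concat_check: whatever it validates, it now sees tensors
    if len(tensors) == 0:
        raise ValueError("need at least one array to concatenate")


def concatenate_like(ar_tuple, axis=0, out=None, dtype=None):
    tensors = _to_tensors(*ar_tuple)          # convert first ...
    _concat_check_stub(tensors, dtype, out)   # ... then validate the converted inputs
    return torch.cat(tensors, dim=axis)


print(concatenate_like(([1, 2], [3, 4])))     # tensor([1, 2, 3, 4])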
