@@ -82,58 +82,3 @@ def wrapped(x1, x2, *args, **kwds):
     return wrapped
-
-def axis_keepdims_wrapper(func):
-    """`func` accepts an array-like as a 1st arg, returns a tensor.
-
-    This decorator implements the generic handling of axis, out and keepdims
-    arguments for reduction functions.
-
-    Note that we peel off `out=...` and `keepdims=...` args (torch functions never
-    see them). The `axis` argument we normalize and pass through to pytorch functions.
-
-    """
-    # TODO: sort out function signatures: how they flow through all decorators etc
-    @functools.wraps(func)
-    def wrapped(a, axis=None, keepdims=NoValue, *args, **kwds):
-        from ._ndarray import asarray, ndarray
-
-        tensor = asarray(a).get()
-
-        # standardize the axis argument
-        if isinstance(axis, ndarray):
-            axis = operator.index(axis)
-
-        result = _util.axis_expand_func(func, tensor, axis, *args, **kwds)
-
-        if keepdims:
-            result = _util.apply_keepdims(result, axis, tensor.ndim)
-
-        return result
-
-    return wrapped
-
-
-def axis_none_ravel_wrapper(func):
-    """`func` accepts an array-like as a 1st arg, returns a tensor.
-
-    This decorator implements the generic handling of axis=None acting on a
-    raveled array. One use is cumprod / cumsum. concatenate also uses a
-    similar logic.
-
-    """
-
-    @functools.wraps(func)
-    def wrapped(a, axis=None, *args, **kwds):
-        from ._ndarray import asarray, ndarray
-
-        tensor = asarray(a).get()
-
-        # standardize the axis argument
-        if isinstance(axis, ndarray):
-            axis = operator.index(axis)
-
-        result = _util.axis_ravel_func(func, tensor, axis, *args, **kwds)
-        return result
-
-    return wrapped
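For context, here is a minimal sketch of the kind of axis/keepdims handling the removed decorators provided. The names `keepdims_wrapper` and `my_sum` are illustrative only, and plain `torch` calls stand in for the project's internal `_util` helpers; this is not the removed implementation itself.

```python
# Illustrative sketch only: the decorator peels off `keepdims`, calls the
# wrapped reduction on a tensor, then re-inserts the reduced dimension.
import functools
import torch


def keepdims_wrapper(func):
    @functools.wraps(func)
    def wrapped(a, axis=None, keepdims=False, **kwds):
        tensor = torch.as_tensor(a)
        result = func(tensor, axis, **kwds)
        if keepdims:
            # restore the collapsed dimension(s) with size 1
            shape = [1] * tensor.ndim if axis is None else list(tensor.shape)
            if axis is not None:
                shape[axis] = 1
            result = result.reshape(shape)
        return result

    return wrapped


@keepdims_wrapper
def my_sum(tensor, axis, **kwds):
    return tensor.sum() if axis is None else tensor.sum(dim=axis, **kwds)


x = torch.arange(6.0).reshape(2, 3)
print(my_sum(x, axis=1))                 # tensor([ 3., 12.])
print(my_sum(x, axis=1, keepdims=True))  # tensor([[ 3.], [12.]])
```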