Skip to content

Commit 94f3923

Browse files
jbrockmendel authored and jreback committed
remove unused (#18533)
1 parent 6148e58 commit 94f3923

File tree

6 files changed

+0
-332
lines changed

6 files changed

+0
-332
lines changed

pandas/_libs/algos.pyx

-45
Original file line numberDiff line numberDiff line change
@@ -212,51 +212,6 @@ cpdef numeric median(numeric[:] arr):
212212
kth_smallest(arr, n // 2 - 1)) / 2
213213

214214

215-
# -------------- Min, Max subsequence

@cython.boundscheck(False)
@cython.wraparound(False)
def max_subseq(ndarray[double_t] arr):
    """
    Find the contiguous subsequence of `arr` with the largest sum
    (Kadane's algorithm).

    Returns
    -------
    tuple
        (start, end, max_sum); ``(-1, -1, None)`` when `arr` is empty.
    """
    cdef:
        Py_ssize_t i=0, best_start=0, best_end=0, cand_start, n
        double best, running

    n = len(arr)

    if n == 0:
        # nothing to scan
        return (-1, -1, None)

    best = arr[0]
    running = best
    cand_start = 0

    with nogil:
        for i in range(1, n):
            # running = max(running + arr[i], arr[i]);
            # a non-positive running sum can never help, so restart at i
            if running > 0:
                running = running + arr[i]
            else:
                running = arr[i]
                cand_start = i
            if running > best:
                best_start = cand_start
                best_end = i
                best = running

    return (best_start, best_end, best)
247-
248-
249-
@cython.boundscheck(False)
@cython.wraparound(False)
def min_subseq(ndarray[double_t] arr):
    """
    Find the contiguous subsequence of `arr` with the smallest sum.

    Implemented by negating the input, delegating to ``max_subseq``,
    and negating the resulting sum back.
    """
    cdef:
        Py_ssize_t start, end
        double total

    start, end, total = max_subseq(-arr)

    return (start, end, -total)
259-
260215
# ----------------------------------------------------------------------
261216
# Pairwise correlation/covariance
262217

pandas/_libs/groupby.pyx

-101
Original file line numberDiff line numberDiff line change
@@ -75,57 +75,6 @@ def group_nth_object(ndarray[object, ndim=2] out,
7575
out[i, j] = resx[i, j]
7676

7777

78-
@cython.boundscheck(False)
@cython.wraparound(False)
def group_nth_bin_object(ndarray[object, ndim=2] out,
                         ndarray[int64_t] counts,
                         ndarray[object, ndim=2] values,
                         ndarray[int64_t] bins, int64_t rank):
    """
    Write the `rank`-th non-null value of each bin/column into `out`.

    Only aggregates on axis=0.

    Parameters
    ----------
    out : 2-d object ndarray, shape (ngroups, K)
        Receives the result; cells with no rank-th non-null value get nan.
    counts : int64 ndarray
        Incremented in place with the number of rows landing in each bin.
    values : 2-d object ndarray
        Input values, aggregated along axis 0.
    bins : int64 ndarray
        Right-open cumulative row offsets delimiting the bins.
    rank : int64_t
        Which non-null occurrence (1-based) to keep per bin/column.
    """
    cdef:
        Py_ssize_t i, j, N, K, ngroups, b
        object val
        ndarray[object, ndim=2] resx
        ndarray[float64_t, ndim=2] nobs

    nobs = np.zeros((<object> out).shape, dtype=np.float64)
    resx = np.empty((<object> out).shape, dtype=object)

    if len(bins) == 0:
        return
    # a final edge equal to len(values) does not open a trailing bin
    if bins[len(bins) - 1] == len(values):
        ngroups = len(bins)
    else:
        ngroups = len(bins) + 1

    N, K = (<object> values).shape

    b = 0
    for i in range(N):
        # advance to the bin containing row i
        while b < ngroups - 1 and i >= bins[b]:
            b += 1

        counts[b] += 1
        for j in range(K):
            val = values[i, j]

            # val == val is False only for NaN-like objects
            if val == val:
                nobs[b, j] += 1
                if nobs[b, j] == rank:
                    resx[b, j] = val

    for i in range(ngroups):
        for j in range(K):
            if nobs[i, j] == 0:
                out[i, j] = nan
            else:
                out[i, j] = resx[i, j]
127-
128-
12978
@cython.boundscheck(False)
13079
@cython.wraparound(False)
13180
def group_last_object(ndarray[object, ndim=2] out,
@@ -169,56 +118,6 @@ def group_last_object(ndarray[object, ndim=2] out,
169118
out[i, j] = resx[i, j]
170119

171120

172-
@cython.boundscheck(False)
@cython.wraparound(False)
def group_last_bin_object(ndarray[object, ndim=2] out,
                          ndarray[int64_t] counts,
                          ndarray[object, ndim=2] values,
                          ndarray[int64_t] bins):
    """
    Write the last non-null value of each bin/column into `out`.

    Only aggregates on axis=0.

    Parameters
    ----------
    out : 2-d object ndarray, shape (ngroups, K)
        Receives the result; cells whose bin holds no non-null value get nan.
    counts : int64 ndarray
        Incremented in place with the number of rows landing in each bin.
    values : 2-d object ndarray
        Input values, aggregated along axis 0.
    bins : int64 ndarray
        Right-open cumulative row offsets delimiting the bins.
    """
    cdef:
        Py_ssize_t i, j, N, K, ngroups, b
        object val
        ndarray[object, ndim=2] resx
        ndarray[float64_t, ndim=2] nobs

    nobs = np.zeros((<object> out).shape, dtype=np.float64)
    resx = np.empty((<object> out).shape, dtype=object)

    if len(bins) == 0:
        return
    # a final edge equal to len(values) does not open a trailing bin
    if bins[len(bins) - 1] == len(values):
        ngroups = len(bins)
    else:
        ngroups = len(bins) + 1

    N, K = (<object> values).shape

    b = 0
    for i in range(N):
        # advance to the bin containing row i
        while b < ngroups - 1 and i >= bins[b]:
            b += 1

        counts[b] += 1
        for j in range(K):
            val = values[i, j]

            # val == val is False only for NaN-like objects
            if val == val:
                nobs[b, j] += 1
                # unconditional overwrite keeps the last non-null seen
                resx[b, j] = val

    for i in range(ngroups):
        for j in range(K):
            if nobs[i, j] == 0:
                out[i, j] = nan
            else:
                out[i, j] = resx[i, j]
220-
221-
222121
cdef inline float64_t _median_linear(float64_t* a, int n) nogil:
223122
cdef int i, j, na_count = 0
224123
cdef float64_t result

pandas/_libs/hashing.pyx

-5
Original file line numberDiff line numberDiff line change
@@ -105,11 +105,6 @@ cdef inline void u32to8_le(uint8_t* p, uint32_t v) nogil:
105105
p[3] = <uint8_t>(v >> 24)
106106

107107

108-
cdef inline void u64to8_le(uint8_t* p, uint64_t v) nogil:
    # Serialize a 64-bit integer little-endian as two 32-bit halves:
    # high word into bytes 4..7, low word into bytes 0..3 (the two
    # stores touch disjoint bytes, so their order is immaterial).
    u32to8_le(p + 4, <uint32_t>(v >> 32))
    u32to8_le(p, <uint32_t>v)
111-
112-
113108
cdef inline uint64_t u8to64_le(uint8_t* p) nogil:
114109
return (<uint64_t>p[0] |
115110
<uint64_t>p[1] << 8 |

pandas/_libs/join.pyx

-24
Original file line numberDiff line numberDiff line change
@@ -240,28 +240,4 @@ def ffill_indexer(ndarray[int64_t] indexer):
240240
return result
241241

242242

243-
def ffill_by_group(ndarray[int64_t] indexer, ndarray[int64_t] group_ids,
                   int64_t max_group):
    """
    Forward-fill missing entries (-1) in `indexer` within each group.

    A -1 entry is replaced by the most recent non-missing value observed
    for that row's group id; while a group has produced nothing yet, the
    fill value stays -1 (the per-group state is initialized to -1).
    """
    cdef:
        Py_ssize_t pos, length = len(indexer)
        ndarray[int64_t] filled, seen
        int64_t grp, entry

    filled = np.empty(length, dtype=np.int64)

    # last observed indexer value per group; -1 means "nothing seen yet"
    seen = np.full(max_group, -1, dtype=np.int64)

    for pos in range(length):
        grp = group_ids[pos]
        entry = indexer[pos]
        if entry == -1:
            filled[pos] = seen[grp]
        else:
            filled[pos] = entry
            seen[grp] = entry

    return filled
265-
266-
267243
include "join_helper.pxi"

pandas/_libs/lib.pyx

-106
Original file line numberDiff line numberDiff line change
@@ -76,27 +76,6 @@ def values_from_object(object o):
7676
return o
7777

7878

79-
cpdef map_indices_list(list index):
    """
    Produce a dict mapping the values of the input list to their respective
    locations.

    Example:
        ['hi', 'there'] --> {'hi' : 0 , 'there' : 1}

    If a value occurs more than once, the location of its last occurrence
    wins (later assignments overwrite earlier ones).

    Better to do this with Cython because of the enormous speed boost.
    """
    cdef Py_ssize_t i, length
    cdef dict result = {}

    length = len(index)

    # range() replaces the deprecated `for i from 0 <= i < length` syntax;
    # Cython compiles it to the same C loop for a typed index variable.
    for i in range(length):
        result[index[i]] = i

    return result
98-
99-
10079
@cython.wraparound(False)
10180
@cython.boundscheck(False)
10281
def memory_usage_of_objects(ndarray[object, ndim=1] arr):
@@ -1094,27 +1073,6 @@ def get_level_sorter(ndarray[int64_t, ndim=1] label,
10941073
return out
10951074

10961075

1097-
def group_count(ndarray[int64_t] values, Py_ssize_t size):
    """
    Tally how many times each label in ``[0, size)`` occurs in `values`.

    Returns an int64 ndarray of length `size` holding the counts.
    """
    cdef:
        Py_ssize_t idx, length = len(values)
        ndarray[int64_t] tallies

    tallies = np.zeros(size, dtype=np.int64)
    for idx in range(length):
        tallies[values[idx]] += 1
    return tallies
1106-
1107-
1108-
def lookup_values(ndarray[object] values, dict mapping):
    """
    Map each element of `values` through `mapping` (a missing key raises
    KeyError), then let ``maybe_convert_objects`` coerce the resulting
    object array to a more specific dtype when possible.
    """
    cdef:
        Py_ssize_t k, length = len(values)

    out = np.empty(length, dtype='O')
    for k in range(length):
        out[k] = mapping[values[k]]
    return maybe_convert_objects(out)
1116-
1117-
11181076
@cython.boundscheck(False)
11191077
@cython.wraparound(False)
11201078
def count_level_2d(ndarray[uint8_t, ndim=2, cast=True] mask,
@@ -1145,70 +1103,6 @@ def count_level_2d(ndarray[uint8_t, ndim=2, cast=True] mask,
11451103
return counts
11461104

11471105

1148-
cdef class _PandasNull:
    # Null sentinel: equal to (and only to) other _PandasNull instances,
    # and hashable so it can be used as a dict key / tuple member.

    def __richcmp__(_PandasNull self, object other, int op):
        # Cython rich-comparison opcodes: 2 -> ==, 3 -> !=
        same = isinstance(other, _PandasNull)
        if op == 2:
            return same
        if op == 3:
            return not same
        # ordering comparisons are meaningless for a null sentinel
        return False

    def __hash__(self):
        # constant hash: all nulls compare equal, so they must hash equal
        return 0

pandas_null = _PandasNull()
1162-
1163-
1164-
def fast_zip_fillna(list ndarrays, fill_value=pandas_null):
    """
    For zipping multiple ndarrays into an ndarray of tuples, substituting
    `fill_value` for NaN-like entries (those where ``item != item``).
    """
    cdef:
        Py_ssize_t i, j, ncols, nrows
        ndarray[object] zipped
        flatiter it
        object item, row

    ncols = len(ndarrays)
    nrows = len(ndarrays[0])

    zipped = np.empty(nrows, dtype=object)

    # initialize tuples on first pass
    column = ndarrays[0]
    it = <flatiter> PyArray_IterNew(column)
    for i in range(nrows):
        item = PyArray_GETITEM(column, PyArray_ITER_DATA(it))
        row = PyTuple_New(ncols)

        if item != item:
            item = fill_value

        # PyTuple_SET_ITEM steals a reference, so INCREF keeps `item` alive
        PyTuple_SET_ITEM(row, 0, item)
        Py_INCREF(item)
        zipped[i] = row
        PyArray_ITER_NEXT(it)

    for j in range(1, ncols):
        column = ndarrays[j]
        it = <flatiter> PyArray_IterNew(column)
        if len(column) != nrows:
            raise ValueError('all arrays must be same length')

        for i in range(nrows):
            item = PyArray_GETITEM(column, PyArray_ITER_DATA(it))
            if item != item:
                item = fill_value

            PyTuple_SET_ITEM(zipped[i], j, item)
            Py_INCREF(item)
            PyArray_ITER_NEXT(it)

    return zipped
1210-
1211-
12121106
def generate_slices(ndarray[int64_t] labels, Py_ssize_t ngroups):
12131107
cdef:
12141108
Py_ssize_t i, group_size, n, start

0 commit comments

Comments (0)