Commit 3171674

Fix some leftover size_t references
1 parent 0985cf3 · commit 3171674

2 files changed: +28 -28 lines changed


pandas/_libs/src/parser/tokenizer.c (+17 -17)
@@ -305,11 +305,11 @@ static int make_stream_space(parser_t *self, size_t nbytes) {
              "self->words_cap=%d\n",
              nbytes, self->words_cap))
         newptr = safe_realloc((void *)self->word_starts,
-                              sizeof(size_t) * self->words_cap);
+                              sizeof(int64_t) * self->words_cap);
         if (newptr == NULL) {
             return PARSER_OUT_OF_MEMORY;
         } else {
-            self->word_starts = (size_t *)newptr;
+            self->word_starts = (int64_t *)newptr;
         }
     }

@@ -318,8 +318,8 @@ static int make_stream_space(parser_t *self, size_t nbytes) {
      */
     cap = self->lines_cap;
     self->line_start =
-        (size_t *)grow_buffer((void *)self->line_start, self->lines + 1,
-                              &self->lines_cap, nbytes, sizeof(size_t), &status);
+        (int64_t *)grow_buffer((void *)self->line_start, self->lines + 1,
+                               &self->lines_cap, nbytes, sizeof(int64_t), &status);
     TRACE((
         "make_stream_space: grow_buffer(self->line_start, %zu, %zu, %zu, %d)\n",
         self->lines + 1, self->lines_cap, nbytes, status))

@@ -332,11 +332,11 @@ static int make_stream_space(parser_t *self, size_t nbytes) {
         TRACE(("make_stream_space: cap != self->lines_cap, nbytes = %d\n",
                nbytes))
         newptr = safe_realloc((void *)self->line_fields,
-                              sizeof(size_t) * self->lines_cap);
+                              sizeof(int64_t) * self->lines_cap);
         if (newptr == NULL) {
             return PARSER_OUT_OF_MEMORY;
         } else {
-            self->line_fields = (size_t *)newptr;
+            self->line_fields = (int64_t *)newptr;
         }
     }

@@ -718,8 +718,8 @@ int skip_this_line(parser_t *self, int64_t rownum) {
         }
     }

-int tokenize_bytes(parser_t *self, size_t line_limit, size_t start_lines) {
-    size_t i, slen;
+int tokenize_bytes(parser_t *self, size_t line_limit, int64_t start_lines) {
+    int64_t i, slen;
     int should_skip;
     char c;
     char *stream;

@@ -1235,7 +1235,7 @@ int parser_trim_buffers(parser_t *self) {
     size_t new_cap;
     void *newptr;

-    size_t i;
+    int64_t i;

     /* trim words, word_starts */
     new_cap = _next_pow2(self->words_len) + 1;

@@ -1248,11 +1248,11 @@ int parser_trim_buffers(parser_t *self) {
             self->words = (char **)newptr;
         }
         newptr = safe_realloc((void *)self->word_starts,
-                              new_cap * sizeof(size_t));
+                              new_cap * sizeof(int64_t));
         if (newptr == NULL) {
             return PARSER_OUT_OF_MEMORY;
         } else {
-            self->word_starts = (size_t *)newptr;
+            self->word_starts = (int64_t *)newptr;
             self->words_cap = new_cap;
         }
     }

@@ -1299,18 +1299,18 @@ int parser_trim_buffers(parser_t *self) {
     if (new_cap < self->lines_cap) {
         TRACE(("parser_trim_buffers: new_cap < self->lines_cap\n"));
         newptr = safe_realloc((void *)self->line_start,
-                              new_cap * sizeof(size_t));
+                              new_cap * sizeof(int64_t));
         if (newptr == NULL) {
             return PARSER_OUT_OF_MEMORY;
         } else {
-            self->line_start = (size_t *)newptr;
+            self->line_start = (int64_t *)newptr;
         }
         newptr = safe_realloc((void *)self->line_fields,
-                              new_cap * sizeof(size_t));
+                              new_cap * sizeof(int64_t));
         if (newptr == NULL) {
             return PARSER_OUT_OF_MEMORY;
         } else {
-            self->line_fields = (size_t *)newptr;
+            self->line_fields = (int64_t *)newptr;
             self->lines_cap = new_cap;
         }
     }

@@ -1319,7 +1319,7 @@ int parser_trim_buffers(parser_t *self) {
 }

 void debug_print_parser(parser_t *self) {
-    size_t j, line;
+    int64_t j, line;
     char *token;

     for (line = 0; line < self->lines; ++line) {

@@ -1340,7 +1340,7 @@ void debug_print_parser(parser_t *self) {

 int _tokenize_helper(parser_t *self, size_t nrows, int all) {
     int status = 0;
-    size_t start_lines = self->lines;
+    int64_t start_lines = self->lines;

     if (self->state == FINISHED) {
         return 0;
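For context on why these leftovers matter, below is a minimal standalone sketch, not taken from the commit: the index_buf type and grow() helper are hypothetical stand-ins for the word_starts pattern above. Sizing an allocation with sizeof(size_t) while storing through an int64_t * happens to work on common 64-bit targets where both types are 8 bytes wide, but on a 32-bit target size_t is typically 4 bytes, so the buffer ends up half the needed size and later writes overrun it. The commit's fix is simply to size each allocation with the element type that the destination pointer actually stores.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical growable index buffer, mirroring the word_starts pattern. */
typedef struct {
    int64_t *starts;   /* declared element type is int64_t */
    size_t cap;
} index_buf;

static int grow(index_buf *b, size_t new_cap) {
    /* Pre-commit pattern (bug): sizing with sizeof(size_t).  On a 32-bit
       platform this reserves 4 bytes per slot for 8-byte elements.
       void *p = realloc(b->starts, new_cap * sizeof(size_t));             */

    /* Fixed pattern: size with the element type actually stored. */
    void *p = realloc(b->starts, new_cap * sizeof(int64_t));
    if (p == NULL)
        return -1;              /* caller reports out-of-memory */
    b->starts = (int64_t *)p;
    b->cap = new_cap;
    return 0;
}

int main(void) {
    index_buf b = {NULL, 0};
    if (grow(&b, 16) == 0) {
        b.starts[15] = 42;      /* safe only because the sizes now agree */
        printf("%lld\n", (long long)b.starts[15]);
    }
    free(b.starts);
    return 0;
}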

pandas/_libs/src/parser/tokenizer.h (+11 -11)
@@ -137,9 +137,9 @@ typedef struct parser_t {
     io_callback cb_io;
     io_cleanup cb_cleanup;

-    int64_t chunksize; // Number of bytes to prepare for each chunk
-    char *data; // pointer to data to be processed
-    int64_t datalen; // amount of data available
+    int64_t chunksize;  // Number of bytes to prepare for each chunk
+    char *data;         // pointer to data to be processed
+    int64_t datalen;    // amount of data available
     int64_t datapos;

     // where to write out tokenized data

@@ -149,18 +149,18 @@ typedef struct parser_t {

     // Store words in (potentially ragged) matrix for now, hmm
     char **words;
-    int64_t *word_starts; // where we are in the stream
+    int64_t *word_starts;  // where we are in the stream
     int64_t words_len;
     int64_t words_cap;

-    char *pword_start; // pointer to stream start of current field
+    char *pword_start;    // pointer to stream start of current field
     int64_t word_start; // position start of current field

-    int64_t *line_start; // position in words for start of line
-    int64_t *line_fields; // Number of fields in each line
-    int64_t lines; // Number of (good) lines observed
-    int64_t file_lines; // Number of lines observed (including bad or skipped)
-    int64_t lines_cap; // Vector capacity
+    int64_t *line_start;   // position in words for start of line
+    int64_t *line_fields;  // Number of fields in each line
+    int64_t lines;         // Number of (good) lines observed
+    int64_t file_lines;    // Number of lines observed (including bad or skipped)
+    int64_t lines_cap;     // Vector capacity

     // Tokenizing stuff
     ParserState state;

@@ -193,7 +193,7 @@ typedef struct parser_t {
     // thousands separator (comma, period)
     char thousands;

-    int header; // Boolean: 1: has header, 0: no header
+    int header;            // Boolean: 1: has header, 0: no header
     int64_t header_start; // header row start
     int64_t header_end; // header row end

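As a closing aside, not something this commit does: one way to keep allocation sizes from drifting out of sync with the header again is to derive the element size from the destination pointer itself, so the struct declaration remains the single source of truth. A minimal sketch, with the hypothetical parser_like type standing in for the relevant parser_t fields:

#include <stdint.h>
#include <stdlib.h>

typedef struct {
    int64_t *line_start;   /* hypothetical stand-in for parser_t::line_start */
    int64_t lines_cap;
} parser_like;

static int resize_line_start(parser_like *p, int64_t new_cap) {
    /* sizeof(*p->line_start) tracks the declared field type automatically,
       so changing the type in the struct cannot leave a stale sizeof here. */
    void *q = realloc(p->line_start, (size_t)new_cap * sizeof(*p->line_start));
    if (q == NULL)
        return -1;
    p->line_start = (int64_t *)q;
    p->lines_cap = new_cap;
    return 0;
}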