
Commit 0d6027c

Fixes pandas-dev#7626 - Corrects stopping logic when nrows argument is supplied
1 parent: 75b606a

1 file changed: 3 additions, 5 deletions

pandas/src/parser/tokenizer.c (+3, -5)

@@ -726,16 +726,14 @@ int skip_this_line(parser_t *self, int64_t rownum) {
     }
 }
 
-int tokenize_bytes(parser_t *self, size_t line_limit)
+int tokenize_bytes(parser_t *self, size_t line_limit, int start_lines)
 {
-    int i, slen, start_lines;
+    int i, slen;
     long maxstreamsize;
     char c;
     char *stream;
     char *buf = self->data + self->datapos;
 
-    start_lines = self->lines;
-
     if (make_stream_space(self, self->datalen - self->datapos) < 0) {
         self->error_msg = "out of memory";
         return -1;
@@ -1384,7 +1382,7 @@ int _tokenize_helper(parser_t *self, size_t nrows, int all) {
     TRACE(("_tokenize_helper: Trying to process %d bytes, datalen=%d, datapos= %d\n",
            self->datalen - self->datapos, self->datalen, self->datapos));
 
-    status = tokenize_bytes(self, nrows);
+    status = tokenize_bytes(self, nrows, start_lines);
 
     if (status < 0) {
         // XXX
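
Note on the change: instead of re-reading self->lines at the top of every tokenize_bytes call, the caller (_tokenize_helper) now passes in its own line-count baseline, so the nrows limit is measured against the whole request rather than against each individual call. The sketch below only illustrates that idea; parser_t is reduced to a stub, and tokenize_chunk and main are hypothetical stand-ins, not the pandas source, whose actual stopping condition may be written differently.

/* Minimal, self-contained sketch (NOT the pandas source): parser_t is reduced
 * to the one field that matters here, and tokenize_chunk is a hypothetical
 * stand-in for one call to tokenize_bytes(self, line_limit, start_lines). */
#include <stdio.h>
#include <stddef.h>

typedef struct {
    int lines;   /* total rows tokenized so far, across all calls */
} parser_t;

/* Consume up to chunk_rows rows, stopping once the overall request of
 * line_limit rows, measured from the caller-supplied baseline start_lines,
 * has been satisfied. */
static int tokenize_chunk(parser_t *self, size_t line_limit, int start_lines,
                          int chunk_rows) {
    for (int i = 0; i < chunk_rows; ++i) {
        if (line_limit > 0 &&
            (size_t)(self->lines - start_lines) >= line_limit) {
            return 1;               /* nrows reached: stop */
        }
        self->lines++;              /* "tokenize" one row */
    }
    return 0;                       /* chunk exhausted, limit not reached */
}

int main(void) {
    parser_t p = {0};
    size_t nrows = 5;
    int start_lines = p.lines;      /* baseline captured once by the caller */

    /* Two chunks of 3 rows each: with a shared baseline the second call stops
     * after 2 more rows, for 5 total.  If each call re-read the baseline from
     * self->lines (the pre-fix pattern inside tokenize_bytes), every chunk
     * would get its own fresh budget of 5 rows. */
    tokenize_chunk(&p, nrows, start_lines, 3);
    tokenize_chunk(&p, nrows, start_lines, 3);
    printf("rows tokenized: %d (nrows requested: %zu)\n", p.lines, nrows);
    return 0;
}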
