
Made line comments work with delim_whitespace and custom line terminator #8122


Merged
merged 2 commits on Aug 28, 2014

doc/source/v0.15.0.txt: 2 additions & 0 deletions
@@ -638,6 +638,8 @@ Bug Fixes
- Bug in ``Float64Index`` where ``iat`` and ``at`` were not testing and were
failing (:issue:`8092`).

- Bug in ``read_csv`` where line comments were not handled correctly given
a custom line terminator or ``delim_whitespace=True`` (:issue:`8122`).
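For reference, the two combinations the entry above refers to can be exercised like this (illustrative data, not taken from the pandas test suite; with the fix both calls should skip the full-line comment):

from io import StringIO
import pandas as pd

# comment character combined with a custom line terminator:
# records are separated by '*' and the '#...' record is dropped entirely
df1 = pd.read_csv(StringIO("A,B,C*1,2,3*#skip this line*4,5,6"),
                  comment='#', lineterminator='*')

# comment character combined with delim_whitespace=True:
# columns are split on runs of whitespace and the comment line is dropped
df2 = pd.read_csv(StringIO("A B C\n1 2 3\n#skip this line\n4 5 6\n"),
                  comment='#', delim_whitespace=True)

# both frames should end up with columns A, B, C and rows [1, 2, 3] and [4, 5, 6]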



pandas/io/tests/test_parsers.py: 8 additions & 0 deletions
@@ -2944,6 +2944,14 @@ def test_line_comment(self):
                    [5., np.nan, 10.]]
        df = self.read_csv(StringIO(data), comment='#')
        tm.assert_almost_equal(df.values, expected)
        # check with delim_whitespace=True
        df = self.read_csv(StringIO(data.replace(',', ' ')), comment='#',
                            delim_whitespace=True)
        tm.assert_almost_equal(df.values, expected)
        # check with custom line terminator
        df = self.read_csv(StringIO(data.replace('\n', '*')), comment='#',
                            lineterminator='*')
        tm.assert_almost_equal(df.values, expected)
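The `data` string used above is defined earlier in `test_line_comment` and is not shown in this hunk. A hypothetical input of the same shape makes the two `replace` calls concrete:

# hypothetical stand-in for the test's `data` fixture (the real one is defined
# earlier in test_line_comment and lies outside this diff)
data = "A,B,C\n1,2.,4.#trailing comment\n#full-line comment\n5.,NaN,10.0\n"

# delim_whitespace variant: commas become spaces, newlines are kept
data_ws = data.replace(',', ' ')

# custom line terminator variant: every newline becomes '*'
data_star = data.replace('\n', '*')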

def test_comment_skiprows(self):
data = """# empty
pandas/src/parser/tokenizer.c: 33 additions & 0 deletions
@@ -969,6 +969,10 @@ int tokenize_delim_customterm(parser_t *self, size_t line_limit)
                END_LINE();
                break;
            }
            else if (c == self->commentchar) {
                self->state = EAT_LINE_COMMENT;
                break;
            }
            /* normal character - handle as START_FIELD */
            self->state = START_FIELD;
            /* fallthru */
@@ -1103,6 +1107,13 @@ int tokenize_delim_customterm(parser_t *self, size_t line_limit)
            }
            break;

        case EAT_LINE_COMMENT:
            if (c == self->lineterminator) {
                self->file_lines++;
                self->state = START_RECORD;
            }
            break;

        case EAT_COMMENT:
            if (c == self->lineterminator) {
                END_LINE();
@@ -1186,6 +1197,9 @@ int tokenize_whitespace(parser_t *self, size_t line_limit)
            } else if (IS_WHITESPACE(c)) {
                self->state = EAT_WHITESPACE;
                break;
            } else if (c == self->commentchar) {
                self->state = EAT_LINE_COMMENT;
                break;
            } else {
                /* normal character - handle as START_FIELD */
                self->state = START_FIELD;
@@ -1231,6 +1245,16 @@ int tokenize_whitespace(parser_t *self, size_t line_limit)
            }
            break;

        case EAT_LINE_COMMENT:
            if (c == '\n') {
                self->file_lines++;
                self->state = START_RECORD;
            } else if (c == '\r') {
                self->file_lines++;
                self->state = EAT_CRNL_NOP;
            }
            break;

        case ESCAPED_CHAR:
            /* if (c == '\0') */
            /*     c = '\n'; */
@@ -1351,6 +1375,15 @@ int tokenize_whitespace(parser_t *self, size_t line_limit)
            }
            break;

        case EAT_CRNL_NOP: // inside an ignored comment line
            self->state = START_RECORD;
            /* \r line terminator -- parse this character again */
            if (c != '\n' && c != self->delimiter) {
                --i;
                --buf;
            }
            break;

        case EAT_COMMENT:
            if (c == '\n') {
                END_LINE();
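Taken together, the tokenizer changes add an EAT_LINE_COMMENT state to both tokenize_delim_customterm and tokenize_whitespace: once a record starts with the comment character, every character up to the line terminator is swallowed, file_lines is bumped, and the parser returns to START_RECORD without emitting any fields (EAT_CRNL_NOP additionally covers the '\r' case in the whitespace tokenizer). A rough Python sketch of that control flow, simplified to a single-character line terminator and no '\r' handling; it illustrates the state transitions rather than the actual C implementation:

# illustrative only: a tiny state machine that splits records while skipping
# full-line comments, in the spirit of START_RECORD / IN_FIELD / EAT_LINE_COMMENT
def split_skipping_comments(text, lineterminator='\n', commentchar='#'):
    START_RECORD, IN_FIELD, EAT_LINE_COMMENT = range(3)
    state = START_RECORD
    current, records = [], []
    for c in text:
        if state == START_RECORD:
            if c == lineterminator:
                pass                      # blank record, nothing to emit (simplified)
            elif c == commentchar:
                state = EAT_LINE_COMMENT  # swallow the whole record
            else:
                current.append(c)
                state = IN_FIELD
        elif state == IN_FIELD:
            if c == lineterminator:
                records.append(''.join(current))
                current = []
                state = START_RECORD
            else:
                current.append(c)
        else:  # EAT_LINE_COMMENT
            if c == lineterminator:
                state = START_RECORD      # comment consumed, no record emitted
    if current:
        records.append(''.join(current))
    return records

# split_skipping_comments("A,B,C*1,2,3*#skip me*4,5,6", lineterminator='*')
# -> ['A,B,C', '1,2,3', '4,5,6']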