CLN: pandas.concat internal checks #57996

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged: 3 commits, Mar 26, 2024
25 changes: 13 additions & 12 deletions pandas/core/reshape/concat.py
@@ -635,16 +635,13 @@ def _get_concat_axis(self) -> Index:
                 indexes, self.keys, self.levels, self.names
             )
 
-        self._maybe_check_integrity(concat_axis)
-
-        return concat_axis
-
-    def _maybe_check_integrity(self, concat_index: Index) -> None:
         if self.verify_integrity:
-            if not concat_index.is_unique:
-                overlap = concat_index[concat_index.duplicated()].unique()
+            if not concat_axis.is_unique:
+                overlap = concat_axis[concat_axis.duplicated()].unique()
                 raise ValueError(f"Indexes have overlapping values: {overlap}")
 
+        return concat_axis
+
 
 def _clean_keys_and_objs(
     objs: Iterable[Series | DataFrame] | Mapping[HashableT, Series | DataFrame],
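
The inlined check above backs pd.concat's verify_integrity option. A minimal illustration of the behaviour this hunk preserves (example data and output are illustrative, not taken from the PR):

import pandas as pd

s1 = pd.Series([1, 2], index=["a", "b"])
s2 = pd.Series([3, 4], index=["b", "c"])

# Label "b" appears in both inputs' indexes, so the concatenated axis
# is not unique and the check above raises.
try:
    pd.concat([s1, s2], verify_integrity=True)
except ValueError as err:
    print(err)  # e.g. "Indexes have overlapping values: Index(['b'], ...)"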
@@ -742,6 +739,12 @@ def _concat_indexes(indexes) -> Index:
     return indexes[0].append(indexes[1:])
 
 
+def validate_unique_levels(levels: list[Index]) -> None:
+    for level in levels:
+        if not level.is_unique:
+            raise ValueError(f"Level values not unique: {level.tolist()}")
+
+
 def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiIndex:
     if (levels is None and isinstance(keys[0], tuple)) or (
         levels is not None and len(levels) > 1
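
validate_unique_levels centralises the duplicate-level check that the two call sites below now share. From the user's side it is reached when explicit levels passed to pd.concat contain duplicates; a short sketch of that path (example values are made up for illustration):

import pandas as pd

df1 = pd.DataFrame({"x": [1]})
df2 = pd.DataFrame({"x": [2]})

# A duplicated entry in an explicitly supplied level is rejected
# before the MultiIndex for the concat axis is built.
try:
    pd.concat([df1, df2], keys=["a", "b"], levels=[["a", "a", "b"]])
except ValueError as err:
    print(err)  # e.g. "Level values not unique: ['a', 'a', 'b']"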
@@ -754,6 +757,7 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiIndex:
             _, levels = factorize_from_iterables(zipped)
         else:
             levels = [ensure_index(x) for x in levels]
+            validate_unique_levels(levels)
     else:
         zipped = [keys]
         if names is None:
@@ -763,12 +767,9 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiIndex:
             levels = [ensure_index(keys).unique()]
         else:
             levels = [ensure_index(x) for x in levels]
+            validate_unique_levels(levels)
 
-    for level in levels:
-        if not level.is_unique:
-            raise ValueError(f"Level values not unique: {level.tolist()}")
-
-    if not all_indexes_same(indexes) or not all(level.is_unique for level in levels):
+    if not all_indexes_same(indexes):
         codes_list = []
 
         # things are potentially different sizes, so compute the exact codes
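
Because user-supplied levels are now validated as soon as they are built, and derived levels come from factorize_from_iterables or ensure_index(keys).unique() (unique by construction), the extra not all(level.is_unique for level in levels) clause became redundant and the condition reduces to all_indexes_same(indexes). A rough regression-style sketch of the two error paths the cleanup keeps intact (an illustration, not the PR's own tests):

import pandas as pd
import pytest

def test_verify_integrity_overlap():
    # Overlapping index labels still raise when verify_integrity=True.
    s1 = pd.Series([1], index=["a"])
    s2 = pd.Series([2], index=["a"])
    with pytest.raises(ValueError, match="Indexes have overlapping values"):
        pd.concat([s1, s2], verify_integrity=True)

def test_duplicate_levels():
    # Duplicated values in user-supplied levels still raise.
    df = pd.DataFrame({"x": [1]})
    with pytest.raises(ValueError, match="Level values not unique"):
        pd.concat([df, df], keys=["a", "b"], levels=[["a", "a", "b"]])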