Skip to content

REF: Deduplicate to_xml code #45132

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Jan 1, 2022
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions pandas/core/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -2948,8 +2948,8 @@ def to_xml(
root_name: str | None = "data",
row_name: str | None = "row",
na_rep: str | None = None,
attr_cols: str | list[str] | None = None,
elem_cols: str | list[str] | None = None,
attr_cols: list[str] | None = None,
elem_cols: list[str] | None = None,
namespaces: dict[str | None, str] | None = None,
prefix: str | None = None,
encoding: str = "utf-8",
Expand Down
69 changes: 28 additions & 41 deletions pandas/io/formats/xml.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
from pandas._typing import (
CompressionOptions,
FilePath,
ReadBuffer,
StorageOptions,
WriteBuffer,
)
Expand Down Expand Up @@ -96,7 +95,7 @@ class BaseXMLFormatter:
def __init__(
self,
frame: DataFrame,
path_or_buffer: FilePath | WriteBuffer[bytes] | None = None,
path_or_buffer: FilePath | WriteBuffer[bytes] | WriteBuffer[str] | None = None,
index: bool = True,
root_name: str | None = "data",
row_name: str | None = "row",
Expand All @@ -108,7 +107,7 @@ def __init__(
encoding: str = "utf-8",
xml_declaration: bool | None = True,
pretty_print: bool | None = True,
stylesheet: FilePath | ReadBuffer[str] | None = None,
stylesheet: FilePath | WriteBuffer[bytes] | WriteBuffer[str] | None = None,
compression: CompressionOptions = "infer",
storage_options: StorageOptions = None,
) -> None:
Expand All @@ -132,6 +131,11 @@ def __init__(
self.orig_cols = self.frame.columns.tolist()
self.frame_dicts = self.process_dataframe()

self.validate_columns()
self.validate_encoding()
self.prefix_uri = self.get_prefix_uri()
self.handle_indexes()

def build_tree(self) -> bytes:
"""
Build tree from data.
Expand Down Expand Up @@ -247,7 +251,7 @@ def other_namespaces(self) -> dict:

return nmsp_dict

def build_attribs(self, d: dict[str, Any], elem_row: Any) -> None:
    """
    Create attributes of row.

    Adds one XML attribute per entry in ``attr_cols`` to ``elem_row``:
    the flattened column name becomes the attribute name and the row
    value (stringified) becomes the attribute value.  NA values are
    skipped entirely, so missing data produces no attribute.

    Parameters
    ----------
    d : dict[str, Any]
        Mapping of column name to value for the current row.
    elem_row : Any
        The row element of the backend in use (etree or lxml element).

    Raises
    ------
    KeyError
        If a column listed in ``attr_cols`` is not present in ``d``.
    """
    # NOTE(review): this guard was in a collapsed region of the diff and is
    # reconstructed from context — confirm against the full source.
    if not self.attr_cols:
        return

    for col in self.attr_cols:
        attr_name = self._get_flat_col_name(col)
        try:
            # Simplification accepted in review: drop the intermediate
            # ``val`` variable and only set the attribute for non-NA values.
            if not isna(d[col]):
                elem_row.attrib[attr_name] = str(d[col])
        except KeyError:
            raise KeyError(f"no valid column, {col}")

Expand All @@ -277,7 +281,7 @@ def _get_flat_col_name(self, col: str | tuple) -> str:
)
return f"{self.prefix_uri}{flat_col}"

def build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
    """
    Create child elements of row.

    Abstract hook: concrete formatters (etree / lxml) override this to
    append one child element per entry in ``elem_cols`` onto ``elem_row``.

    Raises
    ------
    AbstractMethodError
        Always, when called on the base class.
    """
    raise AbstractMethodError(self)

def _build_elems(self, sub_element_cls) -> None:
def _build_elems(self, sub_element_cls, d: dict[str, Any], elem_row: Any) -> None:

if not self.elem_cols:
return

for col in self.elem_cols:
elem_name = self._get_flat_col_name(col)
try:
val = (
None if isna(self.d[col]) or self.d[col] == "" else str(self.d[col])
)
sub_element_cls(self.elem_row, elem_name).text = val
val = None if isna(d[col]) or d[col] == "" else str(d[col])
sub_element_cls(elem_row, elem_name).text = val
except KeyError:
raise KeyError(f"no valid column, {col}")

Expand Down Expand Up @@ -326,14 +328,6 @@ class EtreeXMLFormatter(BaseXMLFormatter):
modules: `xml.etree.ElementTree` and `xml.dom.minidom`.
"""

def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)

self.validate_columns()
self.validate_encoding()
self.handle_indexes()
self.prefix_uri = self.get_prefix_uri()

def build_tree(self) -> bytes:
from xml.etree.ElementTree import (
Element,
Expand All @@ -346,16 +340,15 @@ def build_tree(self) -> bytes:
)

for d in self.frame_dicts.values():
self.d = d
self.elem_row = SubElement(self.root, f"{self.prefix_uri}{self.row_name}")
elem_row = SubElement(self.root, f"{self.prefix_uri}{self.row_name}")

if not self.attr_cols and not self.elem_cols:
self.elem_cols = list(self.d.keys())
self.build_elems()
self.elem_cols = list(d.keys())
self.build_elems(d, elem_row)

else:
self.build_attribs()
self.build_elems()
self.build_attribs(d, elem_row)
self.build_elems(d, elem_row)

self.out_xml = tostring(self.root, method="xml", encoding=self.encoding)

Expand Down Expand Up @@ -392,10 +385,10 @@ def get_prefix_uri(self) -> str:

return uri

def build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
    """
    Create child elements of row using the ``xml.etree.ElementTree``
    backend, delegating to the shared ``_build_elems`` with etree's
    ``SubElement`` factory.
    """
    from xml.etree.ElementTree import SubElement

    self._build_elems(SubElement, d, elem_row)

def prettify_tree(self) -> bytes:
"""
Expand Down Expand Up @@ -447,12 +440,7 @@ class LxmlXMLFormatter(BaseXMLFormatter):
def __init__(self, *args, **kwargs) -> None:
    # All shared validation and setup (validate_columns, validate_encoding,
    # get_prefix_uri, handle_indexes) now runs in BaseXMLFormatter.__init__;
    # only the lxml-specific empty-namespace-key conversion remains here.
    super().__init__(*args, **kwargs)

    self.convert_empty_str_key()

def build_tree(self) -> bytes:
"""
Expand All @@ -470,16 +458,15 @@ def build_tree(self) -> bytes:
self.root = Element(f"{self.prefix_uri}{self.root_name}", nsmap=self.namespaces)

for d in self.frame_dicts.values():
self.d = d
self.elem_row = SubElement(self.root, f"{self.prefix_uri}{self.row_name}")
elem_row = SubElement(self.root, f"{self.prefix_uri}{self.row_name}")

if not self.attr_cols and not self.elem_cols:
self.elem_cols = list(self.d.keys())
self.build_elems()
self.elem_cols = list(d.keys())
self.build_elems(d, elem_row)

else:
self.build_attribs()
self.build_elems()
self.build_attribs(d, elem_row)
self.build_elems(d, elem_row)

self.out_xml = tostring(
self.root,
Expand Down Expand Up @@ -518,10 +505,10 @@ def get_prefix_uri(self) -> str:

return uri

def build_elems(self, d: dict[str, Any], elem_row: Any) -> None:
    """
    Create child elements of row using the lxml backend, delegating to
    the shared ``_build_elems`` with lxml's ``SubElement`` factory.
    """
    from lxml.etree import SubElement

    self._build_elems(SubElement, d, elem_row)

def transform_doc(self) -> bytes:
"""
Expand Down
17 changes: 11 additions & 6 deletions pandas/io/xml.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
FilePath,
ReadBuffer,
StorageOptions,
WriteBuffer,
)
from pandas.compat._optional import import_optional_dependency
from pandas.errors import (
Expand Down Expand Up @@ -569,11 +570,18 @@ def _transform_doc(self) -> bytes:


def get_data_from_filepath(
filepath_or_buffer: FilePath | bytes | ReadBuffer[bytes] | ReadBuffer[str],
filepath_or_buffer: FilePath
| bytes
| ReadBuffer[bytes]
| ReadBuffer[str]
| WriteBuffer[bytes]
| WriteBuffer[str],
encoding,
compression: CompressionOptions,
storage_options: StorageOptions,
) -> str | bytes | ReadBuffer[bytes] | ReadBuffer[str]:
) -> str | bytes | ReadBuffer[bytes] | ReadBuffer[str] | WriteBuffer[
bytes
] | WriteBuffer[str]:
"""
Extract raw XML data.

Expand Down Expand Up @@ -605,10 +613,7 @@ def get_data_from_filepath(
storage_options=storage_options,
) as handle_obj:
filepath_or_buffer = (
# error: Incompatible types in assignment (expression has type
# "Union[str, IO[str]]", variable has type "Union[Union[str,
# PathLike[str]], bytes, ReadBuffer[bytes], ReadBuffer[str]]")
handle_obj.handle.read() # type: ignore[assignment]
handle_obj.handle.read()
if hasattr(handle_obj.handle, "read")
else handle_obj.handle
)
Expand Down