
Commit 513083e

Merge pull request #1 from pandas-dev/main
DOC: fix EX03 errors in docstrings pandas-dev#56804
2 parents 4f27380 + c1e57c9 commit 513083e

2 files changed: +13 -13 lines


doc/source/user_guide/io.rst: +2 -2
@@ -18,10 +18,10 @@ The pandas I/O API is a set of top level ``reader`` functions accessed like
     :widths: 30, 100, 60, 60

     text,`CSV <https://en.wikipedia.org/wiki/Comma-separated_values>`__, :ref:`read_csv<io.read_csv_table>`, :ref:`to_csv<io.store_in_csv>`
-    text,Fixed-Width Text File, :ref:`read_fwf<io.fwf_reader>` , NA
+    text,Fixed-Width Text File, :ref:`read_fwf<io.fwf_reader>`, NA
     text,`JSON <https://www.json.org/>`__, :ref:`read_json<io.json_reader>`, :ref:`to_json<io.json_writer>`
     text,`HTML <https://en.wikipedia.org/wiki/HTML>`__, :ref:`read_html<io.read_html>`, :ref:`to_html<io.html>`
-    text,`LaTeX <https://en.wikipedia.org/wiki/LaTeX>`__, :ref:`Styler.to_latex<io.latex>` , NA
+    text,`LaTeX <https://en.wikipedia.org/wiki/LaTeX>`__, NA, :ref:`Styler.to_latex<io.latex>`
     text,`XML <https://www.w3.org/standards/xml/core>`__, :ref:`read_xml<io.read_xml>`, :ref:`to_xml<io.xml>`
     text, Local clipboard, :ref:`read_clipboard<io.clipboard>`, :ref:`to_clipboard<io.clipboard>`
     binary,`MS Excel <https://en.wikipedia.org/wiki/Microsoft_Excel>`__ , :ref:`read_excel<io.excel_reader>`, :ref:`to_excel<io.excel_writer>`
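
Note (not part of the commit): the corrected table rows reflect the asymmetry the docs describe, i.e. pandas has a fixed-width reader with no matching writer, and a LaTeX writer on the Styler with no matching reader. A minimal sketch of both, assuming only a pandas installation; the file name "data.fwf" and the column widths are placeholders for illustration:

    import pandas as pd

    # Reader with no writer counterpart: fixed-width text files.
    # "data.fwf" is a hypothetical path used only for illustration.
    df = pd.read_fwf("data.fwf", widths=[5, 10])

    # Writer with no reader counterpart: LaTeX output via the Styler.
    print(df.style.to_latex())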

pandas/io/json/_normalize.py: +11 -11
@@ -147,7 +147,7 @@ def nested_to_record(
     return new_ds


-def _normalise_json(
+def _normalize_json(
     data: Any,
     key_string: str,
     normalized_dict: dict[str, Any],
@@ -177,7 +177,7 @@ def _normalise_json(
             if not key_string:
                 new_key = new_key.removeprefix(separator)

-            _normalise_json(
+            _normalize_json(
                 data=value,
                 key_string=new_key,
                 normalized_dict=normalized_dict,
@@ -188,7 +188,7 @@ def _normalise_json(
     return normalized_dict


-def _normalise_json_ordered(data: dict[str, Any], separator: str) -> dict[str, Any]:
+def _normalize_json_ordered(data: dict[str, Any], separator: str) -> dict[str, Any]:
     """
     Order the top level keys and then recursively go to depth
@@ -201,10 +201,10 @@ def _normalise_json_ordered(data: dict[str, Any], separator: str) -> dict[str, A

     Returns
     -------
-    dict or list of dicts, matching `normalised_json_object`
+    dict or list of dicts, matching `normalized_json_object`
     """
     top_dict_ = {k: v for k, v in data.items() if not isinstance(v, dict)}
-    nested_dict_ = _normalise_json(
+    nested_dict_ = _normalize_json(
         data={k: v for k, v in data.items() if isinstance(v, dict)},
         key_string="",
         normalized_dict={},
@@ -235,7 +235,7 @@ def _simple_json_normalize(
     Returns
     -------
     frame : DataFrame
-    d - dict or list of dicts, matching `normalised_json_object`
+    d - dict or list of dicts, matching `normalized_json_object`

     Examples
     --------
@@ -256,14 +256,14 @@ def _simple_json_normalize(
     }

     """
-    normalised_json_object = {}
+    normalized_json_object = {}
     # expect a dictionary, as most jsons are. However, lists are perfectly valid
     if isinstance(ds, dict):
-        normalised_json_object = _normalise_json_ordered(data=ds, separator=sep)
+        normalized_json_object = _normalize_json_ordered(data=ds, separator=sep)
     elif isinstance(ds, list):
-        normalised_json_list = [_simple_json_normalize(row, sep=sep) for row in ds]
-        return normalised_json_list
-    return normalised_json_object
+        normalized_json_list = [_simple_json_normalize(row, sep=sep) for row in ds]
+        return normalized_json_list
+    return normalized_json_object


 def json_normalize(
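
Note (not part of the diff): the renamed private helpers are called from _simple_json_normalize, which serves as a fast path of the public pandas.json_normalize; their job is to flatten nested mappings into separator-joined column names. A minimal sketch of that behaviour from the public API, assuming only a pandas installation:

    import pandas as pd

    # json_normalize flattens nested dicts into dotted column names,
    # which is the work the renamed _normalize_json* helpers do internally.
    records = [{"id": 1, "location": {"city": "Lagos", "geo": {"lat": 6.5}}}]
    flat = pd.json_normalize(records, sep=".")
    print(flat.columns.tolist())  # ['id', 'location.city', 'location.geo.lat']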
