
Commit 06ff699

DOC: synchronize docs changes with pandas (#190)
The docs need some corrections in order to pass the pandas docs linter in pandas-dev/pandas#21628.
1 parent ade32a2 commit 06ff699

1 file changed, +87 -69 lines changed
pandas_gbq/gbq.py

@@ -470,9 +470,9 @@ def _parse_data(schema, rows):
 
 
 def read_gbq(query, project_id=None, index_col=None, col_order=None,
-             reauth=False, verbose=None, private_key=None,
-             auth_local_webserver=False, dialect='legacy', location=None,
-             configuration=None):
+             reauth=False, private_key=None, auth_local_webserver=False,
+             dialect='legacy', location=None, configuration=None,
+             verbose=None):
     r"""Load data from Google BigQuery using google-cloud-python
 
     The main method a user calls to execute a Query in Google BigQuery
@@ -488,63 +488,69 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
     Parameters
     ----------
     query : str
-        SQL-Like Query to return data values
-    project_id : str (optional when available in environment)
-        Google BigQuery Account project ID.
-    index_col : str (optional)
-        Name of result column to use for index in results DataFrame
-    col_order : list(str) (optional)
+        SQL-Like Query to return data values.
+    project_id : str, optional
+        Google BigQuery Account project ID. Optional when available from
+        the environment.
+    index_col : str, optional
+        Name of result column to use for index in results DataFrame.
+    col_order : list(str), optional
         List of BigQuery column names in the desired order for results
-        DataFrame
-    reauth : boolean (default False)
-        Force Google BigQuery to reauthenticate the user. This is useful
+        DataFrame.
+    reauth : boolean, default False
+        Force Google BigQuery to re-authenticate the user. This is useful
         if multiple accounts are used.
-    private_key : str (optional)
+    private_key : str, optional
         Service account private key in JSON format. Can be file path
         or string contents. This is useful for remote server
-        authentication (eg. jupyter iPython notebook on remote host)
+        authentication (eg. Jupyter/IPython notebook on remote host).
     auth_local_webserver : boolean, default False
-        Use the [local webserver flow] instead of the [console flow] when
-        getting user credentials. A file named bigquery_credentials.dat will
-        be created in current dir. You can also set PANDAS_GBQ_CREDENTIALS_FILE
-        environment variable so as to define a specific path to store this
-        credential (eg. /etc/keys/bigquery.dat).
+        Use the `local webserver flow`_ instead of the `console flow`_
+        when getting user credentials.
 
-        .. [local webserver flow]
+        .. _local webserver flow:
             http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
-        .. [console flow]
+        .. _console flow:
             http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console
-        .. versionadded:: 0.2.0
 
-    dialect : {'legacy', 'standard'}, default 'legacy'
-        'legacy' : Use BigQuery's legacy SQL dialect.
-        'standard' : Use BigQuery's standard SQL (beta), which is
-        compliant with the SQL 2011 standard. For more information
-        see `BigQuery SQL Reference
-        <https://cloud.google.com/bigquery/sql-reference/>`__
-    location : str (optional)
+        .. versionadded:: 0.2.0
+    dialect : str, default 'legacy'
+        SQL syntax dialect to use. Value can be one of:
+
+        ``'legacy'``
+            Use BigQuery's legacy SQL dialect. For more information see
+            `BigQuery Legacy SQL Reference
+            <https://cloud.google.com/bigquery/docs/reference/legacy-sql>`__.
+        ``'standard'``
+            Use BigQuery's standard SQL, which is
+            compliant with the SQL 2011 standard. For more information
+            see `BigQuery Standard SQL Reference
+            <https://cloud.google.com/bigquery/docs/reference/standard-sql/>`__.
+    location : str, optional
         Location where the query job should run. See the `BigQuery locations
         documentation
         <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a
         list of available locations. The location must match that of any
         datasets used in the query.
+
         .. versionadded:: 0.5.0
-    configuration : dict (optional)
+    configuration : dict, optional
         Query config parameters for job processing.
         For example:
 
             configuration = {'query': {'useQueryCache': False}}
 
-        For more information see `BigQuery SQL Reference
-        <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query>`__
-
+        For more information see `BigQuery REST API Reference
+        <https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query>`__.
     verbose : None, deprecated
+        Deprecated in Pandas-GBQ 0.4.0. Use the `logging module
+        to adjust verbosity instead
+        <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__.
 
     Returns
     -------
     df: DataFrame
-        DataFrame representing results of query
-
+        DataFrame representing results of query.
     """
 
     _test_google_api_imports()
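
For context, a minimal sketch of a read_gbq call exercising the parameters documented above; the project ID, dataset, and query are hypothetical placeholders, not part of this commit:

import pandas_gbq

# NOTE: project ID and query below are hypothetical placeholders.
sql = """
SELECT name, SUM(number) AS total
FROM `bigquery-public-data.usa_names.usa_1910_2013`
GROUP BY name
"""
df = pandas_gbq.read_gbq(
    sql,
    project_id='my-project',    # optional when available from the environment
    dialect='standard',         # standard SQL instead of the 'legacy' default
    configuration={'query': {'useQueryCache': False}},  # per the docstring example
)
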
@@ -603,9 +609,9 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
 
 
 def to_gbq(dataframe, destination_table, project_id=None, chunksize=None,
-           verbose=None, reauth=False, if_exists='fail', private_key=None,
+           reauth=False, if_exists='fail', private_key=None,
            auth_local_webserver=False, table_schema=None, location=None,
-           progress_bar=True):
+           progress_bar=True, verbose=None):
     """Write a DataFrame to a Google BigQuery table.
 
     The main method a user calls to export pandas DataFrame contents to
@@ -621,54 +627,66 @@ def to_gbq(dataframe, destination_table, project_id=None, chunksize=None,
     Parameters
     ----------
     dataframe : pandas.DataFrame
-        DataFrame to be written
+        DataFrame to be written to a Google BigQuery table.
     destination_table : str
-        Name of table to be written, in the form 'dataset.tablename'
-    project_id : str (optional when available in environment)
-        Google BigQuery Account project ID.
-    chunksize : int (default None)
-        Number of rows to be inserted in each chunk from the dataframe. Use
-        ``None`` to load the dataframe in a single chunk.
-    reauth : boolean (default False)
-        Force Google BigQuery to reauthenticate the user. This is useful
+        Name of table to be written, in the form ``dataset.tablename``.
+    project_id : str, optional
+        Google BigQuery Account project ID. Optional when available from
+        the environment.
+    chunksize : int, optional
+        Number of rows to be inserted in each chunk from the dataframe.
+        Set to ``None`` to load the whole dataframe at once.
+    reauth : bool, default False
+        Force Google BigQuery to re-authenticate the user. This is useful
         if multiple accounts are used.
-    if_exists : {'fail', 'replace', 'append'}, default 'fail'
-        'fail': If table exists, do nothing.
-        'replace': If table exists, drop it, recreate it, and insert data.
-        'append': If table exists and the dataframe schema is a subset of
-        the destination table schema, insert data. Create destination table
-        if does not exist.
-    private_key : str (optional)
+    if_exists : str, default 'fail'
+        Behavior when the destination table exists. Value can be one of:
+
+        ``'fail'``
+            If table exists, do nothing.
+        ``'replace'``
+            If table exists, drop it, recreate it, and insert data.
+        ``'append'``
+            If table exists, insert data. Create if does not exist.
+    private_key : str, optional
         Service account private key in JSON format. Can be file path
         or string contents. This is useful for remote server
-        authentication (eg. jupyter iPython notebook on remote host)
-    auth_local_webserver : boolean, default False
-        Use the [local webserver flow] instead of the [console flow] when
-        getting user credentials.
+        authentication (eg. Jupyter/IPython notebook on remote host).
+    auth_local_webserver : bool, default False
+        Use the `local webserver flow`_ instead of the `console flow`_
+        when getting user credentials.
 
-        .. [local webserver flow]
+        .. _local webserver flow:
             http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
-        .. [console flow]
+        .. _console flow:
             http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console
+
         .. versionadded:: 0.2.0
-    table_schema : list of dicts
-        List of BigQuery table fields to which according DataFrame columns
-        conform to, e.g. `[{'name': 'col1', 'type': 'STRING'},...]`. If
-        schema is not provided, it will be generated according to dtypes
-        of DataFrame columns. See BigQuery API documentation on available
-        names of a field.
+    table_schema : list of dicts, optional
+        List of BigQuery table fields to which according DataFrame
+        columns conform to, e.g. ``[{'name': 'col1', 'type':
+        'STRING'},...]``. If schema is not provided, it will be
+        generated according to dtypes of DataFrame columns. See
+        BigQuery API documentation on available names of a field.
+
        .. versionadded:: 0.3.1
-    location : str (optional)
+    location : str, optional
         Location where the load job should run. See the `BigQuery locations
         documentation
         <https://cloud.google.com/bigquery/docs/dataset-locations>`__ for a
         list of available locations. The location must match that of the
         target dataset.
+
         .. versionadded:: 0.5.0
-    progress_bar : boolean, True by default. It uses the library `tqdm` to show
-        the progress bar for the upload, chunk by chunk.
+    progress_bar : bool, default True
+        Use the library `tqdm` to show the progress bar for the upload,
+        chunk by chunk.
+
         .. versionadded:: 0.5.0
-    verbose : None, deprecated
+    verbose : bool, deprecated
+        Deprecated in Pandas-GBQ 0.4.0. Use the `logging module
+        to adjust verbosity instead
+        <https://pandas-gbq.readthedocs.io/en/latest/intro.html#logging>`__.
     """
 
     _test_google_api_imports()
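
Likewise, a minimal sketch of a to_gbq call using the reworked if_exists and table_schema parameters; the destination table and project ID are hypothetical placeholders:

import pandas as pd
import pandas_gbq

df = pd.DataFrame({'col1': ['a', 'b'], 'col2': [1, 2]})

# NOTE: destination table and project ID below are hypothetical placeholders.
pandas_gbq.to_gbq(
    df,
    'my_dataset.my_table',      # in the form dataset.tablename
    project_id='my-project',
    if_exists='append',         # insert data; create the table if it does not exist
    table_schema=[{'name': 'col1', 'type': 'STRING'},
                  {'name': 'col2', 'type': 'INTEGER'}],
)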

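Both docstrings now point users at the logging module instead of the deprecated verbose argument. A minimal sketch of that replacement, assuming the 'pandas_gbq' logger name described in the pandas-gbq logging documentation linked above:

import logging

# Send pandas-gbq's log output to the console and raise its verbosity,
# replacing the deprecated verbose=True argument.
logger = logging.getLogger('pandas_gbq')
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
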