Skip to content

Commit 37b2a08

Browse files
committed
DOC: render BigQuery auth flows as links
Also, adjust line breaks per review comments.
1 parent b6fdf37 commit 37b2a08

File tree

2 files changed

+40
-38
lines changed

2 files changed

+40
-38
lines changed

pandas/core/frame.py

+34-31
Original file line number | Diff line number | Diff line change
@@ -1116,10 +1116,9 @@ def to_dict(self, orient='dict', into=dict):
11161116
else:
11171117
raise ValueError("orient '%s' not understood" % orient)
11181118

1119-
def to_gbq(
1120-
self, destination_table, project_id, chunksize=10000,
1121-
verbose=True, reauth=False, if_exists='fail', private_key=None,
1122-
auth_local_webserver=False, table_schema=None):
1119+
def to_gbq(self, destination_table, project_id, chunksize=None,
1120+
verbose=True, reauth=False, if_exists='fail', private_key=None,
1121+
auth_local_webserver=False, table_schema=None):
11231122
"""
11241123
Write a DataFrame to a Google BigQuery table.
11251124
@@ -1128,57 +1127,61 @@ def to_gbq(
11281127
11291128
Authentication to the Google BigQuery service is via OAuth 2.0.
11301129
1131-
- If "private_key" is not provided:
1130+
- If ``private_key`` is provided, the library loads the JSON service
1131+
account credentials and uses those to authenticate.
11321132
1133-
By default "application default credentials" are used.
1133+
- If no ``private_key`` is provided, the library tries `application
1134+
default credentials`_.
11341135
1135-
If default application credentials are not found or are restrictive,
1136-
user account credentials are used. In this case, you will be asked to
1137-
grant permissions for product name 'pandas GBQ'.
1136+
.. _application default credentials:
1137+
https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application
11381138
1139-
- If "private_key" is provided:
1140-
1141-
Service account credentials will be used to authenticate.
1139+
- If application default credentials are not found or cannot be used
1140+
with BigQuery, the library authenticates with user account
1141+
credentials. In this case, you will be asked to grant permissions
1142+
for product name 'pandas GBQ'.
11421143
11431144
Parameters
11441145
----------
1145-
dataframe : DataFrame
1146-
DataFrame to be written to Google BigQuery.
1147-
destination_table : string
1146+
destination_table : str
11481147
Name of table to be written, in the form 'dataset.tablename'.
11491148
project_id : str
11501149
Google BigQuery Account project ID.
1151-
chunksize : int (default 10000)
1150+
chunksize : int, optional
11521151
Number of rows to be inserted in each chunk from the dataframe.
11531152
Set to ``None`` to load the whole dataframe at once.
1154-
verbose : boolean (default True)
1153+
verbose : bool, default True
11551154
Show percentage complete.
1156-
reauth : boolean (default False)
1155+
reauth : bool, default False
11571156
Force Google BigQuery to reauthenticate the user. This is useful
11581157
if multiple accounts are used.
1159-
if_exists : {'fail', 'replace', 'append'}, default 'fail'
1160-
Behavior when the destination table exists.
1161-
'fail': If table exists, do nothing.
1162-
'replace': If table exists, drop it, recreate it, and insert data.
1163-
'append': If table exists, insert data. Create if does not exist.
1164-
private_key : str (optional)
1158+
if_exists : str, default 'fail'
1159+
Behavior when the destination table exists. Value can be one of:
1160+
1161+
``'fail'``
1162+
If table exists, do nothing.
1163+
``'replace'``
1164+
If table exists, drop it, recreate it, and insert data.
1165+
``'append'``
1166+
If table exists, insert data. Create if does not exist.
1167+
private_key : str, optional
11651168
Service account private key in JSON format. Can be file path
11661169
or string contents. This is useful for remote server
11671170
authentication (eg. Jupyter/IPython notebook on remote host).
1168-
auth_local_webserver : boolean (default False)
1169-
Use the [local webserver flow] instead of the [console flow]
1171+
auth_local_webserver : bool, default False
1172+
Use the `local webserver flow`_ instead of the `console flow`_
11701173
when getting user credentials.
11711174
1172-
.. [local webserver flow]
1175+
.. _local webserver flow:
11731176
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
1174-
.. [console flow]
1177+
.. _console flow:
11751178
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console
11761179
11771180
*New in version 0.2.0 of pandas-gbq*.
1178-
table_schema : list of dicts (optional)
1181+
table_schema : list of dicts, optional
11791182
List of BigQuery table fields to which according DataFrame
1180-
columns conform to, e.g. `[{'name': 'col1', 'type':
1181-
'STRING'},...]`. If schema is not provided, it will be
1183+
columns conform to, e.g. ``[{'name': 'col1', 'type':
1184+
'STRING'},...]``. If schema is not provided, it will be
11821185
generated according to dtypes of DataFrame columns. See
11831186
BigQuery API documentation on available names of a field.
11841187

pandas/io/gbq.py

+6-7
Original file line number | Diff line number | Diff line change
@@ -21,9 +21,9 @@ def _try_import():
2121
return pandas_gbq
2222

2323

24-
def read_gbq(
25-
query, project_id=None, index_col=None, col_order=None, reauth=False,
26-
verbose=True, private_key=None, dialect='legacy', **kwargs):
24+
def read_gbq(query, project_id=None, index_col=None, col_order=None,
25+
reauth=False, verbose=True, private_key=None, dialect='legacy',
26+
**kwargs):
2727
"""
2828
Load data from Google BigQuery.
2929
@@ -101,10 +101,9 @@ def read_gbq(
101101
**kwargs)
102102

103103

104-
def to_gbq(
105-
dataframe, destination_table, project_id, chunksize=10000,
106-
verbose=True, reauth=False, if_exists='fail', private_key=None,
107-
auth_local_webserver=False, table_schema=None):
104+
def to_gbq(dataframe, destination_table, project_id, chunksize=None,
105+
verbose=True, reauth=False, if_exists='fail', private_key=None,
106+
auth_local_webserver=False, table_schema=None):
108107
pandas_gbq = _try_import()
109108
return pandas_gbq.to_gbq(
110109
dataframe, destination_table, project_id, chunksize=chunksize,

0 commit comments

Comments (0)