Skip to content

Commit bee847b

Browse files
committed
DOC: remove dead link to pandas.io.to_gbq
1 parent 0429227 commit bee847b

File tree

2 files changed

+0
-71
lines changed

2 files changed

+0
-71
lines changed

pandas/core/frame.py

-1
Original file line number | Diff line number | Diff line change
@@ -1186,7 +1186,6 @@ def to_gbq(
11861186
See Also
11871187
--------
11881188
pandas_gbq.to_gbq
1189-
pandas.io.to_gbq
11901189
"""
11911190
from pandas.io import gbq
11921191
return gbq.to_gbq(

pandas/io/gbq.py

-70
Original file line number | Diff line number | Diff line change
@@ -104,76 +104,6 @@ def to_gbq(
104104
dataframe, destination_table, project_id, chunksize=10000,
105105
verbose=True, reauth=False, if_exists='fail', private_key=None,
106106
**kwargs):
107-
"""
108-
Write a DataFrame to a Google BigQuery table.
109-
110-
This function requires the `pandas-gbq package
111-
<https://pandas-gbq.readthedocs.io>`__.
112-
113-
Authentication to the Google BigQuery service is via OAuth 2.0.
114-
115-
- If "private_key" is not provided:
116-
117-
By default "application default credentials" are used.
118-
119-
If default application credentials are not found or are restrictive,
120-
user account credentials are used. In this case, you will be asked to
121-
grant permissions for product name 'pandas GBQ'.
122-
123-
- If "private_key" is provided:
124-
125-
Service account credentials will be used to authenticate.
126-
127-
Parameters
128-
----------
129-
dataframe : DataFrame
130-
DataFrame to be written.
131-
destination_table : string
132-
Name of table to be written, in the form 'dataset.tablename'.
133-
project_id : str
134-
Google BigQuery Account project ID.
135-
chunksize : int (default 10000)
136-
Number of rows to be inserted in each chunk from the dataframe.
137-
Set to ``None`` to load the whole dataframe at once.
138-
verbose : boolean (default True)
139-
Show percentage complete.
140-
reauth : boolean (default False)
141-
Force Google BigQuery to reauthenticate the user. This is useful
142-
if multiple accounts are used.
143-
if_exists : {'fail', 'replace', 'append'}, default 'fail'
144-
Behavior when the destination table exists.
145-
'fail': If table exists, do nothing.
146-
'replace': If table exists, drop it, recreate it, and insert data.
147-
'append': If table exists, insert data. Create if does not exist.
148-
private_key : str (optional)
149-
Service account private key in JSON format. Can be file path
150-
or string contents. This is useful for remote server
151-
authentication (eg. Jupyter/IPython notebook on remote host).
152-
kwargs : dict
153-
Arbitrary keyword arguments.
154-
155-
auth_local_webserver (boolean): default False
156-
Use the [local webserver flow] instead of the [console flow] when
157-
getting user credentials.
158-
159-
.. [local webserver flow]
160-
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_local_server
161-
.. [console flow]
162-
http://google-auth-oauthlib.readthedocs.io/en/latest/reference/google_auth_oauthlib.flow.html#google_auth_oauthlib.flow.InstalledAppFlow.run_console
163-
.. versionadded:: pandas-gbq 0.2.0
164-
table_schema (list of dicts):
165-
List of BigQuery table fields to which according DataFrame columns
166-
conform to, e.g. `[{'name': 'col1', 'type': 'STRING'},...]`. If
167-
schema is not provided, it will be generated according to dtypes
168-
of DataFrame columns. See BigQuery API documentation on available
169-
names of a field.
170-
.. versionadded:: pandas-gbq 0.3.1
171-
172-
See Also
173-
--------
174-
pandas_gbq.to_gbq
175-
pandas.DataFrame.to_gbq
176-
"""
177107
pandas_gbq = _try_import()
178108
pandas_gbq.to_gbq(
179109
dataframe, destination_table, project_id, chunksize=chunksize,

0 commit comments

Comments (0)