4
4
import logging
5
5
from time import sleep
6
6
import uuid
7
+ import pandas.json
7
8
8
9
import numpy as np
9
10
@@ -109,9 +110,10 @@ class TableCreationError(PandasError, ValueError):
109
110
110
111
class GbqConnector(object):

    def __init__(self, project_id, key_file, reauth=False):
        """Connect to Google BigQuery using a service-account key file.

        Parameters
        ----------
        project_id : str
            Google BigQuery project id.
        key_file : str
            Path to a JSON service-account key file used to build
            credentials (see ``get_credentials``).
        reauth : bool, default False
            Kept for interface compatibility; stored on the instance.
        """
        # Fail fast if the Google API client libraries are missing.
        self.test_google_api_imports()
        # Record connection parameters before building the service handle.
        self.project_id = project_id
        self.key_file = key_file
        self.reauth = reauth
        # Credentials are derived from the key file, then used to build
        # the authorized BigQuery service object.
        self.credentials = self.get_credentials()
        self.service = self.get_service(self.credentials)
@@ -129,22 +131,15 @@ def test_google_api_imports(self):
129
131
raise ImportError ("Missing module required for Google BigQuery support: {0}" .format (str (e )))
130
132
131
133
def get_credentials (self ):
132
- from oauth2client .client import OAuth2WebServerFlow
133
- from oauth2client .file import Storage
134
- from oauth2client .tools import run_flow , argparser
135
-
136
- _check_google_client_version ()
137
-
138
- flow = OAuth2WebServerFlow (client_id = '495642085510-k0tmvj2m941jhre2nbqka17vqpjfddtd.apps.googleusercontent.com' ,
139
- client_secret = 'kOc9wMptUtxkcIFbtZCcrEAc' ,
140
- scope = 'https://www.googleapis.com/auth/bigquery' ,
141
- redirect_uri = 'urn:ietf:wg:oauth:2.0:oob' )
142
-
143
- storage = Storage ('bigquery_credentials.dat' )
144
- credentials = storage .get ()
134
+ from oauth2client .client import SignedJwtAssertionCredentials
145
135
146
- if credentials is None or credentials .invalid or self .reauth :
147
- credentials = run_flow (flow , storage , argparser .parse_args ([]))
136
+ scope = 'https://www.googleapis.com/auth/bigquery'
137
+ with open (self .key_file ) as key_file :
138
+ key = pandas .json .load (key_file )
139
+ credentials = SignedJwtAssertionCredentials (
140
+ key ['client_email' ],
141
+ key ['private_key' ],
142
+ scope )
148
143
149
144
return credentials
150
145
@@ -185,8 +180,7 @@ def process_insert_errors(insert_errors, verbose):
185
180
for error in errors :
186
181
reason = error ['reason' ]
187
182
message = error ['message' ]
188
- location = error ['location' ]
189
- error_message = 'Error at Row: {0}, Reason: {1}, Location: {2}, Message: {3}' .format (row , reason , location , message )
183
+ error_message = 'Error at Row: {0}, Reason: {1}, Message: {2}' .format (row , reason , message )
190
184
191
185
# Report all error messages if verbose is set
192
186
if verbose :
@@ -386,7 +380,7 @@ def _parse_entry(field_value, field_type):
386
380
return field_value
387
381
388
382
389
- def read_gbq (query , project_id = None , index_col = None , col_order = None , reauth = False , verbose = True ):
383
+ def read_gbq (query , key_file , project_id = None , index_col = None , col_order = None , reauth = False , verbose = True ):
390
384
"""Load data from Google BigQuery.
391
385
392
386
THIS IS AN EXPERIMENTAL LIBRARY
@@ -424,7 +418,7 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None, reauth=Fals
424
418
if not project_id :
425
419
raise TypeError ("Missing required parameter: project_id" )
426
420
427
- connector = GbqConnector (project_id , reauth = reauth )
421
+ connector = GbqConnector (project_id , key_file , reauth = reauth )
428
422
schema , pages = connector .run_query (query , verbose = verbose )
429
423
dataframe_list = []
430
424
while len (pages ) > 0 :
@@ -462,7 +456,7 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None, reauth=Fals
462
456
return final_df
463
457
464
458
465
- def to_gbq (dataframe , destination_table , project_id , chunksize = 10000 ,
459
+ def to_gbq (dataframe , destination_table , project_id , key_file , chunksize = 10000 ,
466
460
verbose = True , reauth = False , if_exists = 'fail' ):
467
461
"""Write a DataFrame to a Google BigQuery table.
468
462
@@ -495,7 +489,7 @@ def to_gbq(dataframe, destination_table, project_id, chunksize=10000,
495
489
if '.' not in destination_table :
496
490
raise NotFoundException ("Invalid Table Name. Should be of the form 'datasetId.tableId' " )
497
491
498
- connector = GbqConnector (project_id , reauth = reauth )
492
+ connector = GbqConnector (project_id , key_file , reauth = reauth )
499
493
dataset_id , table_id = destination_table .rsplit ('.' , 1 )
500
494
501
495
table = _Table (project_id , dataset_id , reauth = reauth )
0 commit comments