Commit 9e623fb

Reduce verbosity of logging (#201)
* reduce verbosity of logging
* changelog
1 parent 2608e1e commit 9e623fb

File tree

docs/source/changelog.rst
pandas_gbq/gbq.py

2 files changed: +6 additions, -9 deletions

docs/source/changelog.rst

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,8 @@ Changelog
   ``DataFrame`` construction to the Pandas library, radically reducing
   the number of loops that execute in python
   (:issue:`128`)
+- Reduced verbosity of logging from ``read_gbq``, particularly for short
+  queries. (:issue:`201`)
 
 .. _changelog-0.6.0:
 
pandas_gbq/gbq.py

Lines changed: 4 additions & 9 deletions
@@ -240,12 +240,12 @@ def run_query(self, query, **kwargs):
         self._start_timer()
 
         try:
-            logger.info('Requesting query... ')
+            logger.debug('Requesting query... ')
             query_reply = self.client.query(
                 query,
                 job_config=bigquery.QueryJobConfig.from_api_repr(job_config),
                 location=self.location)
-            logger.info('ok.\nQuery running...')
+            logger.info('Query running...')
         except (RefreshError, ValueError):
             if self.private_key:
                 raise AccessDenied(
@@ -258,7 +258,7 @@ def run_query(self, query, **kwargs):
             self.process_http_error(ex)
 
         job_id = query_reply.job_id
-        logger.info('Job ID: %s\nQuery running...' % job_id)
+        logger.debug('Job ID: %s' % job_id)
 
         while query_reply.state != 'DONE':
             self.log_elapsed_seconds(' Elapsed', 's. Waiting...')
@@ -303,8 +303,7 @@ def run_query(self, query, **kwargs):
                        for field in rows_iter.schema],
         }
 
-        # log basic query stats
-        logger.info('Got {} rows.\n'.format(total_rows))
+        logger.debug('Got {} rows.\n'.format(total_rows))
 
         return schema, result_rows
 
@@ -314,7 +313,6 @@ def load_data(
         from pandas_gbq import load
 
         total_rows = len(dataframe)
-        logger.info("\n\n")
 
         try:
             chunks = load.load_chunks(self.client, dataframe, dataset_id,
@@ -328,8 +326,6 @@ def load_data(
         except self.http_error as ex:
             self.process_http_error(ex)
 
-        logger.info("\n")
-
     def schema(self, dataset_id, table_id):
         """Retrieve the schema of the table
 
@@ -611,7 +607,6 @@ def read_gbq(query, project_id=None, index_col=None, col_order=None,
     connector.log_elapsed_seconds(
         'Total time taken',
         datetime.now().strftime('s.\nFinished at %Y-%m-%d %H:%M:%S.'),
-        0
     )
 
     return final_df
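
With these messages demoted from INFO to DEBUG, callers who still want the per-query detail (the 'Requesting query', 'Job ID', and row-count records) can opt back in through the standard logging module. A minimal sketch, assuming pandas-gbq names its loggers under the 'pandas_gbq' package via the usual logging.getLogger(__name__) convention; the project id below is a hypothetical placeholder:

import logging

import pandas_gbq

# Attach a handler to the root logger so records have somewhere to go.
logging.basicConfig()

# Lower the threshold for the pandas_gbq loggers so records now emitted at
# DEBUG level ('Requesting query... ', 'Job ID: ...', 'Got N rows.') are
# visible again; all other libraries keep their default verbosity.
logging.getLogger('pandas_gbq').setLevel(logging.DEBUG)

df = pandas_gbq.read_gbq('SELECT 1 AS x', project_id='my-project')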
