From 33bb8726b3bdeda97bfda78229ca21b532289789 Mon Sep 17 00:00:00 2001 From: Ryo Kitagawa Date: Sun, 19 Jan 2025 16:17:04 +0900 Subject: [PATCH 1/8] fix: convert to datetime64[us] when TIMESTAMP or DATETIME is empty --- pandas_gbq/gbq.py | 9 +++++++++ tests/unit/test_gbq.py | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/pandas_gbq/gbq.py b/pandas_gbq/gbq.py index feffd858..893792f6 100644 --- a/pandas_gbq/gbq.py +++ b/pandas_gbq/gbq.py @@ -630,6 +630,7 @@ def _finalize_dtypes( """ import db_dtypes import pandas.api.types + import pandas # If you update this mapping, also update the table at # `docs/reading.rst`. @@ -638,6 +639,14 @@ def _finalize_dtypes( "DATETIME": "datetime64[ns]", "TIMESTAMP": "datetime64[ns]", } + if pandas.__version__ > "2.0.0": + # when pandas is 2.0.0 or later, default timestamp dtype is 'datetime64[us]' + # and we should use 'datetime64[us]' instead of 'datetime64[ns]' + dtype_map = { + "DATE": db_dtypes.DateDtype(), + "DATETIME": "datetime64[us]", + "TIMESTAMP": pandas.DatetimeTZDtype(unit="us", tz="UTC"), + } for field in schema_fields: # This method doesn't modify ARRAY/REPEATED columns. diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index 75574820..b0c6c0bf 100644 --- a/tests/unit/test_gbq.py +++ b/tests/unit/test_gbq.py @@ -113,6 +113,39 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): assert result == {"x": expected} +@pytest.mark.parametrize( + ("data", "schema_type", "expected"), + [ + ( + pandas.to_datetime(["2017-01-01T12:00:00Z"]).astype(pandas.DatetimeTZDtype(unit="us", tz="UTC")), + "TIMESTAMP", + pandas.DatetimeTZDtype(unit="us", tz="UTC"), + ), + ( + pandas.to_datetime([]).astype(object), + "TIMESTAMP", + pandas.DatetimeTZDtype(unit="us", tz="UTC"), + ), + ( + pandas.to_datetime(["2017-01-01T12:00:00"]).astype("datetime64[us]"), + "DATETIME", + numpy.dtype("datetime64[us]"), + ), + ( + pandas.to_datetime([]).astype(object), + "DATETIME", + numpy.dtype("datetime64[us]"), + ), + ], +) +def test__finalize_dtypes(data, schema_type, expected): + result = gbq._finalize_dtypes( + pandas.DataFrame(dict(x=data)), + [dict(name="x", type=schema_type, mode="NULLABLE")], + ) + assert result["x"].dtype == expected + + @pytest.mark.parametrize( ["query_or_table", "expected"], [ From 99b011326ccdfb9c74386ace0d922ac3d65266a2 Mon Sep 17 00:00:00 2001 From: Ryo Kitagawa Date: Wed, 29 Jan 2025 22:01:42 +0900 Subject: [PATCH 2/8] doc: update datetime64[ns] to datetime64[us] for pandas version >= 2.0.0 --- docs/reading.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/reading.rst b/docs/reading.rst index 5fa369a7..0b4a6ac4 100644 --- a/docs/reading.rst +++ b/docs/reading.rst @@ -56,17 +56,17 @@ Inferring the DataFrame's dtypes The :func:`~pandas_gbq.read_gbq` method infers the pandas dtype for each column, based on the BigQuery table schema. 
-================== ========================= +================== ============================================ BigQuery Data Type dtype -================== ========================= +================== ============================================ BOOL boolean INT64 Int64 FLOAT64 float64 TIME dbtime DATE dbdate or object -DATETIME datetime64[ns] or object -TIMESTAMP datetime64[ns, UTC] or object -================== ========================= +DATETIME datetime64[ns] (datetime64[us] if pandas version >= 2.0.0) or object +TIMESTAMP datetime64[ns, UTC] (datetime64[us, UTC] if pandas version >= 2.0.0) or object +================== ============================================ If any DATE/DATETIME/TIMESTAMP value is outside of the range of `pandas.Timestamp.min `__ From 7698f5a9b2767e56b8079a75cb3fee8cd347befd Mon Sep 17 00:00:00 2001 From: Ryo Kitagawa Date: Thu, 30 Jan 2025 17:08:33 +0900 Subject: [PATCH 3/8] fix: skip test__bqschema_to_nullsafe_dtypes for pandas < 2.0.0 --- tests/unit/test_gbq.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index b0c6c0bf..51776534 100644 --- a/tests/unit/test_gbq.py +++ b/tests/unit/test_gbq.py @@ -113,11 +113,16 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): assert result == {"x": expected} +@pytest.mark.skipif( + pandas.__version__ < "2.0.0", reason="requires pandas 2.0.0 or higher" +) @pytest.mark.parametrize( ("data", "schema_type", "expected"), [ ( - pandas.to_datetime(["2017-01-01T12:00:00Z"]).astype(pandas.DatetimeTZDtype(unit="us", tz="UTC")), + pandas.to_datetime(["2017-01-01T12:00:00Z"]).astype( + pandas.DatetimeTZDtype(unit="us", tz="UTC") + ), "TIMESTAMP", pandas.DatetimeTZDtype(unit="us", tz="UTC"), ), From 10448874b79ae71c710ff2d00ad564a84af10145 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Wed, 5 Feb 2025 15:58:22 -0600 Subject: [PATCH 4/8] Apply suggestions from code review --- docs/reading.rst | 4 ++-- pandas_gbq/gbq.py | 4 ++-- tests/unit/test_gbq.py | 3 ++- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/reading.rst b/docs/reading.rst index 0b4a6ac4..d48504f8 100644 --- a/docs/reading.rst +++ b/docs/reading.rst @@ -64,8 +64,8 @@ INT64 Int64 FLOAT64 float64 TIME dbtime DATE dbdate or object -DATETIME datetime64[ns] (datetime64[us] if pandas version >= 2.0.0) or object -TIMESTAMP datetime64[ns, UTC] (datetime64[us, UTC] if pandas version >= 2.0.0) or object +DATETIME datetime64[ns] (datetime64[us] if pandas version >= 2.1.0) or object +TIMESTAMP datetime64[ns, UTC] (datetime64[us, UTC] if pandas version >= 2.1.0) or object ================== ============================================ If any DATE/DATETIME/TIMESTAMP value is outside of the range of `pandas.Timestamp.min diff --git a/pandas_gbq/gbq.py b/pandas_gbq/gbq.py index 893792f6..061eacc6 100644 --- a/pandas_gbq/gbq.py +++ b/pandas_gbq/gbq.py @@ -639,8 +639,8 @@ def _finalize_dtypes( "DATETIME": "datetime64[ns]", "TIMESTAMP": "datetime64[ns]", } - if pandas.__version__ > "2.0.0": - # when pandas is 2.0.0 or later, default timestamp dtype is 'datetime64[us]' + if tuple(int(part) for part in pandas.__version__.split()[:2]) >= (2, 1): + # when pandas is 2.1.0 or later, default timestamp dtype is 'datetime64[us]' # and we should use 'datetime64[us]' instead of 'datetime64[ns]' dtype_map = { "DATE": db_dtypes.DateDtype(), diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index 51776534..6b7dc11b 100644 --- a/tests/unit/test_gbq.py +++ 
b/tests/unit/test_gbq.py @@ -114,7 +114,8 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): @pytest.mark.skipif( - pandas.__version__ < "2.0.0", reason="requires pandas 2.0.0 or higher" + tuple(int(part) for part in pandas.__version__.split()[:2]) < (2, 1), + reason="requires pandas 2.1.0 or higher" ) @pytest.mark.parametrize( ("data", "schema_type", "expected"), From 46dd0c8a5e0efcd9384381513fd5517075dc24a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Wed, 5 Feb 2025 16:01:50 -0600 Subject: [PATCH 5/8] Apply suggestions from code review --- pandas_gbq/gbq.py | 2 +- tests/unit/test_gbq.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pandas_gbq/gbq.py b/pandas_gbq/gbq.py index 061eacc6..fc3ac577 100644 --- a/pandas_gbq/gbq.py +++ b/pandas_gbq/gbq.py @@ -639,7 +639,7 @@ def _finalize_dtypes( "DATETIME": "datetime64[ns]", "TIMESTAMP": "datetime64[ns]", } - if tuple(int(part) for part in pandas.__version__.split()[:2]) >= (2, 1): + if tuple(int(part) for part in pandas.__version__.split(".")[:2]) >= (2, 1): # when pandas is 2.1.0 or later, default timestamp dtype is 'datetime64[us]' # and we should use 'datetime64[us]' instead of 'datetime64[ns]' dtype_map = { diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index 6b7dc11b..d2f71a95 100644 --- a/tests/unit/test_gbq.py +++ b/tests/unit/test_gbq.py @@ -114,7 +114,7 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): @pytest.mark.skipif( - tuple(int(part) for part in pandas.__version__.split()[:2]) < (2, 1), + tuple(int(part) for part in pandas.__version__.split(".")[:2]) < (2, 1), reason="requires pandas 2.1.0 or higher" ) @pytest.mark.parametrize( From 5f7e35a0047bad2a43594e81fe093ffb518ed978 Mon Sep 17 00:00:00 2001 From: Tim Swena Date: Wed, 5 Feb 2025 16:29:31 -0600 Subject: [PATCH 6/8] test on various pandas versions --- testing/constraints-3.10.txt | 2 ++ testing/constraints-3.11.txt | 1 + testing/constraints-3.9.txt | 4 +-- tests/unit/test_gbq.py | 54 ++++++++++++++++++++++++++++-------- 4 files changed, 48 insertions(+), 13 deletions(-) diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29b..a3deb24e 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,2 @@ +numpy==1.26.4 +pandas==2.0.3 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29b..10185663 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1 @@ +pandas==2.1.4 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index 76864a66..47012bef 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -1,2 +1,2 @@ -numpy==1.19.4 -pandas==1.1.4 +numpy==1.20.3 +pandas==1.5.3 diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index d2f71a95..bd2dc94a 100644 --- a/tests/unit/test_gbq.py +++ b/tests/unit/test_gbq.py @@ -28,6 +28,9 @@ pytestmark = pytest.mark.filterwarnings("ignore:credentials from Google Cloud SDK") +PANDAS_VERSION = tuple(int(part) for part in pandas.__version__.split(".")[:2]) + + def _make_connector(project_id="some-project", **kwargs): return gbq.GbqConnector(project_id, **kwargs) @@ -113,34 +116,63 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): assert result == {"x": expected} -@pytest.mark.skipif( - tuple(int(part) for part in pandas.__version__.split(".")[:2]) < (2, 1), - reason="requires pandas 2.1.0 or higher" -) @pytest.mark.parametrize( ("data", "schema_type", "expected"), [ - ( + 
pytest.param( pandas.to_datetime(["2017-01-01T12:00:00Z"]).astype( - pandas.DatetimeTZDtype(unit="us", tz="UTC") + pandas.DatetimeTZDtype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + tz="UTC", + ), ), "TIMESTAMP", - pandas.DatetimeTZDtype(unit="us", tz="UTC"), + pandas.DatetimeTZDtype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + tz="UTC", + ), ), ( pandas.to_datetime([]).astype(object), "TIMESTAMP", - pandas.DatetimeTZDtype(unit="us", tz="UTC"), + pandas.DatetimeTZDtype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + tz="UTC", + ), ), ( - pandas.to_datetime(["2017-01-01T12:00:00"]).astype("datetime64[us]"), + pandas.to_datetime(["2017-01-01T12:00:00"]).astype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + "datetime64[us]" + if PANDAS_VERSION >= (2, 1) + else "datetime64[ns]", + ), "DATETIME", - numpy.dtype("datetime64[us]"), + numpy.dtype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + "datetime64[us]" + if PANDAS_VERSION >= (2, 1) + else "datetime64[ns]", + ), ), ( pandas.to_datetime([]).astype(object), "DATETIME", - numpy.dtype("datetime64[us]"), + numpy.dtype( + # Microseconds aren't supported until newer pandas. + # https://github.com/googleapis/python-bigquery-pandas/issues/852 + "datetime64[us]" + if PANDAS_VERSION >= (2, 1) + else "datetime64[ns]", + ), ), ], ) From a1fdeb17d89cd3badda955783918dc5d1540094f Mon Sep 17 00:00:00 2001 From: Tim Swena Date: Thu, 6 Feb 2025 12:36:36 -0600 Subject: [PATCH 7/8] use FEATURES --- pandas_gbq/features.py | 8 ++++++ pandas_gbq/gbq.py | 2 +- pandas_gbq/load/core.py | 1 + tests/system/test_read_gbq.py | 11 +++++--- tests/system/test_to_gbq.py | 49 +++++++++++++++++++++-------------- tests/unit/test_gbq.py | 27 +++++-------------- 6 files changed, 54 insertions(+), 44 deletions(-) diff --git a/pandas_gbq/features.py b/pandas_gbq/features.py index 62405b4c..b9358f7b 100644 --- a/pandas_gbq/features.py +++ b/pandas_gbq/features.py @@ -9,6 +9,7 @@ BIGQUERY_QUERY_AND_WAIT_VERSION = "3.14.0" PANDAS_VERBOSITY_DEPRECATION_VERSION = "0.23.0" PANDAS_BOOLEAN_DTYPE_VERSION = "1.0.0" +PANDAS_MICROSECTONDS_DATETIME_VERSION = "2.1.0" class Features: @@ -81,5 +82,12 @@ def pandas_has_boolean_dtype(self): desired_version = packaging.version.parse(PANDAS_BOOLEAN_DTYPE_VERSION) return self.pandas_installed_version >= desired_version + @property + def pandas_has_microseconds_datetime(self): + import packaging.version + + desired_version = packaging.version.parse(PANDAS_MICROSECTONDS_DATETIME_VERSION) + return self.pandas_installed_version >= desired_version + FEATURES = Features() diff --git a/pandas_gbq/gbq.py b/pandas_gbq/gbq.py index fc3ac577..f4373252 100644 --- a/pandas_gbq/gbq.py +++ b/pandas_gbq/gbq.py @@ -639,7 +639,7 @@ def _finalize_dtypes( "DATETIME": "datetime64[ns]", "TIMESTAMP": "datetime64[ns]", } - if tuple(int(part) for part in pandas.__version__.split(".")[:2]) >= (2, 1): + if FEATURES.pandas_has_microseconds_datetime: # when pandas is 2.1.0 or later, 
default timestamp dtype is 'datetime64[us]' # and we should use 'datetime64[us]' instead of 'datetime64[ns]' dtype_map = { diff --git a/pandas_gbq/load/core.py b/pandas_gbq/load/core.py index d98f8306..bc476d23 100644 --- a/pandas_gbq/load/core.py +++ b/pandas_gbq/load/core.py @@ -195,6 +195,7 @@ def load_csv_from_dataframe( bq_schema = pandas_gbq.schema.to_google_cloud_bigquery(schema) def load_chunk(chunk, job_config): + breakpoint() client.load_table_from_dataframe( chunk, destination_table_ref, diff --git a/tests/system/test_read_gbq.py b/tests/system/test_read_gbq.py index 4ae96a36..06692c2d 100644 --- a/tests/system/test_read_gbq.py +++ b/tests/system/test_read_gbq.py @@ -16,6 +16,7 @@ from pandas_gbq.features import FEATURES + QueryTestCase = collections.namedtuple( "QueryTestCase", ["query", "expected", "use_bqstorage_apis"], @@ -628,7 +629,9 @@ def test_empty_dataframe(read_gbq, use_bqstorage_api): ), "datetime_col": pandas.Series( [], - dtype="datetime64[ns]", + dtype="datetime64[us]" + if FEATURES.pandas_has_microseconds_datetime + else "datetime64[ns]", ), "float_col": pandas.Series([], dtype="float64"), "int64_col": pandas.Series([], dtype="Int64"), @@ -640,8 +643,10 @@ def test_empty_dataframe(read_gbq, use_bqstorage_api): ), "timestamp_col": pandas.Series( [], - dtype="datetime64[ns]", - ).dt.tz_localize(datetime.timezone.utc), + dtype=pandas.DatetimeTZDtype(unit="us", tz="UTC") + if FEATURES.pandas_has_microseconds_datetime + else pandas.DatetimeTZDtype(tz="UTC"), + ), } ) result = read_gbq(query, use_bqstorage_api=use_bqstorage_api) diff --git a/tests/system/test_to_gbq.py b/tests/system/test_to_gbq.py index 139f072b..17d8bb13 100644 --- a/tests/system/test_to_gbq.py +++ b/tests/system/test_to_gbq.py @@ -16,6 +16,9 @@ pytest.importorskip("google.cloud.bigquery", minversion="1.24.0") +PANDAS_VERSION = tuple(int(part) for part in pandas.__version__.split(".")[:2]) + + @pytest.fixture(params=["load_parquet", "load_csv"]) def api_method(request): return request.param @@ -343,25 +346,33 @@ def test_series_round_trip( # require `date_as_object` parameter in # google-cloud-bigquery versions 1.x and 2.x, but not 3.x. 
# https://github.com/googleapis/python-bigquery-pandas/issues/365 - "datetime_col": [ - datetime.datetime(1, 1, 1), - datetime.datetime(1970, 1, 1), - datetime.datetime(9999, 12, 31, 23, 59, 59, 999999), - ], - "timestamp_col": [ - datetime.datetime(1, 1, 1, tzinfo=datetime.timezone.utc), - datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), - datetime.datetime( - 9999, - 12, - 31, - 23, - 59, - 59, - 999999, - tzinfo=datetime.timezone.utc, - ), - ], + "datetime_col": pandas.Series( + [ + datetime.datetime(1, 1, 1), + datetime.datetime(1970, 1, 1), + datetime.datetime(9999, 12, 31, 23, 59, 59, 999999), + ], + dtype="object" if PANDAS_VERSION < (2, 1) else "datetime64[us]", + ), + "timestamp_col": pandas.Series( + [ + datetime.datetime(1, 1, 1, tzinfo=datetime.timezone.utc), + datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc), + datetime.datetime( + 9999, + 12, + 31, + 23, + 59, + 59, + 999999, + tzinfo=datetime.timezone.utc, + ), + ], + dtype="object" + if PANDAS_VERSION < (2, 1) + else pandas.DatetimeTZDtype(unit="us", tz="UTC"), + ), }, columns=["row_num", "date_col", "datetime_col", "timestamp_col"], ), diff --git a/tests/unit/test_gbq.py b/tests/unit/test_gbq.py index bd2dc94a..c49f2157 100644 --- a/tests/unit/test_gbq.py +++ b/tests/unit/test_gbq.py @@ -28,9 +28,6 @@ pytestmark = pytest.mark.filterwarnings("ignore:credentials from Google Cloud SDK") -PANDAS_VERSION = tuple(int(part) for part in pandas.__version__.split(".")[:2]) - - def _make_connector(project_id="some-project", **kwargs): return gbq.GbqConnector(project_id, **kwargs) @@ -122,17 +119,13 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): pytest.param( pandas.to_datetime(["2017-01-01T12:00:00Z"]).astype( pandas.DatetimeTZDtype( - # Microseconds aren't supported until newer pandas. - # https://github.com/googleapis/python-bigquery-pandas/issues/852 - unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + unit="us" if FEATURES.pandas_has_microseconds_datetime else "ns", tz="UTC", ), ), "TIMESTAMP", pandas.DatetimeTZDtype( - # Microseconds aren't supported until newer pandas. - # https://github.com/googleapis/python-bigquery-pandas/issues/852 - unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + unit="us" if FEATURES.pandas_has_microseconds_datetime else "ns", tz="UTC", ), ), @@ -140,26 +133,20 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): pandas.to_datetime([]).astype(object), "TIMESTAMP", pandas.DatetimeTZDtype( - # Microseconds aren't supported until newer pandas. - # https://github.com/googleapis/python-bigquery-pandas/issues/852 - unit="us" if PANDAS_VERSION >= (2, 1) else "ns", + unit="us" if FEATURES.pandas_has_microseconds_datetime else "ns", tz="UTC", ), ), ( pandas.to_datetime(["2017-01-01T12:00:00"]).astype( - # Microseconds aren't supported until newer pandas. - # https://github.com/googleapis/python-bigquery-pandas/issues/852 "datetime64[us]" - if PANDAS_VERSION >= (2, 1) + if FEATURES.pandas_has_microseconds_datetime else "datetime64[ns]", ), "DATETIME", numpy.dtype( - # Microseconds aren't supported until newer pandas. - # https://github.com/googleapis/python-bigquery-pandas/issues/852 "datetime64[us]" - if PANDAS_VERSION >= (2, 1) + if FEATURES.pandas_has_microseconds_datetime else "datetime64[ns]", ), ), @@ -167,10 +154,8 @@ def test__bqschema_to_nullsafe_dtypes(type_, expected): pandas.to_datetime([]).astype(object), "DATETIME", numpy.dtype( - # Microseconds aren't supported until newer pandas. 
- # https://github.com/googleapis/python-bigquery-pandas/issues/852 "datetime64[us]" - if PANDAS_VERSION >= (2, 1) + if FEATURES.pandas_has_microseconds_datetime else "datetime64[ns]", ), ), From 9b914ab02640de775270801f230f4a1a6f5ad092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Thu, 6 Feb 2025 13:19:05 -0600 Subject: [PATCH 8/8] Update pandas_gbq/load/core.py --- pandas_gbq/load/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/pandas_gbq/load/core.py b/pandas_gbq/load/core.py index bc476d23..d98f8306 100644 --- a/pandas_gbq/load/core.py +++ b/pandas_gbq/load/core.py @@ -195,7 +195,6 @@ def load_csv_from_dataframe( bq_schema = pandas_gbq.schema.to_google_cloud_bigquery(schema) def load_chunk(chunk, job_config): - breakpoint() client.load_table_from_dataframe( chunk, destination_table_ref,
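
Taken together, the series replaces ad-hoc string comparisons on pandas.__version__ with a single FEATURES.pandas_has_microseconds_datetime check and maps DATETIME/TIMESTAMP columns to microsecond precision on pandas 2.1 and later. A minimal, self-contained sketch of that version-gated dtype selection follows; it is not the pandas-gbq API itself, and the names _HAS_US_DATETIME and dtype_map are illustrative only:

    # Illustrative sketch of the version gate the patches above converge on.
    import db_dtypes
    import packaging.version
    import pandas

    # pandas 2.1.0 is treated as the first release with reliable
    # microsecond ("us") datetime support; see issue 852 cited above.
    _HAS_US_DATETIME = packaging.version.parse(
        pandas.__version__
    ) >= packaging.version.parse("2.1.0")

    if _HAS_US_DATETIME:
        dtype_map = {
            "DATE": db_dtypes.DateDtype(),
            "DATETIME": "datetime64[us]",
            "TIMESTAMP": pandas.DatetimeTZDtype(unit="us", tz="UTC"),
        }
    else:
        # Older pandas only round-trips nanosecond precision.
        dtype_map = {
            "DATE": db_dtypes.DateDtype(),
            "DATETIME": "datetime64[ns]",
            "TIMESTAMP": "datetime64[ns]",
        }

    print(dtype_map["DATETIME"])

Gating on a parsed version tuple rather than comparing version strings lexically (as PATCH 1 did) is the fix applied in PATCH 4 and PATCH 5 and then centralized in pandas_gbq/features.py by PATCH 7.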