|
20 | 20 | import pandas |
21 | 21 | import pandas.testing |
22 | 22 | import pytest |
| 23 | +import pkg_resources |
23 | 24 |
|
24 | 25 | import google.api_core.exceptions |
25 | 26 | from google.cloud.bigquery_storage import types |
|
28 | 29 |
|
29 | 30 | pyarrow = pytest.importorskip("pyarrow") |
30 | 31 |
|
# Resolve the installed pandas version once at import time so the
# version-gated ``skipif`` markers below can compare against it with
# PEP 440 semantics.
# NOTE(review): ``import pandas`` at the top of this module appears to be
# unconditional, so the ``None`` fallback looks unreachable here — presumably
# kept for symmetry with sibling modules where pandas is optional; confirm.
# NOTE(review): ``pkg_resources`` is deprecated in modern setuptools —
# ``importlib.metadata.version`` + ``packaging.version.parse`` is the
# suggested replacement when the project migrates.
if pandas is None:  # pragma: NO COVER
    PANDAS_INSTALLED_VERSION = pkg_resources.parse_version("0.0.0")
else:  # pragma: NO COVER
    PANDAS_INSTALLED_VERSION = pkg_resources.get_distribution("pandas").parsed_version
31 | 37 |
|
32 | 38 | # This dictionary is duplicated in bigquery/google/cloud/bigquery/_pandas_helpers.py |
33 | 39 | # When modifying it be sure to update it there as well. |
@@ -172,6 +178,9 @@ def test_to_arrow_w_scalars_arrow(class_under_test, mock_gapic_client): |
172 | 178 | assert actual_table == expected_table |
173 | 179 |
|
174 | 180 |
|
| 181 | +@pytest.mark.skipif( |
| 182 | + PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason="" |
| 183 | +) |
175 | 184 | def test_to_dataframe_w_scalars_arrow(class_under_test, mock_gapic_client): |
176 | 185 | arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) |
177 | 186 | arrow_batches = _bq_to_arrow_batches(SCALAR_BLOCKS, arrow_schema) |
@@ -239,6 +248,9 @@ def test_to_dataframe_w_dtypes_arrow(class_under_test, mock_gapic_client): |
239 | 248 | ) |
240 | 249 |
|
241 | 250 |
|
| 251 | +@pytest.mark.skipif( |
| 252 | + PANDAS_INSTALLED_VERSION >= pkg_resources.parse_version("2.0.0"), reason="" |
| 253 | +) |
242 | 254 | def test_to_dataframe_empty_w_scalars_arrow(class_under_test, mock_gapic_client): |
243 | 255 | arrow_schema = _bq_to_arrow_schema(SCALAR_COLUMNS) |
244 | 256 | read_session = _generate_arrow_read_session(arrow_schema) |
|
0 commit comments