Skip to content
This repository was archived by the owner on Mar 13, 2026. It is now read-only.

Commit 1164fcc

Browse files
committed
Merge remote-tracking branch 'origin/main' into tswast-biglake-sample
2 parents: 8784664 + 4ea2d57 — commit 1164fcc

7 files changed

Lines changed: 20 additions & 17 deletions

File tree

.github/CODEOWNERS

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
66
# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
77

8-
# @googleapis/yoshi-python @googleapis/python-core-client-libraries @googleapis/api-bigquery-dataframe are the default owners for changes in this repo
9-
* @googleapis/yoshi-python @googleapis/python-core-client-libraries @googleapis/api-bigquery-dataframe
8+
# @googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team are the default owners for changes in this repo
9+
* @googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team
1010

11-
# @googleapis/python-samples-reviewers @googleapis/python-core-client-libraries @googleapis/api-bigquery-dataframe are the default owners for samples changes
12-
/samples/ @googleapis/python-samples-reviewers @googleapis/python-core-client-libraries @googleapis/api-bigquery-dataframe
11+
# @googleapis/python-samples-reviewers @googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team are the default owners for samples changes
12+
/samples/ @googleapis/python-samples-reviewers @googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team

.github/blunderbuss.yml

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -4,17 +4,17 @@
44
# Note: This file is autogenerated. To make changes to the assignee
55
# team, please update `codeowner_team` in `.repo-metadata.json`.
66
assign_issues:
7-
- googleapis/python-core-client-libraries
8-
- googleapis/api-bigquery-dataframe
7+
- googleapis/cloud-sdk-python-team
8+
- googleapis/bigquery-dataframe-team
99

1010
assign_issues_by:
1111
- labels:
1212
- "samples"
1313
to:
1414
- googleapis/python-samples-reviewers
15-
- googleapis/python-core-client-libraries
16-
- googleapis/api-bigquery-dataframe
15+
- googleapis/cloud-sdk-python-team
16+
- googleapis/bigquery-dataframe-team
1717

1818
assign_prs:
19-
- googleapis/python-core-client-libraries
20-
- googleapis/api-bigquery-dataframe
19+
- googleapis/cloud-sdk-python-team
20+
- googleapis/bigquery-dataframe-team

.repo-metadata.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,5 +11,5 @@
1111
"distribution_name": "pandas-gbq",
1212
"api_id": "bigquery.googleapis.com",
1313
"default_version": "",
14-
"codeowner_team": "@googleapis/python-core-client-libraries @googleapis/api-bigquery-dataframe"
14+
"codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/bigquery-dataframe-team"
1515
}

docs/index.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,8 @@ Note: The canonical version of this documentation can always be found on the
2323
`BigQuery sandbox <https://cloud.google.com/bigquery/docs/sandbox>`__ to
2424
try the service for free.
2525

26-
Also, consider using BigQuery DataFrames
27-
(`bit.ly/bigframes-intro <https://bit.ly/bigframes-intro>`__)
26+
Also, consider using `BigQuery DataFrames
27+
<https://dataframes.bigquery.dev>`__
2828
to process large results with pandas compatible APIs with transparent SQL
2929
pushdown to BigQuery engine. This provides an opportunity to save on costs
3030
and improve performance.

pandas_gbq/core/read.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -146,7 +146,7 @@ def download_results(
146146
num_gib = num_bytes / pandas_gbq.constants.BYTES_IN_GIB
147147
warnings.warn(
148148
f"Recommendation: Your results are {num_gib:.1f} GiB. "
149-
"Consider using BigQuery DataFrames (https://bit.ly/bigframes-intro)"
149+
"Consider using BigQuery DataFrames (https://dataframes.bigquery.dev)"
150150
"to process large results with pandas compatible APIs with transparent SQL "
151151
"pushdown to BigQuery engine. This provides an opportunity to save on costs "
152152
"and improve performance. "

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
"setuptools",
2525
"db-dtypes >=1.0.4,<2.0.0",
2626
"numpy >=1.18.1",
27-
"pandas >=1.1.4, <3.0.0",
27+
"pandas >=1.1.4",
2828
"pyarrow >= 4.0.0",
2929
# See https://arrow.apache.org/release/22.0.0.html
3030
"pyarrow >= 22.0.0; python_version >= '3.14'",

tests/system/test_to_gbq.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ def test_series_round_trip(
155155
dtype="boolean",
156156
),
157157
"object_col": pandas.Series(
158-
[False, None, True],
158+
[False, pandas.NA, True],
159159
dtype="object",
160160
),
161161
}
@@ -365,7 +365,10 @@ def test_series_round_trip(
365365
# google-cloud-bigquery versions 1.x and 2.x, but not 3.x.
366366
# https://github.com/googleapis/python-bigquery-pandas/issues/365
367367
"datetime_col": [
368-
datetime.datetime(1, 1, 1),
368+
# CSV loader in BigQuery currently requires leading 0s
369+
# for TIMESTAMP but not DATETIME. See internal issue
370+
# b/467399807.
371+
datetime.datetime(1000, 1, 1),
369372
datetime.datetime(1970, 1, 1),
370373
datetime.datetime(9999, 12, 31, 23, 59, 59, 999999),
371374
],

0 commit comments

Comments (0)