Skip to content
This repository was archived by the owner on Nov 12, 2025. It is now read-only.

Commit bef63fb

Browse files
feat: updates for v1beta2 storage API - Updated comments on BatchCommitWriteStreams - Added support for new BigQuery types BIGNUMERIC and INTERVAL to TableSchema - Added read rows schema in ReadRowsResponse - Misc comment updates (#172)
Committer: @yirutang PiperOrigin-RevId: 366811078 Source-Author: Google APIs <noreply@google.com> Source-Date: Mon Apr 5 09:19:17 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: b1614aa0668564ec41d78b72cf776e0292ffc98c Source-Link: googleapis/googleapis@b1614aa
1 parent 97998be commit bef63fb

11 files changed

Lines changed: 139 additions & 71 deletions

File tree

google/cloud/bigquery_storage_v1beta2/proto/arrow.proto

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.

google/cloud/bigquery_storage_v1beta2/proto/avro.proto

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.

google/cloud/bigquery_storage_v1beta2/proto/protobuf.proto

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -23,15 +23,14 @@ option java_multiple_files = true;
2323
option java_outer_classname = "ProtoBufProto";
2424
option java_package = "com.google.cloud.bigquery.storage.v1beta2";
2525

26-
// Protobuf schema is an API presentation the proto buffer schema.
26+
// ProtoSchema describes the schema of the serialized protocol buffer data rows.
2727
message ProtoSchema {
2828
// Descriptor for input message. The descriptor has to be self contained,
2929
// including all the nested types, excepted for proto buffer well known types
3030
// (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf).
3131
google.protobuf.DescriptorProto proto_descriptor = 1;
3232
}
3333

34-
// Protobuf rows.
3534
message ProtoRows {
3635
// A sequence of rows serialized as a Protocol Buffer.
3736
//

google/cloud/bigquery_storage_v1beta2/proto/storage.proto

Lines changed: 64 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -71,8 +71,7 @@ service BigQueryRead {
7171
post: "/v1beta2/{read_session.table=projects/*/datasets/*/tables/*}"
7272
body: "*"
7373
};
74-
option (google.api.method_signature) =
75-
"parent,read_session,max_stream_count";
74+
option (google.api.method_signature) = "parent,read_session,max_stream_count";
7675
}
7776

7877
// Reads rows from the stream in the format prescribed by the ReadSession.
@@ -101,8 +100,7 @@ service BigQueryRead {
101100
// original, primary, and residual, that original[0-j] = primary[0-j] and
102101
// original[j-n] = residual[0-m] once the streams have been read to
103102
// completion.
104-
rpc SplitReadStream(SplitReadStreamRequest)
105-
returns (SplitReadStreamResponse) {
103+
rpc SplitReadStream(SplitReadStreamRequest) returns (SplitReadStreamResponse) {
106104
option (google.api.http) = {
107105
get: "/v1beta2/{name=projects/*/locations/*/sessions/*/streams/*}"
108106
};
@@ -171,8 +169,7 @@ service BigQueryWrite {
171169

172170
// Finalize a write stream so that no new data can be appended to the
173171
// stream. Finalize is not supported on the '_default' stream.
174-
rpc FinalizeWriteStream(FinalizeWriteStreamRequest)
175-
returns (FinalizeWriteStreamResponse) {
172+
rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) {
176173
option (google.api.http) = {
177174
post: "/v1beta2/{name=projects/*/datasets/*/tables/*/streams/*}"
178175
body: "*"
@@ -185,8 +182,7 @@ service BigQueryWrite {
185182
// Streams must be finalized before commit and cannot be committed multiple
186183
// times. Once a stream is committed, data in the stream becomes available
187184
// for read operations.
188-
rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest)
189-
returns (BatchCommitWriteStreamsResponse) {
185+
rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) {
190186
option (google.api.http) = {
191187
get: "/v1beta2/{parent=projects/*/datasets/*/tables/*}"
192188
};
@@ -303,6 +299,19 @@ message ReadRowsResponse {
303299
// Throttling state. If unset, the latest response still describes
304300
// the current throttling status.
305301
ThrottleState throttle_state = 5;
302+
303+
// The schema for the read. If read_options.selected_fields is set, the
304+
// schema may be different from the table schema as it will only contain
305+
// the selected fields. This schema is equivalent to the one returned by
306+
// CreateSession. This field is only populated in the first ReadRowsResponse
307+
// RPC.
308+
oneof schema {
309+
// Output only. Avro schema.
310+
AvroSchema avro_schema = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
311+
312+
// Output only. Arrow schema.
313+
ArrowSchema arrow_schema = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
314+
}
306315
}
307316

308317
// Request message for `SplitReadStream`.
@@ -342,7 +351,9 @@ message CreateWriteStreamRequest {
342351
// of `projects/{project}/datasets/{dataset}/tables/{table}`.
343352
string parent = 1 [
344353
(google.api.field_behavior) = REQUIRED,
345-
(google.api.resource_reference) = { type: "bigquery.googleapis.com/Table" }
354+
(google.api.resource_reference) = {
355+
type: "bigquery.googleapis.com/Table"
356+
}
346357
];
347358

348359
// Required. Stream to be created.
@@ -360,9 +371,9 @@ message AppendRowsRequest {
360371
ProtoRows rows = 2;
361372
}
362373

363-
// Required. The stream that is the target of the append operation. This value
364-
// must be specified for the initial request. If subsequent requests specify
365-
// the stream name, it must equal to the value provided in the first request.
374+
// Required. The stream that is the target of the append operation. This value must be
375+
// specified for the initial request. If subsequent requests specify the
376+
// stream name, it must equal to the value provided in the first request.
366377
// To write to the _default stream, populate this field with a string in the
367378
// format `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
368379
string write_stream = 1 [
@@ -394,7 +405,7 @@ message AppendRowsRequest {
394405

395406
// Response message for `AppendRows`.
396407
message AppendRowsResponse {
397-
// A success append result.
408+
// AppendResult is returned for successful append requests.
398409
message AppendResult {
399410
// The row offset at which the last append occurred. The offset will not be
400411
// set if appending using default streams.
@@ -405,25 +416,32 @@ message AppendRowsResponse {
405416
// Result if the append is successful.
406417
AppendResult append_result = 1;
407418

408-
// Error in case of request failed. If set, it means rows are not accepted
409-
// into the system. Users can retry or continue with other requests within
410-
// the same connection.
411-
// ALREADY_EXISTS: happens when offset is specified, it means the entire
412-
// request is already appended, it is safe to ignore this error.
413-
// OUT_OF_RANGE: happens when offset is specified, it means the specified
414-
// offset is beyond the end of the stream.
415-
// INVALID_ARGUMENT: error caused by malformed request or data.
416-
// RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
417-
// append without offset.
418-
// ABORTED: request processing is aborted because of prior failures, request
419-
// can be retried if previous failure is fixed.
420-
// INTERNAL: server side errors that can be retried.
419+
// Error returned when problems were encountered. If present,
420+
// it indicates rows were not accepted into the system.
421+
// Users can retry or continue with other append requests within the
422+
// same connection.
423+
//
424+
// Additional information about error signalling:
425+
//
426+
// ALREADY_EXISTS: Happens when an append specified an offset, and the
427+
// backend already has received data at this offset. Typically encountered
428+
// in retry scenarios, and can be ignored.
429+
//
430+
// OUT_OF_RANGE: Returned when the specified offset in the stream is beyond
431+
// the current end of the stream.
432+
//
433+
// INVALID_ARGUMENT: Indicates a malformed request or data.
434+
//
435+
// ABORTED: Request processing is aborted because of prior failures. The
436+
// request can be retried if previous failure is addressed.
437+
//
438+
// INTERNAL: Indicates server side error(s) that can be retried.
421439
google.rpc.Status error = 2;
422440
}
423441

424442
// If backend detects a schema update, pass it to user so that user can
425-
// use it to input new type of message. It will be empty when there is no
426-
// schema updates.
443+
// use it to input new type of message. It will be empty when no schema
444+
// updates have occurred.
427445
TableSchema updated_schema = 3;
428446
}
429447

@@ -441,9 +459,11 @@ message GetWriteStreamRequest {
441459

442460
// Request message for `BatchCommitWriteStreams`.
443461
message BatchCommitWriteStreamsRequest {
444-
// Required. Parent table that all the streams should belong to, in the form
445-
// of `projects/{project}/datasets/{dataset}/tables/{table}`.
446-
string parent = 1 [(google.api.field_behavior) = REQUIRED];
462+
// Required. Parent table that all the streams should belong to, in the form of
463+
// `projects/{project}/datasets/{dataset}/tables/{table}`.
464+
string parent = 1 [
465+
(google.api.field_behavior) = REQUIRED
466+
];
447467

448468
// Required. The group of streams that will be committed atomically.
449469
repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED];
@@ -452,11 +472,15 @@ message BatchCommitWriteStreamsRequest {
452472
// Response message for `BatchCommitWriteStreams`.
453473
message BatchCommitWriteStreamsResponse {
454474
// The time at which streams were committed in microseconds granularity.
455-
// This field will only exist when there is no stream errors.
475+
// This field will only exist when there are no stream errors.
476+
// **Note** if this field is not set, it means the commit was not successful.
456477
google.protobuf.Timestamp commit_time = 1;
457478

458479
// Stream level error if commit failed. Only streams with error will be in
459480
// the list.
481+
// If empty, there is no error and all streams are committed successfully.
482+
// If non-empty, certain streams have errors and zero streams are committed due
483+
// to atomicity guarantee.
460484
repeated StorageError stream_errors = 2;
461485
}
462486

@@ -500,8 +524,9 @@ message FlushRowsResponse {
500524
}
501525

502526
// Structured custom BigQuery Storage error message. The error can be attached
503-
// as error details in the returned rpc Status. User can use the info to process
504-
// errors in a structural way, rather than having to parse error messages.
527+
// as error details in the returned rpc Status. In particular, the use of error
528+
// codes allows more structured error handling, and reduces the need to evaluate
529+
// unstructured error text strings.
505530
message StorageError {
506531
// Error code for `StorageError`.
507532
enum StorageErrorCode {
@@ -522,9 +547,12 @@ message StorageError {
522547
INVALID_STREAM_TYPE = 4;
523548

524549
// Invalid Stream state.
525-
// For example, you try to commit a stream that is not fianlized or is
550+
// For example, you try to commit a stream that is not finalized or is
526551
// garbaged.
527552
INVALID_STREAM_STATE = 5;
553+
554+
// Stream is finalized.
555+
STREAM_FINALIZED = 6;
528556
}
529557

530558
// BigQuery Storage specific error code.

google/cloud/bigquery_storage_v1beta2/proto/stream.proto

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -74,6 +74,8 @@ message ReadSession {
7474
// "nullable_field is not NULL"
7575
// "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))"
7676
// "numeric_field BETWEEN 1.0 AND 5.0"
77+
//
78+
// Restricted to a maximum length of 1 MB.
7779
string row_restriction = 2;
7880

7981
// Optional. Options specific to the Apache Arrow output format.

google/cloud/bigquery_storage_v1beta2/proto/table.proto

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2020 Google LLC
1+
// Copyright 2021 Google LLC
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -70,6 +70,12 @@ message TableFieldSchema {
7070

7171
// Numeric value
7272
NUMERIC = 12;
73+
74+
// BigNumeric value
75+
BIGNUMERIC = 13;
76+
77+
// Interval
78+
INTERVAL = 14;
7379
}
7480

7581
enum Mode {

google/cloud/bigquery_storage_v1beta2/types/protobuf.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@
2828

2929

3030
class ProtoSchema(proto.Message):
31-
r"""Protobuf schema is an API presentation the proto buffer
32-
schema.
31+
r"""ProtoSchema describes the schema of the serialized protocol
32+
buffer data rows.
3333
3434
Attributes:
3535
proto_descriptor (google.protobuf.descriptor_pb2.DescriptorProto):
@@ -47,7 +47,7 @@ class ProtoSchema(proto.Message):
4747

4848

4949
class ProtoRows(proto.Message):
50-
r"""Protobuf rows.
50+
r"""
5151
5252
Attributes:
5353
serialized_rows (Sequence[bytes]):

0 commit comments

Comments
 (0)