Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

require "bigquery_helper"

describe Google::Cloud::Bigquery::Dataset, :ddl, :bigquery do
describe Google::Cloud::Bigquery::Dataset, :ddl_dml, :bigquery do
let(:dataset_id) { "#{prefix}_dataset" }
let(:dataset) do
d = bigquery.dataset dataset_id
Expand All @@ -25,10 +25,10 @@
end
let(:table_id) { "dataset_ddl_table_#{SecureRandom.hex(16)}" }


it "creates and drops a table with ddl stats" do
create_job = dataset.query_job "CREATE TABLE #{table_id} (x INT64)"
create_job.wait_until_done!
create_job.wont_be :failed?

create_job.statement_type.must_equal "CREATE_TABLE"
create_job.ddl_operation_performed.must_equal "CREATE"
Expand All @@ -39,10 +39,29 @@
table_ref.table_id.must_equal table_id
table_ref.reference?.must_equal true
table_ref.exists?.must_equal true
create_job.num_dml_affected_rows.must_be :nil?

insert_job = dataset.query_job "INSERT #{table_id} (x) VALUES(101),(102)"
insert_job.wait_until_done!
insert_job.wont_be :failed?
insert_job.statement_type.must_equal "INSERT"
insert_job.num_dml_affected_rows.must_equal 2

update_job = dataset.query_job "UPDATE #{table_id} SET x = x + 1 WHERE x IS NOT NULL"
update_job.wait_until_done!
update_job.wont_be :failed?
update_job.statement_type.must_equal "UPDATE"
update_job.num_dml_affected_rows.must_equal 2

delete_job = dataset.query_job "DELETE #{table_id} WHERE x = 103"
delete_job.wait_until_done!
delete_job.wont_be :failed?
delete_job.statement_type.must_equal "DELETE"
delete_job.num_dml_affected_rows.must_equal 1

drop_job = dataset.query_job "DROP TABLE #{table_id}"
drop_job.wait_until_done!

drop_job.wont_be :failed?
drop_job.statement_type.must_equal "DROP_TABLE"
drop_job.ddl_operation_performed.must_equal "DROP"
table_ref_2 = create_job.ddl_target_table
Expand All @@ -52,5 +71,6 @@
table_ref_2.table_id.must_equal table_id
table_ref_2.reference?.must_equal true
table_ref_2.exists?.must_equal false
drop_job.num_dml_affected_rows.must_be :nil?
end
end
32 changes: 31 additions & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb
Original file line number Diff line number Diff line change
Expand Up @@ -674,6 +674,8 @@ def tables token: nil, max: nil
##
# Queries data by creating a [query
# job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs).
# Use this method rather than {#query} for executing DDL/DML statements,
# since this method does not automatically return table data.
#
# Sets the current dataset as the default dataset in the query. Useful
# for using unqualified table names.
Expand Down Expand Up @@ -874,6 +876,32 @@ def tables token: nil, max: nil
# end
# end
#
# @example Execute a DDL statement:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
#
# job = bigquery.query_job "CREATE TABLE my_table (x INT64)"
#
# job.wait_until_done!
# if !job.failed?
# table_ref = job.ddl_target_table
# end
#
# @example Execute a DML statement:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
#
# job = bigquery.query_job "UPDATE my_table " \
# "SET x = x + 1 " \
# "WHERE x IS NOT NULL"
#
# job.wait_until_done!
# if !job.failed?
# puts job.num_dml_affected_rows
# end
#
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
Expand Down Expand Up @@ -930,7 +958,9 @@ def query_job query, params: nil, external: nil,
# Queries data and waits for the results. In this method, a {QueryJob}
# is created and its results are saved to a temporary table, then read
# from the table. Timeouts and transient errors are generally handled
# as needed to complete the query.
# as needed to complete the query. Use {#query_job} rather than this
# method for executing DDL/DML statements, since this method
# automatically returns table data.
#
# Sets the current dataset as the default dataset in the query. Useful
# for using unqualified table names.
Expand Down
35 changes: 34 additions & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/project.rb
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,8 @@ def service_account_email
##
# Queries data by creating a [query
# job](https://cloud.google.com/bigquery/docs/query-overview#query_jobs).
# Use this method rather than {#query} for executing DDL/DML statements,
# since this method does not automatically return table data.
#
# When using standard SQL and passing arguments using `params`, Ruby
# types are mapped to BigQuery types as follows:
Expand Down Expand Up @@ -302,6 +304,35 @@ def service_account_email
# end
# end
#
# @example Execute a DDL statement:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
#
# job = bigquery.query_job "CREATE TABLE " \
# "`my_dataset.my_table` " \
# "(x INT64)"
#
# job.wait_until_done!
# if !job.failed?
# table_ref = job.ddl_target_table
# end
#
# @example Execute a DML statement:
# require "google/cloud/bigquery"
#
# bigquery = Google::Cloud::Bigquery.new
#
# job = bigquery.query_job "UPDATE " \
# "`my_dataset.my_table` " \
# "SET x = x + 1 " \
# "WHERE x IS NOT NULL"
#
# job.wait_until_done!
# if !job.failed?
# puts job.num_dml_affected_rows
# end
#
# @example Query using external data source, set destination:
# require "google/cloud/bigquery"
#
Expand Down Expand Up @@ -356,7 +387,9 @@ def query_job query, params: nil, external: nil,
# Queries data and waits for the results. In this method, a {QueryJob}
# is created and its results are saved to a temporary table, then read
# from the table. Timeouts and transient errors are generally handled
# as needed to complete the query.
# as needed to complete the query. Use {#query_job} rather than this
# method for executing DDL/DML statements, since this method
# automatically returns table data.
#
# When using standard SQL and passing arguments using `params`, Ruby
# types are mapped to BigQuery types as follows:
Expand Down
12 changes: 12 additions & 0 deletions google-cloud-bigquery/lib/google/cloud/bigquery/query_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -253,6 +253,18 @@ def ddl_target_table
Google::Cloud::Bigquery::Table.new_reference_from_gapi table, service
end

##
# The number of rows affected by a DML statement. Present only for DML
# statements `INSERT`, `UPDATE` or `DELETE`. (See {#statement_type}.)
#
# @return [Integer, nil] The number of rows affected by a DML statement,
#   or `nil` if the query is not a DML statement.
#
def num_dml_affected_rows
  query_stats = @gapi.statistics.query
  query_stats.num_dml_affected_rows if query_stats
end

##
# The table in which the query results are stored.
#
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@
job.statement_type.must_be :nil?
job.ddl_operation_performed.must_be :nil?
job.ddl_target_table.must_be :nil?
job.num_dml_affected_rows.must_be :nil?
end

it "queries the data with options set" do
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
job.ddl_target_table.project_id.must_equal "target_project_id"
job.ddl_target_table.dataset_id.must_equal "target_dataset_id"
job.ddl_target_table.table_id.must_equal "target_table_id"
job.num_dml_affected_rows.must_equal 50
job.statement_type.must_equal "CREATE_TABLE"
end

Expand Down Expand Up @@ -165,6 +166,7 @@ def statistics_query_gapi
dataset_id: "target_dataset_id",
table_id: "target_table_id"
),
num_dml_affected_rows: 50, # Present only for DML statements INSERT, UPDATE or DELETE.
query_plan: [
Google::Apis::BigqueryV2::ExplainQueryStage.new(
compute_ratio_avg: 1.0,
Expand Down