Skip to content

Commit

Permalink
Add Routine unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
quartzmo committed Jan 23, 2020
1 parent 7b4fb51 commit f3ff1f9
Show file tree
Hide file tree
Showing 3 changed files with 259 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require "helper"

# Unit tests for Dataset#routine. The low-level service is replaced with a
# Minitest::Mock, so no network calls are made.
#
# FIX: removed a leftover `focus` (minitest-focus) before the example below —
# committing it would silently restrict the entire suite to this one test.
describe Google::Cloud::Bigquery::Dataset, :routine, :mock_bigquery do
  let(:dataset_hash) { random_dataset_hash }
  let(:dataset_gapi) { Google::Apis::BigqueryV2::Dataset.from_json dataset_hash.to_json }
  let(:dataset) { Google::Cloud::Bigquery::Dataset.from_gapi dataset_gapi, bigquery.service }

  it "finds a routine" do
    found_routine_id = "found_routine"

    # The service is stubbed at the JSON layer (skip_deserialization: true),
    # so the mocked response must be a JSON string, not a gapi object.
    mock = Minitest::Mock.new
    mock.expect :get_routine, random_routine_full_hash(dataset.dataset_id, found_routine_id).to_json, [project, dataset.dataset_id, found_routine_id, options: { skip_deserialization: true }]
    dataset.service.mocked_service = mock

    routine = dataset.routine found_routine_id

    mock.verify

    routine.must_be_kind_of Google::Cloud::Bigquery::Routine
    routine.routine_id.must_equal found_routine_id
  end
end
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require "helper"

# Unit tests for Dataset#models listing and pagination. The low-level service
# is replaced with a Minitest::Mock returning raw JSON pages
# (skip_deserialization: true), so no network calls are made.
describe Google::Cloud::Bigquery::Dataset, :models, :mock_bigquery do
  let(:dataset_hash) { random_dataset_hash }
  let(:dataset_gapi) { Google::Apis::BigqueryV2::Dataset.from_json dataset_hash.to_json }
  let(:dataset) { Google::Cloud::Bigquery::Dataset.from_gapi dataset_gapi, bigquery.service }

  it "lists models" do
    # Single page of three models, no paging token.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models

    service_mock.verify

    models.size.must_equal 3
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
  end

  it "lists models with max set" do
    # max: 3 is forwarded to the API as max_results, and the returned page
    # carries the token for the next page.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: 3, page_token: nil, options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models max: 3

    service_mock.verify

    models.count.must_equal 3
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    models.token.wont_be :nil?
    models.token.must_equal "next_page_token"
  end

  it "paginates models" do
    # Two pages: the second request must echo the first page's token.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 2, nil),
                        [project, dataset.dataset_id, max_results: nil, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    first_page = dataset.models
    second_page = dataset.models token: first_page.token

    service_mock.verify

    first_page.count.must_equal 3
    first_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    first_page.token.wont_be :nil?
    first_page.token.must_equal "next_page_token"

    second_page.count.must_equal 2
    second_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    second_page.token.must_be :nil?
  end

  it "paginates models with next? and next" do
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 2, nil),
                        [project, dataset.dataset_id, max_results: nil, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    first_page = dataset.models
    second_page = first_page.next

    service_mock.verify

    first_page.count.must_equal 3
    first_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    first_page.token.wont_be :nil?
    first_page.token.must_equal "next_page_token"

    second_page.count.must_equal 2
    second_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    second_page.token.must_be :nil?
  end

  it "paginates models with next? and next and max" do
    # The max option must be preserved across Page#next calls.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: 3, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 2, nil),
                        [project, dataset.dataset_id, max_results: 3, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    first_page = dataset.models max: 3
    second_page = first_page.next

    service_mock.verify

    first_page.count.must_equal 3
    first_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    first_page.next?.must_equal true

    second_page.count.must_equal 2
    second_page.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
    second_page.next?.must_equal false
  end

  it "paginates models with all" do
    # #all walks every page, yielding 3 + 2 = 5 models.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 2, nil),
                        [project, dataset.dataset_id, max_results: nil, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models.all.to_a

    service_mock.verify

    models.count.must_equal 5
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
  end

  it "paginates models with all and max" do
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: 3, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 2, nil),
                        [project, dataset.dataset_id, max_results: 3, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models(max: 3).all.to_a

    service_mock.verify

    models.count.must_equal 5
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
  end

  it "iterates models with all using Enumerator" do
    # take(5) stops the lazy enumeration after the second page, so only the
    # two stubbed requests are issued even though more pages are advertised.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "second_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models.all.take(5)

    service_mock.verify

    models.count.must_equal 5
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
  end

  it "iterates models with all with request_limit set" do
    # request_limit: 1 allows the initial request plus one follow-up,
    # yielding 3 + 3 = 6 models before iteration stops.
    service_mock = Minitest::Mock.new
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "next_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: nil, options: { skip_deserialization: true }]
    service_mock.expect :list_models,
                        list_models_gapi_json(dataset.dataset_id, 3, "second_page_token"),
                        [project, dataset.dataset_id, max_results: nil, page_token: "next_page_token", options: { skip_deserialization: true }]
    dataset.service.mocked_service = service_mock

    models = dataset.models.all(request_limit: 1).to_a

    service_mock.verify

    models.count.must_equal 6
    models.each { |model| model.must_be_kind_of Google::Cloud::Bigquery::Model }
  end
end
40 changes: 40 additions & 0 deletions google-cloud-bigquery/test/helper.rb
Original file line number Diff line number Diff line change
Expand Up @@ -467,6 +467,46 @@ def list_models_gapi_json dataset_id, count = 2, token = nil
hash.to_json
end


# Builds a Google::Apis::BigqueryV2::Routine gapi object for tests.
#
# FIX: the hash was being deserialized into BigqueryV2::Table (copy-paste
# from the table helper), which silently dropped every routine field.
def random_routine_gapi dataset, id = nil, name = nil, description = nil, project_id = nil
  json = random_routine_hash(dataset, id, name, description, project_id).to_json
  Google::Apis::BigqueryV2::Routine.from_json json
end

# Builds a BigQuery Routine resource hash for tests.
#
# FIX: the previous body was a verbatim copy of the *table* hash helper
# ("bigquery#table", tableReference, schema, streamingBuffer, ...), which is
# not a valid Routine resource. Routines are identified by routineReference
# and described by routineType/language/definitionBody per the BigQuery v2
# REST API. The signature is unchanged; `name` is accepted for call-site
# compatibility but unused because routines have no friendlyName field.
def random_routine_hash dataset, id = nil, name = nil, description = nil, project_id = nil
  id ||= "my_routine"
  description ||= "This is my routine"

  {
    "etag" => "etag123456789",
    "routineReference" => {
      "projectId" => (project_id || project),
      "datasetId" => dataset,
      "routineId" => id
    },
    "routineType" => "SCALAR_FUNCTION",
    "creationTime" => time_millis,
    "lastModifiedTime" => time_millis,
    "language" => "SQL",
    "arguments" => [
      {
        "name" => "x",
        "dataType" => { "typeKind" => "INT64" }
      }
    ],
    "returnType" => { "typeKind" => "INT64" },
    "definitionBody" => "x * 3",
    "description" => description
  }
end

def random_job_hash id = "job_9876543210", state = "running", location: "US"
hash = {
"kind" => "bigquery#job",
Expand Down

0 comments on commit f3ff1f9

Please sign in to comment.