Add transfer configs
Ty Larrabee committed Jul 24, 2019
1 parent 23c4592 commit a66050d
Showing 5 changed files with 217 additions and 1 deletion.
98 changes: 98 additions & 0 deletions products/bigquerydatatransfer/api.yaml
@@ -0,0 +1,98 @@
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Api::Product
name: BigqueryDataTransfer
display_name: BigQueryDataTransfer
versions:
  - !ruby/object:Api::Product::Version
    name: ga
    base_url: https://bigquerydatatransfer.googleapis.com/v1/
scopes:
  - https://www.googleapis.com/auth/bigquery
apis_required:
  - !ruby/object:Api::Product::ApiReference
    name: BigQueryDataTransfer API
    url: https://console.cloud.google.com/apis/api/bigquerydatatransfer.googleapis.com/
objects:
  - !ruby/object:Api::Resource
    name: 'Config'
    base_url: projects/{{project}}/locations/{{location}}/transferConfigs
    self_link: "{{name}}"
    update_verb: :PATCH
    update_mask: true
    description: |
      Represents a data transfer configuration. A transfer configuration
      contains all metadata needed to perform a data transfer.
    parameters:
      - !ruby/object:Api::Type::String
        name: 'location'
        description: |
          The geographic location where the transfer config should reside.
          Examples: US, EU, asia-northeast1. The default value is US.
        url_param_only: true
        input: true
        default_value: US
    properties:
      - !ruby/object:Api::Type::String
        name: 'displayName'
        description: |
          The user-specified display name for the transfer config.
        input: true
        required: true
      - !ruby/object:Api::Type::String
        name: 'name'
        description: |
          The resource name of the transfer config. Transfer config names have
          the form projects/{projectId}/locations/{location}/transferConfigs/{configId},
          where configId is usually a UUID, though this is not required.
          The name is ignored when creating a transfer config.
      - !ruby/object:Api::Type::String
        name: 'destinationDatasetId'
        description: |
          The BigQuery target dataset id.
        required: true
      - !ruby/object:Api::Type::String
        name: 'dataSourceId'
        description: |
          The data source id. Cannot be changed once the transfer config is created.
        input: true
        required: true
      - !ruby/object:Api::Type::String
        name: 'schedule'
        description: |
          Data transfer schedule. If the data source does not support a custom
          schedule, this should be empty. If it is empty, the default value for
          the data source will be used. The specified times are in UTC. Examples
          of valid format: "1st,3rd monday of month 15:30",
          "every wed,fri of jan,jun 13:15", and "first sunday of quarter 00:00".
          See more explanation about the format here:
          https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
          NOTE: the granularity should be at least 8 hours; runs cannot be
          scheduled more frequently than every 8 hours.
      - !ruby/object:Api::Type::Integer
        name: 'dataRefreshWindowDays'
        description: |
          The number of days to look back to automatically refresh the data.
          For example, if dataRefreshWindowDays = 10, then every day BigQuery
          reingests data for [today-10, today-1], rather than ingesting data for
          just [today-1]. Only valid if the data source supports the feature.
          Set the value to 0 to use the default value.
      - !ruby/object:Api::Type::Boolean
        name: 'disabled'
        description: |
          When set to true, no runs are scheduled for a given transfer.
      - !ruby/object:Api::Type::KeyValuePairs
        name: 'params'
        description: |
          These parameters are specific to each data source.
        required: true
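
For orientation, this spec generates the google_bigquery_data_transfer_config Terraform resource. A minimal usage sketch, assuming the generator's usual camelCase-to-snake_case field mapping (all values here are illustrative; the committed scheduled_query example below shows the real templated form):

resource "google_bigquery_data_transfer_config" "example" {
  display_name           = "example-query"   # required; input: true, so changing it forces recreation
  location               = "US"              # url_param_only parameter; defaults to US
  data_source_id         = "scheduled_query" # required and immutable once created
  destination_dataset_id = "example_dataset" # required
  schedule               = "first sunday of quarter 00:00"

  # params is required and its keys depend on the data source.
  params = {
    destination_table_name_template = "example_table"
    write_disposition               = "WRITE_APPEND"
    query                           = "SELECT 1"
  }
}

The name property is server-assigned (and ignored on create), so it is not set here.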
39 changes: 39 additions & 0 deletions products/bigquerydatatransfer/terraform.yaml
@@ -0,0 +1,39 @@
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Provider::Terraform::Config
overrides: !ruby/object:Overrides::ResourceOverrides
  Config: !ruby/object:Overrides::Terraform::ResourceOverride
    import_format: ["{{name}}"]
    id_format: "{{name}}"
    custom_code: !ruby/object:Provider::Terraform::CustomCode
      custom_import: templates/terraform/custom_import/self_link_as_name.erb
      post_create: templates/terraform/post_create/set_computed_name.erb
    properties:
      name: !ruby/object:Overrides::Terraform::PropertyOverride
        output: true
      location: !ruby/object:Overrides::Terraform::PropertyOverride
        ignore_read: true
    examples:
      - !ruby/object:Provider::Terraform::Examples
        name: "scheduled_query"
        primary_resource_id: "query_config"
        vars:
          display_name: "my-query"
          dataset_id: "my_dataset"
# This is for copying files over
files: !ruby/object:Provider::Config::Files
  # These files have templating (ERB) code that will be run.
  # This is usually to add licensing info, autogeneration notices, etc.
  compile:
<%= lines(indent(compile('provider/terraform/product~compile.yaml'), 4)) -%>
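
Because the name property is overridden as output-only, it is computed by the API and can be referenced but never set in configuration. A minimal sketch, reusing the query_config resource address from the example below:

output "transfer_config_name" {
  # Server-generated, of the form
  # projects/{projectId}/locations/{location}/transferConfigs/{configId}.
  value = "${google_bigquery_data_transfer_config.query_config.name}"
}

Since import_format and id_format are both "{{name}}", this same value is what terraform import expects as the resource ID.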
19 changes: 19 additions & 0 deletions templates/terraform/examples/scheduled_query.tf.erb
@@ -0,0 +1,19 @@
resource "google_bigquery_data_transfer_config" "<%= ctx[:primary_resource_id] %>" {
display_name = "<%= ctx[:vars]['display_name'] %>"
location = "asia-northeast1"
data_source_id = "scheduled_query"
schedule = "first sunday of quarter 00:00"
destination_dataset_id = "${google_bigquery_dataset.my-dataset.dataset_id}"
params = {
destination_table_name_template = "my-table"
write_disposition = "WRITE_APPEND"
query = "SELECT name FROM tabl WHERE x = 'y'"
}
}

resource "google_bigquery_dataset" "my-dataset" {
dataset_id = "<%= ctx[:vars]['dataset_id'].delete("-") %>"
friendly_name = "foo"
description = "bar"
location = "asia-northeast1"
}
59 changes: 59 additions &amp; 0 deletions
@@ -0,0 +1,59 @@
package google

import (
	"fmt"
	"testing"

	"github.com/hashicorp/terraform/helper/acctest"
	"github.com/hashicorp/terraform/helper/resource"
)

func TestAccBigqueryDataTransferConfig_scheduledQueryUpdate(t *testing.T) {
	t.Parallel()

	random_suffix := acctest.RandString(10)

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy,
		Steps: []resource.TestStep{
			// Create the config, then update the schedule and query in place.
			{
				Config: testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, "first", "y"),
			},
			{
				Config: testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, "second", "z"),
			},
			// location is a URL parameter that is not returned on read, so it
			// cannot be verified on import.
			{
				ResourceName:            "google_bigquery_data_transfer_config.query_config",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"location"},
			},
		},
	})
}

func testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, schedule, letter string) string {
	return fmt.Sprintf(`
resource "google_bigquery_data_transfer_config" "query_config" {
  display_name           = "my-query-%s"
  location               = "asia-northeast1"
  data_source_id         = "scheduled_query"
  schedule               = "%s sunday of quarter 00:00"
  destination_dataset_id = "${google_bigquery_dataset.my-dataset.dataset_id}"

  params = {
    destination_table_name_template = "my-table"
    write_disposition               = "WRITE_APPEND"
    query                           = "SELECT name FROM tabl WHERE x = '%s'"
  }
}

resource "google_bigquery_dataset" "my-dataset" {
  dataset_id    = "my_dataset%s"
  friendly_name = "foo"
  description   = "bar"
  location      = "asia-northeast1"
}
`, random_suffix, schedule, letter, random_suffix)
}
3 changes: 2 additions & 1 deletion third_party/terraform/utils/provider.go.erb
@@ -125,7 +125,7 @@ func Provider() terraform.ResourceProvider {
 	ServiceManagementCustomEndpointEntryKey: ServiceManagementCustomEndpointEntry,
 	ServiceNetworkingCustomEndpointEntryKey: ServiceNetworkingCustomEndpointEntry,
 	ServiceUsageCustomEndpointEntryKey: ServiceUsageCustomEndpointEntry,
-	BigQueryCustomEndpointEntryKey: BigQueryCustomEndpointEntry,
+	BigQueryCustomEndpointEntryKey: BigQueryCustomEndpointEntry,
 	CloudFunctionsCustomEndpointEntryKey: CloudFunctionsCustomEndpointEntry,
 	CloudIoTCustomEndpointEntryKey: CloudIoTCustomEndpointEntry,
 	StorageTransferCustomEndpointEntryKey: StorageTransferCustomEndpointEntry,
@@ -395,6 +395,7 @@ func providerConfigure(d *schema.ResourceData) (interface{}, error) {
 	config.ServiceNetworkingBasePath = d.Get(ServiceNetworkingCustomEndpointEntryKey).(string)
 	config.ServiceUsageBasePath = d.Get(ServiceUsageCustomEndpointEntryKey).(string)
 	config.BigQueryBasePath = d.Get(BigQueryCustomEndpointEntryKey).(string)
+	config.BigqueryDataTransferBasePath = d.Get(BigqueryDataTransferCustomEndpointEntryKey).(string)
 	config.CloudFunctionsBasePath = d.Get(CloudFunctionsCustomEndpointEntryKey).(string)
 	config.CloudIoTBasePath = d.Get(CloudIoTCustomEndpointEntryKey).(string)
 	config.StorageTransferBasePath = d.Get(StorageTransferCustomEndpointEntryKey).(string)
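
The hunk above wires the new endpoint into providerConfigure; together with the matching schema entry (in a collapsed part of this diff), it lets users override the base URL. A sketch of doing so from HCL, assuming the attribute follows the provider's usual *_custom_endpoint naming pattern (the exact key string behind BigqueryDataTransferCustomEndpointEntryKey is not shown in this diff):

provider "google" {
  # Assumed attribute name; useful for pointing the provider at a
  # test or regional endpoint instead of the default v1 base URL.
  bigquery_data_transfer_custom_endpoint = "https://bigquerydatatransfer.googleapis.com/v1/"
}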
