From 44125114eeb6ea04ecd3c33b04edb08ada3cf9dc Mon Sep 17 00:00:00 2001 From: Shin Fan Date: Thu, 17 Mar 2016 12:53:27 -0700 Subject: [PATCH 1/2] Update and fix gcloud-java-logging --- .../logging/spi/v2/ConfigServiceV2Api.java | 473 +++ .../spi/v2/ConfigServiceV2Settings.java | 289 ++ .../logging/spi/v2/LoggingServiceV2Api.java | 507 ++++ .../spi/v2/LoggingServiceV2Settings.java | 370 +++ .../logging/spi/v2/MetricsServiceV2Api.java | 478 +++ .../spi/v2/MetricsServiceV2Settings.java | 293 ++ .../com/google/logging/type/HttpRequest.java | 1478 --------- .../logging/type/HttpRequestOrBuilder.java | 157 - .../google/logging/type/HttpRequestProto.java | 56 - .../com/google/logging/type/LogSeverity.java | 245 -- .../google/logging/type/LogSeverityProto.java | 43 - .../logging/v2/ConfigServiceV2Grpc.java | 356 --- .../logging/v2/CreateLogMetricRequest.java | 722 ----- .../v2/CreateLogMetricRequestOrBuilder.java | 59 - .../google/logging/v2/CreateSinkRequest.java | 722 ----- .../v2/CreateSinkRequestOrBuilder.java | 59 - .../logging/v2/DeleteLogMetricRequest.java | 483 --- .../v2/DeleteLogMetricRequestOrBuilder.java | 29 - .../google/logging/v2/DeleteLogRequest.java | 483 --- .../logging/v2/DeleteLogRequestOrBuilder.java | 29 - .../google/logging/v2/DeleteSinkRequest.java | 483 --- .../v2/DeleteSinkRequestOrBuilder.java | 29 - .../logging/v2/GetLogMetricRequest.java | 483 --- .../v2/GetLogMetricRequestOrBuilder.java | 29 - .../com/google/logging/v2/GetSinkRequest.java | 483 --- .../logging/v2/GetSinkRequestOrBuilder.java | 29 - .../logging/v2/ListLogEntriesRequest.java | 1182 -------- .../v2/ListLogEntriesRequestOrBuilder.java | 139 - .../logging/v2/ListLogEntriesResponse.java | 923 ------ .../v2/ListLogEntriesResponseOrBuilder.java | 75 - .../logging/v2/ListLogMetricsRequest.java | 748 ----- .../v2/ListLogMetricsRequestOrBuilder.java | 68 - .../logging/v2/ListLogMetricsResponse.java | 923 ------ .../v2/ListLogMetricsResponseOrBuilder.java | 75 - 
...stMonitoredResourceDescriptorsRequest.java | 583 ---- ...edResourceDescriptorsRequestOrBuilder.java | 46 - ...tMonitoredResourceDescriptorsResponse.java | 923 ------ ...dResourceDescriptorsResponseOrBuilder.java | 75 - .../google/logging/v2/ListSinksRequest.java | 748 ----- .../logging/v2/ListSinksRequestOrBuilder.java | 68 - .../google/logging/v2/ListSinksResponse.java | 923 ------ .../v2/ListSinksResponseOrBuilder.java | 75 - .../java/com/google/logging/v2/LogEntry.java | 2648 ----------------- .../google/logging/v2/LogEntryOperation.java | 790 ----- .../v2/LogEntryOperationOrBuilder.java | 69 - .../google/logging/v2/LogEntryOrBuilder.java | 277 -- .../com/google/logging/v2/LogEntryProto.java | 108 - .../java/com/google/logging/v2/LogMetric.java | 934 ------ .../google/logging/v2/LogMetricOrBuilder.java | 75 - .../java/com/google/logging/v2/LogSink.java | 1115 ------- .../google/logging/v2/LogSinkOrBuilder.java | 108 - .../com/google/logging/v2/LoggingConfig.java | 162 - .../com/google/logging/v2/LoggingMetrics.java | 159 - .../com/google/logging/v2/LoggingProto.java | 208 -- .../logging/v2/LoggingServiceV2Grpc.java | 348 --- .../logging/v2/MetricsServiceV2Grpc.java | 356 --- .../logging/v2/ReadLogEntriesRequest.java | 1094 ------- .../v2/ReadLogEntriesRequestOrBuilder.java | 122 - .../logging/v2/ReadLogEntriesResponse.java | 946 ------ .../v2/ReadLogEntriesResponseOrBuilder.java | 80 - .../logging/v2/UpdateLogMetricRequest.java | 748 ----- .../v2/UpdateLogMetricRequestOrBuilder.java | 66 - .../google/logging/v2/UpdateSinkRequest.java | 748 ----- .../v2/UpdateSinkRequestOrBuilder.java | 66 - .../logging/v2/WriteLogEntriesRequest.java | 1356 --------- .../v2/WriteLogEntriesRequestOrBuilder.java | 123 - .../logging/v2/WriteLogEntriesResponse.java | 324 -- .../v2/WriteLogEntriesResponseOrBuilder.java | 9 - gcloud-java-logging/pom.xml | 23 +- .../logging/spi/v2/ConfigServiceV2Api.java | 342 +-- .../spi/v2/ConfigServiceV2Settings.java | 289 ++ 
.../logging/spi/v2/LoggingServiceV2Api.java | 372 +-- .../spi/v2/LoggingServiceV2Settings.java | 370 +++ .../logging/spi/v2/MetricsServiceV2Api.java | 341 +-- .../spi/v2/MetricsServiceV2Settings.java | 293 ++ .../v2}/testing/LocalLoggingHelper.java | 2 +- .../spi/v2/testing/LocalLoggingImpl.java | 0 .../spi/v2/LoggingServiceV2ApiTest.java | 26 +- 78 files changed, 3826 insertions(+), 27712 deletions(-) create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java create mode 100644 gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestProto.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverity.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverityProto.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ConfigServiceV2Grpc.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequest.java delete mode 100644 
gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponseOrBuilder.java delete mode 100644 
gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponseOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponseOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponseOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntry.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperation.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperationOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryProto.java delete mode 100644 
gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetric.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetricOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSink.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSinkOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingConfig.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingMetrics.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingProto.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingServiceV2Grpc.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/MetricsServiceV2Grpc.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponseOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequest.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequest.java delete mode 100644 
gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequestOrBuilder.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponse.java delete mode 100644 gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponseOrBuilder.java create mode 100644 gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java create mode 100644 gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java create mode 100644 gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java rename gcloud-java-logging/src/main/java/com/google/gcloud/logging/{ => spi/v2}/testing/LocalLoggingHelper.java (97%) rename gcloud-java-logging/src/main/java/com/google/gcloud/{logging => pubsub}/spi/v2/testing/LocalLoggingImpl.java (100%) diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java new file mode 100644 index 000000000000..5cd5202961ab --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java @@ -0,0 +1,473 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_config.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; +import com.google.logging.v2.CreateSinkRequest; +import com.google.logging.v2.DeleteSinkRequest; +import com.google.logging.v2.GetSinkRequest; +import com.google.logging.v2.ListSinksRequest; +import com.google.logging.v2.ListSinksResponse; +import com.google.logging.v2.LogSink; +import com.google.logging.v2.UpdateSinkRequest; +import com.google.protobuf.Empty; +import io.grpc.ManagedChannel; +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. 
+/** + * Service Description: See src/api/google/logging/v2/logging.proto for documentation + * + * + * + */ +@javax.annotation.Generated("by GAPIC") +public class ConfigServiceV2Api implements AutoCloseable { + + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a project resource. + * + * + * + */ + private static final PathTemplate PROJECT_PATH_TEMPLATE = + PathTemplate.create("projects/{project}"); + + /** + * A PathTemplate representing the fully-qualified path to represent + * a sink resource. + * + * + * + */ + private static final PathTemplate SINK_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/sinks/{sink}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a project resource. + * + * + * + */ + public static final String formatProjectPath(String project) { + return PROJECT_PATH_TEMPLATE.instantiate("project", project); + } + + /** + * Formats a string containing the fully-qualified path to represent + * a sink resource. + * + * + * + */ + public static final String formatSinkPath(String project, String sink) { + return SINK_PATH_TEMPLATE.instantiate("project", project, "sink", sink); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a project resource. + * + * + * + */ + public static final String parseProjectFromProjectPath(String projectPath) { + return PROJECT_PATH_TEMPLATE.parse(projectPath).get("project"); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a sink resource. 
+ * + * + * + */ + public static final String parseProjectFromSinkPath(String sinkPath) { + return SINK_PATH_TEMPLATE.parse(sinkPath).get("project"); + } + + /** + * Parses the sink from the given fully-qualified path which + * represents a sink resource. + * + * + * + */ + public static final String parseSinkFromSinkPath(String sinkPath) { + return SINK_PATH_TEMPLATE.parse(sinkPath).get("sink"); + } + } + + // ======== + // Members + // ======== + + private final ManagedChannel channel; + private final List closeables = new ArrayList<>(); + + private final ApiCallable listSinksCallable; + private final ApiCallable> listSinksIterableCallable; + private final ApiCallable getSinkCallable; + private final ApiCallable createSinkCallable; + private final ApiCallable updateSinkCallable; + private final ApiCallable deleteSinkCallable; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of ConfigServiceV2Api with default settings. + * + * + * + */ + public static ConfigServiceV2Api create() throws IOException { + return create(ConfigServiceV2Settings.create()); + } + + /** + * Constructs an instance of ConfigServiceV2Api, using the given settings. The channels are created based + * on the settings passed in, or defaults for any settings that are not set. + * + * + * + */ + public static ConfigServiceV2Api create(ConfigServiceV2Settings settings) throws IOException { + return new ConfigServiceV2Api(settings); + } + + /** + * Constructs an instance of ConfigServiceV2Api, using the given settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. 
+ * + * + * + */ + protected ConfigServiceV2Api(ConfigServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.listSinksCallable = settings.listSinksMethod().build(settings); + this.listSinksIterableCallable = settings.listSinksMethod().buildPageStreaming(settings); + this.getSinkCallable = settings.getSinkMethod().build(settings); + this.createSinkCallable = settings.createSinkMethod().build(settings); + this.updateSinkCallable = settings.updateSinkMethod().build(settings); + this.deleteSinkCallable = settings.deleteSinkMethod().build(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); + } + + // ============= + // Service Calls + // ============= + + // ----- listSinks ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists sinks. + * + * + * + */ + public Iterable listSinks(String projectName) { + ListSinksRequest request = ListSinksRequest.newBuilder().setProjectName(projectName).build(); + return listSinks(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists sinks. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public Iterable listSinks(ListSinksRequest request) { + return listSinksIterableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists sinks. + * + * + * + */ + public ApiCallable> listSinksIterableCallable() { + return listSinksIterableCallable; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists sinks. 
+ * + * + * + */ + public ApiCallable listSinksCallable() { + return listSinksCallable; + } + + // ----- getSink ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a sink. + * + * + * + * + * @param sinkName The resource name of the sink to return. + * Example: `"projects/my-project-id/sinks/my-sink-id"`. + */ + public LogSink getSink(String sinkName) { + GetSinkRequest request = GetSinkRequest.newBuilder().setSinkName(sinkName).build(); + + return getSink(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a sink. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + private LogSink getSink(GetSinkRequest request) { + return getSinkCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a sink. + * + * + * + */ + public ApiCallable getSinkCallable() { + return getSinkCallable; + } + + // ----- createSink ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a sink. + * + * + * + * + * @param projectName The resource name of the project in which to create the sink. + * Example: `"projects/my-project-id"`. + * + * The new sink must be provided in the request. + * @param sink The new sink, which must not have an identifier that already + * exists. + */ + public LogSink createSink(String projectName, LogSink sink) { + CreateSinkRequest request = + CreateSinkRequest.newBuilder().setProjectName(projectName).setSink(sink).build(); + + return createSink(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a sink. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. 
+ */ + public LogSink createSink(CreateSinkRequest request) { + return createSinkCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a sink. + * + * + * + */ + public ApiCallable createSinkCallable() { + return createSinkCallable; + } + + // ----- updateSink ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a sink. + * + * + * + * + * @param sinkName The resource name of the sink to update. + * Example: `"projects/my-project-id/sinks/my-sink-id"`. + * + * The updated sink must be provided in the request and have the + * same name that is specified in `sinkName`. If the sink does not + * exist, it is created. + * @param sink The updated sink, whose name must be the same as the sink + * identifier in `sinkName`. If `sinkName` does not exist, then + * this method creates a new sink. + */ + public LogSink updateSink(String sinkName, LogSink sink) { + UpdateSinkRequest request = + UpdateSinkRequest.newBuilder().setSinkName(sinkName).setSink(sink).build(); + + return updateSink(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a sink. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public LogSink updateSink(UpdateSinkRequest request) { + return updateSinkCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a sink. + * + * + * + */ + public ApiCallable updateSinkCallable() { + return updateSinkCallable; + } + + // ----- deleteSink ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a sink. 
+ * + * + * + * + * @param sinkName The resource name of the sink to delete. + * Example: `"projects/my-project-id/sinks/my-sink-id"`. + */ + public void deleteSink(String sinkName) { + DeleteSinkRequest request = DeleteSinkRequest.newBuilder().setSinkName(sinkName).build(); + + deleteSink(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a sink. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + private void deleteSink(DeleteSinkRequest request) { + deleteSinkCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a sink. + * + * + * + */ + public ApiCallable deleteSinkCallable() { + return deleteSinkCallable; + } + + // ======== + // Cleanup + // ======== + + /** + * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately + * cancelled. + * + * + * + */ + @Override + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } + } + + // ======== + // Manually-added methods: add custom (non-generated) methods after this point. + // ======== + +} diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java new file mode 100644 index 000000000000..e4571fcd200b --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java @@ -0,0 +1,289 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_config.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! 
+ */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.ConfigServiceV2Grpc; +import com.google.logging.v2.CreateSinkRequest; +import com.google.logging.v2.DeleteSinkRequest; +import com.google.logging.v2.GetSinkRequest; +import com.google.logging.v2.ListSinksRequest; +import com.google.logging.v2.ListSinksResponse; +import com.google.logging.v2.LogSink; +import com.google.logging.v2.UpdateSinkRequest; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class ConfigServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. + * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. + * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. 
+ */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final PageStreamingApiCallableBuilder + listSinksMethod; + private final ApiCallableBuilder getSinkMethod; + private final ApiCallableBuilder createSinkMethod; + private final ApiCallableBuilder updateSinkMethod; + private final ApiCallableBuilder deleteSinkMethod; + private final ImmutableList allMethods; + + public MethodBuilders() { + listSinksMethod = + new PageStreamingApiCallableBuilder<>( + ConfigServiceV2Grpc.METHOD_LIST_SINKS, LIST_SINKS_PAGE_STR_DESC); + 
listSinksMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listSinksMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + getSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_GET_SINK); + getSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + getSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + createSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_CREATE_SINK); + createSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + createSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + updateSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_UPDATE_SINK); + updateSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + updateSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + deleteSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_DELETE_SINK); + deleteSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + ImmutableList.builder() + .add( + listSinksMethod, + getSinkMethod, + createSinkMethod, + updateSinkMethod, + deleteSinkMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of ConfigServiceV2Settings with default settings. + * + * + * + */ + public static ConfigServiceV2Settings create() { + ConfigServiceV2Settings settings = new ConfigServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of ConfigServiceV2Settings with default settings. 
This is protected so that it + * is easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected ConfigServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listSinks. + * + * + * + */ + public PageStreamingApiCallableBuilder + listSinksMethod() { + return methods.listSinksMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method getSink. + * + * + * + */ + public ApiCallableBuilder getSinkMethod() { + return methods.getSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method createSink. + * + * + * + */ + public ApiCallableBuilder createSinkMethod() { + return methods.createSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method updateSink. + * + * + * + */ + public ApiCallableBuilder updateSinkMethod() { + return methods.updateSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteSink.
+ * + * + * + */ + public ApiCallableBuilder deleteSinkMethod() { + return methods.deleteSinkMethod; + } + + private static PageDescriptor + LIST_SINKS_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListSinksRequest injectToken(ListSinksRequest payload, Object token) { + return ListSinksRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListSinksResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListSinksResponse payload) { + return payload.getSinksList(); + } + }; +} diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java new file mode 100644 index 000000000000..1c705866d051 --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java @@ -0,0 +1,507 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging.proto + * and updates to that file get reflected here through a refresh process. 
+ * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.MonitoredResource; +import com.google.api.MonitoredResourceDescriptor; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; +import com.google.logging.v2.DeleteLogRequest; +import com.google.logging.v2.ListLogEntriesRequest; +import com.google.logging.v2.ListLogEntriesResponse; +import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest; +import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse; +import com.google.logging.v2.LogEntry; +import com.google.logging.v2.ReadLogEntriesRequest; +import com.google.logging.v2.ReadLogEntriesResponse; +import com.google.logging.v2.WriteLogEntriesRequest; +import com.google.logging.v2.WriteLogEntriesResponse; +import com.google.protobuf.Empty; +import io.grpc.ManagedChannel; +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. +/** + * Service Description: Service for ingesting and querying logs. 
+ * + * + * + */ +@javax.annotation.Generated("by GAPIC") +public class LoggingServiceV2Api implements AutoCloseable { + + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a log resource. + * + * + * + */ + private static final PathTemplate LOG_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/logs/{log}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a log resource. + * + * + * + */ + public static final String formatLogPath(String project, String log) { + return LOG_PATH_TEMPLATE.instantiate("project", project, "log", log); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a log resource. + * + * + * + */ + public static final String parseProjectFromLogPath(String logPath) { + return LOG_PATH_TEMPLATE.parse(logPath).get("project"); + } + + /** + * Parses the log from the given fully-qualified path which + * represents a log resource. 
+ * + * + */ + public static final String parseLogFromLogPath(String logPath) { + return LOG_PATH_TEMPLATE.parse(logPath).get("log"); + } + } + + // ======== + // Members + // ======== + + private final ManagedChannel channel; + private final List closeables = new ArrayList<>(); + + private final ApiCallable deleteLogCallable; + private final ApiCallable + writeLogEntriesCallable; + private final ApiCallable listLogEntriesCallable; + private final ApiCallable> + listLogEntriesIterableCallable; + private final ApiCallable readLogEntriesCallable; + private final ApiCallable> + readLogEntriesIterableCallable; + private final ApiCallable< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse> + listMonitoredResourceDescriptorsCallable; + private final ApiCallable< + ListMonitoredResourceDescriptorsRequest, Iterable> + listMonitoredResourceDescriptorsIterableCallable; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of LoggingServiceV2Api with default settings. + * + * + * + */ + public static LoggingServiceV2Api create() throws IOException { + return create(LoggingServiceV2Settings.create()); + } + + /** + * Constructs an instance of LoggingServiceV2Api, using the given settings. The channels are created based + * on the settings passed in, or defaults for any settings that are not set. + * + * + * + */ + public static LoggingServiceV2Api create(LoggingServiceV2Settings settings) throws IOException { + return new LoggingServiceV2Api(settings); + } + + /** + * Constructs an instance of LoggingServiceV2Api, using the given settings. This is protected so that it + * is easy to make a subclass, but otherwise, the static factory methods should be preferred.
+ * + * + * + */ + protected LoggingServiceV2Api(LoggingServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.deleteLogCallable = settings.deleteLogMethod().build(settings); + this.writeLogEntriesCallable = settings.writeLogEntriesMethod().build(settings); + this.listLogEntriesCallable = settings.listLogEntriesMethod().build(settings); + this.listLogEntriesIterableCallable = + settings.listLogEntriesMethod().buildPageStreaming(settings); + this.readLogEntriesCallable = settings.readLogEntriesMethod().build(settings); + this.readLogEntriesIterableCallable = + settings.readLogEntriesMethod().buildPageStreaming(settings); + this.listMonitoredResourceDescriptorsCallable = + settings.listMonitoredResourceDescriptorsMethod().build(settings); + this.listMonitoredResourceDescriptorsIterableCallable = + settings.listMonitoredResourceDescriptorsMethod().buildPageStreaming(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); + } + + // ============= + // Service Calls + // ============= + + // ----- deleteLog ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a log and all its log entries. + * The log will reappear if it receives new entries. + * + * + * + * + * @param logName Required. The resource name of the log to delete. Example: + * `"projects/my-project/logs/syslog"`. + */ + public void deleteLog(String logName) { + DeleteLogRequest request = DeleteLogRequest.newBuilder().setLogName(logName).build(); + + deleteLog(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a log and all its log entries. + * The log will reappear if it receives new entries. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. 
+ */ + private void deleteLog(DeleteLogRequest request) { + deleteLogCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a log and all its log entries. + * The log will reappear if it receives new entries. + * + * + * + */ + public ApiCallable deleteLogCallable() { + return deleteLogCallable; + } + + // ----- writeLogEntries ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Writes log entries to Cloud Logging. + * All log entries in Cloud Logging are written by this method. + * + * + * + * + * @param logName Optional. A default log resource name for those log entries in `entries` + * that do not specify their own `logName`. Example: + * `"projects/my-project/logs/syslog"`. See + * [LogEntry][google.logging.v2.LogEntry]. + * @param resource Optional. A default monitored resource for those log entries in `entries` + * that do not specify their own `resource`. + * @param labels Optional. User-defined `key:value` items that are added to + * the `labels` field of each log entry in `entries`, except when a log + * entry specifies its own 'key:value' item with the same key. + * Example: `{ "size": "large", "color":"red" }` + * @param entries Required. The log entries to write. The log entries must have values for + * all required fields. + */ + public WriteLogEntriesResponse writeLogEntries( + String logName, + MonitoredResource resource, + Map labels, + List entries) { + WriteLogEntriesRequest request = + WriteLogEntriesRequest.newBuilder() + .setLogName(logName) + .setResource(resource) + .putAllLabels(labels) + .addAllEntries(entries) + .build(); + + return writeLogEntries(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Writes log entries to Cloud Logging. + * All log entries in Cloud Logging are written by this method. 
+ * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public WriteLogEntriesResponse writeLogEntries(WriteLogEntriesRequest request) { + return writeLogEntriesCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Writes log entries to Cloud Logging. + * All log entries in Cloud Logging are written by this method. + * + * + * + */ + public ApiCallable writeLogEntriesCallable() { + return writeLogEntriesCallable; + } + + // ----- listLogEntries ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists log entries. + * Use this method to examine log entries from Cloud Logging. + * See [Exporting Logs](/logging/docs/export) for other ways to copy + * log entries out of Cloud Logging. + * + * + * + */ + public Iterable listLogEntries(List projectIds, String filter, String orderBy) { + ListLogEntriesRequest request = + ListLogEntriesRequest.newBuilder() + .addAllProjectIds(projectIds) + .setFilter(filter) + .setOrderBy(orderBy) + .build(); + return listLogEntries(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists log entries. + * Use this method to examine log entries from Cloud Logging. + * See [Exporting Logs](/logging/docs/export) for other ways to copy + * log entries out of Cloud Logging. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public Iterable listLogEntries(ListLogEntriesRequest request) { + return listLogEntriesIterableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists log entries. + * Use this method to examine log entries from Cloud Logging. 
+ * See [Exporting Logs](/logging/docs/export) for other ways to copy + * log entries out of Cloud Logging. + * + * + * + */ + public ApiCallable> listLogEntriesIterableCallable() { + return listLogEntriesIterableCallable; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists log entries. + * Use this method to examine log entries from Cloud Logging. + * See [Exporting Logs](/logging/docs/export) for other ways to copy + * log entries out of Cloud Logging. + * + * + * + */ + public ApiCallable listLogEntriesCallable() { + return listLogEntriesCallable; + } + + // ----- readLogEntries ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Streaming read of log entries. Similar to `List`, this method is intended + * for a large volume of log entries. + * + * + * + */ + public Iterable readLogEntries(List projectIds, String filter, String orderBy) { + ReadLogEntriesRequest request = + ReadLogEntriesRequest.newBuilder() + .addAllProjectIds(projectIds) + .setFilter(filter) + .setOrderBy(orderBy) + .build(); + return readLogEntries(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Streaming read of log entries. Similar to `List`, this method is intended + * for a large volume of log entries. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public Iterable readLogEntries(ReadLogEntriesRequest request) { + return readLogEntriesIterableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Streaming read of log entries. Similar to `List`, this method is intended + * for a large volume of log entries. 
+ * + * + * + */ + public ApiCallable> readLogEntriesIterableCallable() { + return readLogEntriesIterableCallable; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Streaming read of log entries. Similar to `List`, this method is intended + * for a large volume of log entries. + * + * + * + */ + public ApiCallable readLogEntriesCallable() { + return readLogEntriesCallable; + } + + // ----- listMonitoredResourceDescriptors ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists monitored resource descriptors that are used by Cloud Logging. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public Iterable listMonitoredResourceDescriptors( + ListMonitoredResourceDescriptorsRequest request) { + return listMonitoredResourceDescriptorsIterableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists monitored resource descriptors that are used by Cloud Logging. + * + * + * + */ + public ApiCallable> + listMonitoredResourceDescriptorsIterableCallable() { + return listMonitoredResourceDescriptorsIterableCallable; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists monitored resource descriptors that are used by Cloud Logging. + * + * + * + */ + public ApiCallable< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse> + listMonitoredResourceDescriptorsCallable() { + return listMonitoredResourceDescriptorsCallable; + } + + // ======== + // Cleanup + // ======== + + /** + * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately + * cancelled. 
+ * + * + * + */ + @Override + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } + } + + // ======== + // Manually-added methods: add custom (non-generated) methods after this point. + // ======== + +} diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java new file mode 100644 index 000000000000..a7e5035bc1e5 --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java @@ -0,0 +1,370 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. 
New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.MonitoredResourceDescriptor; +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.DeleteLogRequest; +import com.google.logging.v2.ListLogEntriesRequest; +import com.google.logging.v2.ListLogEntriesResponse; +import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest; +import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse; +import com.google.logging.v2.LogEntry; +import com.google.logging.v2.LoggingServiceV2Grpc; +import com.google.logging.v2.ReadLogEntriesRequest; +import com.google.logging.v2.ReadLogEntriesResponse; +import com.google.logging.v2.WriteLogEntriesRequest; +import com.google.logging.v2.WriteLogEntriesResponse; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class LoggingServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. 
+ * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. + * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. + */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final ApiCallableBuilder deleteLogMethod; + private final ApiCallableBuilder + writeLogEntriesMethod; + private final PageStreamingApiCallableBuilder< + ListLogEntriesRequest, ListLogEntriesResponse, LogEntry> + listLogEntriesMethod; + 
private final PageStreamingApiCallableBuilder< + ReadLogEntriesRequest, ReadLogEntriesResponse, LogEntry> + readLogEntriesMethod; + private final PageStreamingApiCallableBuilder< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + listMonitoredResourceDescriptorsMethod; + private final ImmutableList allMethods; + + public MethodBuilders() { + deleteLogMethod = new ApiCallableBuilder<>(LoggingServiceV2Grpc.METHOD_DELETE_LOG); + deleteLogMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteLogMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + writeLogEntriesMethod = + new ApiCallableBuilder<>(LoggingServiceV2Grpc.METHOD_WRITE_LOG_ENTRIES); + writeLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + writeLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + listLogEntriesMethod = + new PageStreamingApiCallableBuilder<>( + LoggingServiceV2Grpc.METHOD_LIST_LOG_ENTRIES, LIST_LOG_ENTRIES_PAGE_STR_DESC); + listLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + readLogEntriesMethod = + new PageStreamingApiCallableBuilder<>( + LoggingServiceV2Grpc.METHOD_READ_LOG_ENTRIES, READ_LOG_ENTRIES_PAGE_STR_DESC); + readLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + readLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + listMonitoredResourceDescriptorsMethod = + new PageStreamingApiCallableBuilder<>( + LoggingServiceV2Grpc.METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, + LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC); + listMonitoredResourceDescriptorsMethod.setRetryableCodes( + RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listMonitoredResourceDescriptorsMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + 
ImmutableList.builder() + .add( + deleteLogMethod, + writeLogEntriesMethod, + listLogEntriesMethod, + readLogEntriesMethod, + listMonitoredResourceDescriptorsMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of LoggingServiceV2Settings with default settings. + * + * + * + */ + public static LoggingServiceV2Settings create() { + LoggingServiceV2Settings settings = new LoggingServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of LoggingServiceV2Settings with default settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected LoggingServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteLog. + * + * + * + */ + public ApiCallableBuilder deleteLogMethod() { + return methods.deleteLogMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method writeLogEntries. + * + * + * + */ + public ApiCallableBuilder + writeLogEntriesMethod() { + return methods.writeLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listLogEntries. + * + * + * + */ + public PageStreamingApiCallableBuilder + listLogEntriesMethod() { + return methods.listLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method readLogEntries. 
+ * + * + * + */ + public PageStreamingApiCallableBuilder + readLogEntriesMethod() { + return methods.readLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listMonitoredResourceDescriptors. + * + * + * + */ + public PageStreamingApiCallableBuilder< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + listMonitoredResourceDescriptorsMethod() { + return methods.listMonitoredResourceDescriptorsMethod; + } + + private static PageDescriptor + LIST_LOG_ENTRIES_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListLogEntriesRequest injectToken(ListLogEntriesRequest payload, Object token) { + return ListLogEntriesRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListLogEntriesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListLogEntriesResponse payload) { + return payload.getEntriesList(); + } + }; + + private static PageDescriptor + READ_LOG_ENTRIES_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ReadLogEntriesRequest injectToken(ReadLogEntriesRequest payload, Object token) { + return ReadLogEntriesRequest.newBuilder(payload) + .setResumeToken((String) token) + .build(); + } + + @Override + public Object extractNextToken(ReadLogEntriesResponse payload) { + return payload.getResumeToken(); + } + + @Override + public Iterable extractResources(ReadLogEntriesResponse payload) { + return payload.getEntriesList(); + } + }; + + private static PageDescriptor< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC = + new PageDescriptor< + ListMonitoredResourceDescriptorsRequest, 
ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor>() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListMonitoredResourceDescriptorsRequest injectToken( + ListMonitoredResourceDescriptorsRequest payload, Object token) { + return ListMonitoredResourceDescriptorsRequest.newBuilder(payload) + .setPageToken((String) token) + .build(); + } + + @Override + public Object extractNextToken(ListMonitoredResourceDescriptorsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListMonitoredResourceDescriptorsResponse payload) { + return payload.getResourceDescriptorsList(); + } + }; +} diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java new file mode 100644 index 000000000000..fb919e1e017f --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java @@ -0,0 +1,478 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_metrics.proto + * and updates to that file get reflected here through a refresh process. 
+ * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; +import com.google.logging.v2.CreateLogMetricRequest; +import com.google.logging.v2.DeleteLogMetricRequest; +import com.google.logging.v2.GetLogMetricRequest; +import com.google.logging.v2.ListLogMetricsRequest; +import com.google.logging.v2.ListLogMetricsResponse; +import com.google.logging.v2.LogMetric; +import com.google.logging.v2.UpdateLogMetricRequest; +import com.google.protobuf.Empty; +import io.grpc.ManagedChannel; +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. +/** + * Service Description: See src/api/google/logging/v1/logging.proto for documentation + * + * + * + */ +@javax.annotation.Generated("by GAPIC") +public class MetricsServiceV2Api implements AutoCloseable { + + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a project resource. 
+ * + * + * + */ + private static final PathTemplate PROJECT_PATH_TEMPLATE = + PathTemplate.create("projects/{project}"); + + /** + * A PathTemplate representing the fully-qualified path to represent + * a metric resource. + * + * + * + */ + private static final PathTemplate METRIC_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/metrics/{metric}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a project resource. + * + * + * + */ + public static final String formatProjectPath(String project) { + return PROJECT_PATH_TEMPLATE.instantiate("project", project); + } + + /** + * Formats a string containing the fully-qualified path to represent + * a metric resource. + * + * + * + */ + public static final String formatMetricPath(String project, String metric) { + return METRIC_PATH_TEMPLATE.instantiate("project", project, "metric", metric); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a project resource. + * + * + * + */ + public static final String parseProjectFromProjectPath(String projectPath) { + return PROJECT_PATH_TEMPLATE.parse(projectPath).get("project"); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a metric resource. + * + * + * + */ + public static final String parseProjectFromMetricPath(String metricPath) { + return METRIC_PATH_TEMPLATE.parse(metricPath).get("project"); + } + + /** + * Parses the metric from the given fully-qualified path which + * represents a metric resource. 
+ * + * + * + */ + public static final String parseMetricFromMetricPath(String metricPath) { + return METRIC_PATH_TEMPLATE.parse(metricPath).get("metric"); + } + } + + // ======== + // Members + // ======== + + private final ManagedChannel channel; + private final List closeables = new ArrayList<>(); + + private final ApiCallable listLogMetricsCallable; + private final ApiCallable> + listLogMetricsIterableCallable; + private final ApiCallable getLogMetricCallable; + private final ApiCallable createLogMetricCallable; + private final ApiCallable updateLogMetricCallable; + private final ApiCallable deleteLogMetricCallable; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of MetricsServiceV2Api with default settings. + * + * + * + */ + public static MetricsServiceV2Api create() throws IOException { + return create(MetricsServiceV2Settings.create()); + } + + /** + * Constructs an instance of MetricsServiceV2Api, using the given settings. The channels are created based + * on the settings passed in, or defaults for any settings that are not set. + * + * + * + */ + public static MetricsServiceV2Api create(MetricsServiceV2Settings settings) throws IOException { + return new MetricsServiceV2Api(settings); + } + + /** + * Constructs an instance of MetricsServiceV2Api, using the given settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. 
+ * + * + * + */ + protected MetricsServiceV2Api(MetricsServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.listLogMetricsCallable = settings.listLogMetricsMethod().build(settings); + this.listLogMetricsIterableCallable = + settings.listLogMetricsMethod().buildPageStreaming(settings); + this.getLogMetricCallable = settings.getLogMetricMethod().build(settings); + this.createLogMetricCallable = settings.createLogMetricMethod().build(settings); + this.updateLogMetricCallable = settings.updateLogMetricMethod().build(settings); + this.deleteLogMetricCallable = settings.deleteLogMetricMethod().build(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); + } + + // ============= + // Service Calls + // ============= + + // ----- listLogMetrics ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists logs-based metrics. + * + * + * + */ + public Iterable listLogMetrics(String projectName) { + ListLogMetricsRequest request = + ListLogMetricsRequest.newBuilder().setProjectName(projectName).build(); + return listLogMetrics(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists logs-based metrics. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public Iterable listLogMetrics(ListLogMetricsRequest request) { + return listLogMetricsIterableCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Lists logs-based metrics. + * + * + * + */ + public ApiCallable> listLogMetricsIterableCallable() { + return listLogMetricsIterableCallable; + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. 
+ /** + * Lists logs-based metrics. + * + * + * + */ + public ApiCallable listLogMetricsCallable() { + return listLogMetricsCallable; + } + + // ----- getLogMetric ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a logs-based metric. + * + * + * + * + * @param metricName The resource name of the desired metric. + * Example: `"projects/my-project-id/metrics/my-metric-id"`. + */ + public LogMetric getLogMetric(String metricName) { + GetLogMetricRequest request = + GetLogMetricRequest.newBuilder().setMetricName(metricName).build(); + + return getLogMetric(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a logs-based metric. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + private LogMetric getLogMetric(GetLogMetricRequest request) { + return getLogMetricCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Gets a logs-based metric. + * + * + * + */ + public ApiCallable getLogMetricCallable() { + return getLogMetricCallable; + } + + // ----- createLogMetric ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a logs-based metric. + * + * + * + * + * @param projectName The resource name of the project in which to create the metric. + * Example: `"projects/my-project-id"`. + * + * The new metric must be provided in the request. + * @param metric The new logs-based metric, which must not have an identifier that + * already exists. 
+ */ + public LogMetric createLogMetric(String projectName, LogMetric metric) { + CreateLogMetricRequest request = + CreateLogMetricRequest.newBuilder().setProjectName(projectName).setMetric(metric).build(); + + return createLogMetric(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a logs-based metric. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public LogMetric createLogMetric(CreateLogMetricRequest request) { + return createLogMetricCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates a logs-based metric. + * + * + * + */ + public ApiCallable createLogMetricCallable() { + return createLogMetricCallable; + } + + // ----- updateLogMetric ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a logs-based metric. + * + * + * + * + * @param metricName The resource name of the metric to update. + * Example: `"projects/my-project-id/metrics/my-metric-id"`. + * + * The updated metric must be provided in the request and have the + * same identifier that is specified in `metricName`. + * If the metric does not exist, it is created. + * @param metric The updated metric, whose name must be the same as the + * metric identifier in `metricName`. If `metricName` does not + * exist, then a new metric is created. + */ + public LogMetric updateLogMetric(String metricName, LogMetric metric) { + UpdateLogMetricRequest request = + UpdateLogMetricRequest.newBuilder().setMetricName(metricName).setMetric(metric).build(); + + return updateLogMetric(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a logs-based metric. 
+ * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + public LogMetric updateLogMetric(UpdateLogMetricRequest request) { + return updateLogMetricCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Creates or updates a logs-based metric. + * + * + * + */ + public ApiCallable updateLogMetricCallable() { + return updateLogMetricCallable; + } + + // ----- deleteLogMetric ----- + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a logs-based metric. + * + * + * + * + * @param metricName The resource name of the metric to delete. + * Example: `"projects/my-project-id/metrics/my-metric-id"`. + */ + public void deleteLogMetric(String metricName) { + DeleteLogMetricRequest request = + DeleteLogMetricRequest.newBuilder().setMetricName(metricName).build(); + + deleteLogMetric(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a logs-based metric. + * + * + * + * + * @param request The request object containing all of the parameters for the API call. + */ + private void deleteLogMetric(DeleteLogMetricRequest request) { + deleteLogMetricCallable().call(request); + } + + // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. + /** + * Deletes a logs-based metric. + * + * + * + */ + public ApiCallable deleteLogMetricCallable() { + return deleteLogMetricCallable; + } + + // ======== + // Cleanup + // ======== + + /** + * Initiates an orderly shutdown in which preexisting calls continue but new calls are immediately + * cancelled. 
+ * + * + * + */ + @Override + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } + } + + // ======== + // Manually-added methods: add custom (non-generated) methods after this point. + // ======== + +} diff --git a/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java new file mode 100644 index 000000000000..e2c93e20a3f4 --- /dev/null +++ b/gcloud-java-logging/baseline/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java @@ -0,0 +1,293 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_metrics.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. 
New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.CreateLogMetricRequest; +import com.google.logging.v2.DeleteLogMetricRequest; +import com.google.logging.v2.GetLogMetricRequest; +import com.google.logging.v2.ListLogMetricsRequest; +import com.google.logging.v2.ListLogMetricsResponse; +import com.google.logging.v2.LogMetric; +import com.google.logging.v2.MetricsServiceV2Grpc; +import com.google.logging.v2.UpdateLogMetricRequest; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class MetricsServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. + * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. 
+ * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. + */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final PageStreamingApiCallableBuilder< + ListLogMetricsRequest, ListLogMetricsResponse, LogMetric> + listLogMetricsMethod; + private final ApiCallableBuilder getLogMetricMethod; + private final ApiCallableBuilder createLogMetricMethod; + private final ApiCallableBuilder updateLogMetricMethod; + private final ApiCallableBuilder deleteLogMetricMethod; + private final ImmutableList 
allMethods; + + public MethodBuilders() { + listLogMetricsMethod = + new PageStreamingApiCallableBuilder<>( + MetricsServiceV2Grpc.METHOD_LIST_LOG_METRICS, LIST_LOG_METRICS_PAGE_STR_DESC); + listLogMetricsMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listLogMetricsMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + getLogMetricMethod = new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_GET_LOG_METRIC); + getLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + getLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + createLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_CREATE_LOG_METRIC); + createLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + createLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + updateLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_UPDATE_LOG_METRIC); + updateLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + updateLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + deleteLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_DELETE_LOG_METRIC); + deleteLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + ImmutableList.builder() + .add( + listLogMetricsMethod, + getLogMetricMethod, + createLogMetricMethod, + updateLogMetricMethod, + deleteLogMetricMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of MetricsServiceV2Settings with default settings. 
+ * + * + * + */ + public static MetricsServiceV2Settings create() { + MetricsServiceV2Settings settings = new MetricsServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of MetricsServiceV2Settings with default settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected MetricsServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listLogMetrics. + * + * + * + */ + public PageStreamingApiCallableBuilder + listLogMetricsMethod() { + return methods.listLogMetricsMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method getLogMetric. + * + * + * + */ + public ApiCallableBuilder getLogMetricMethod() { + return methods.getLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method createLogMetric. + * + * + * + */ + public ApiCallableBuilder createLogMetricMethod() { + return methods.createLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method updateLogMetric. + * + * + * + */ + public ApiCallableBuilder updateLogMetricMethod() { + return methods.updateLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteLogMetric. 
+ * + * + * + */ + public ApiCallableBuilder deleteLogMetricMethod() { + return methods.deleteLogMetricMethod; + } + + private static PageDescriptor + LIST_LOG_METRICS_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListLogMetricsRequest injectToken(ListLogMetricsRequest payload, Object token) { + return ListLogMetricsRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListLogMetricsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListLogMetricsResponse payload) { + return payload.getMetricsList(); + } + }; +} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequest.java deleted file mode 100644 index fa61d3d297d4..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequest.java +++ /dev/null @@ -1,1478 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/type/http_request.proto - -package com.google.logging.type; - -/** - * Protobuf type {@code google.logging.type.HttpRequest} - * - *
- * A common proto for logging HTTP requests.
- * 
- */ -public final class HttpRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.type.HttpRequest) - HttpRequestOrBuilder { - // Use HttpRequest.newBuilder() to construct. - private HttpRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private HttpRequest() { - requestMethod_ = ""; - requestUrl_ = ""; - requestSize_ = 0L; - status_ = 0; - responseSize_ = 0L; - userAgent_ = ""; - remoteIp_ = ""; - referer_ = ""; - cacheHit_ = false; - validatedWithOriginServer_ = false; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private HttpRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - requestMethod_ = s; - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - requestUrl_ = s; - break; - } - case 24: { - - requestSize_ = input.readInt64(); - break; - } - case 32: { - - status_ = input.readInt32(); - break; - } - case 40: { - - responseSize_ = input.readInt64(); - break; - } - case 50: { - String s = input.readStringRequireUtf8(); - - userAgent_ = s; - break; - } - case 58: { - String s = input.readStringRequireUtf8(); - - remoteIp_ = s; - break; - } - case 66: { - String s = input.readStringRequireUtf8(); - - referer_ = s; - break; - } - case 72: { - - cacheHit_ = input.readBool(); - break; - } - case 80: { - - validatedWithOriginServer_ = input.readBool(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - 
throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.type.HttpRequestProto.internal_static_google_logging_type_HttpRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.type.HttpRequestProto.internal_static_google_logging_type_HttpRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.type.HttpRequest.class, com.google.logging.type.HttpRequest.Builder.class); - } - - public static final int REQUEST_METHOD_FIELD_NUMBER = 1; - private volatile java.lang.Object requestMethod_; - /** - * optional string request_method = 1; - * - *
-   * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-   * 
- */ - public java.lang.String getRequestMethod() { - java.lang.Object ref = requestMethod_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - requestMethod_ = s; - return s; - } - } - /** - * optional string request_method = 1; - * - *
-   * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-   * 
- */ - public com.google.protobuf.ByteString - getRequestMethodBytes() { - java.lang.Object ref = requestMethod_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - requestMethod_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int REQUEST_URL_FIELD_NUMBER = 2; - private volatile java.lang.Object requestUrl_; - /** - * optional string request_url = 2; - * - *
-   * Contains the scheme (http|https), the host name, the path and the query
-   * portion of the URL that was requested.
-   * 
- */ - public java.lang.String getRequestUrl() { - java.lang.Object ref = requestUrl_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - requestUrl_ = s; - return s; - } - } - /** - * optional string request_url = 2; - * - *
-   * Contains the scheme (http|https), the host name, the path and the query
-   * portion of the URL that was requested.
-   * 
- */ - public com.google.protobuf.ByteString - getRequestUrlBytes() { - java.lang.Object ref = requestUrl_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - requestUrl_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int REQUEST_SIZE_FIELD_NUMBER = 3; - private long requestSize_; - /** - * optional int64 request_size = 3; - * - *
-   * Size of the HTTP request message in bytes, including request headers and
-   * the request body.
-   * 
- */ - public long getRequestSize() { - return requestSize_; - } - - public static final int STATUS_FIELD_NUMBER = 4; - private int status_; - /** - * optional int32 status = 4; - * - *
-   * A response code indicates the status of response, e.g., 200.
-   * 
- */ - public int getStatus() { - return status_; - } - - public static final int RESPONSE_SIZE_FIELD_NUMBER = 5; - private long responseSize_; - /** - * optional int64 response_size = 5; - * - *
-   * Size of the HTTP response message in bytes sent back to the client,
-   * including response headers and response body.
-   * 
- */ - public long getResponseSize() { - return responseSize_; - } - - public static final int USER_AGENT_FIELD_NUMBER = 6; - private volatile java.lang.Object userAgent_; - /** - * optional string user_agent = 6; - * - *
-   * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-   * Windows 98; Q312461; .NET CLR 1.0.3705)".
-   * 
- */ - public java.lang.String getUserAgent() { - java.lang.Object ref = userAgent_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - userAgent_ = s; - return s; - } - } - /** - * optional string user_agent = 6; - * - *
-   * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-   * Windows 98; Q312461; .NET CLR 1.0.3705)".
-   * 
- */ - public com.google.protobuf.ByteString - getUserAgentBytes() { - java.lang.Object ref = userAgent_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - userAgent_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int REMOTE_IP_FIELD_NUMBER = 7; - private volatile java.lang.Object remoteIp_; - /** - * optional string remote_ip = 7; - * - *
-   * IP address of the client who issues the HTTP request. Could be either IPv4
-   * or IPv6.
-   * 
- */ - public java.lang.String getRemoteIp() { - java.lang.Object ref = remoteIp_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - remoteIp_ = s; - return s; - } - } - /** - * optional string remote_ip = 7; - * - *
-   * IP address of the client who issues the HTTP request. Could be either IPv4
-   * or IPv6.
-   * 
- */ - public com.google.protobuf.ByteString - getRemoteIpBytes() { - java.lang.Object ref = remoteIp_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - remoteIp_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int REFERER_FIELD_NUMBER = 8; - private volatile java.lang.Object referer_; - /** - * optional string referer = 8; - * - *
-   * Referer (a.k.a. referrer) URL of request, as defined in
-   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-   * 
- */ - public java.lang.String getReferer() { - java.lang.Object ref = referer_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - referer_ = s; - return s; - } - } - /** - * optional string referer = 8; - * - *
-   * Referer (a.k.a. referrer) URL of request, as defined in
-   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-   * 
- */ - public com.google.protobuf.ByteString - getRefererBytes() { - java.lang.Object ref = referer_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - referer_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int CACHE_HIT_FIELD_NUMBER = 9; - private boolean cacheHit_; - /** - * optional bool cache_hit = 9; - * - *
-   * Whether or not an entity was served from cache
-   * (with or without validation).
-   * 
- */ - public boolean getCacheHit() { - return cacheHit_; - } - - public static final int VALIDATED_WITH_ORIGIN_SERVER_FIELD_NUMBER = 10; - private boolean validatedWithOriginServer_; - /** - * optional bool validated_with_origin_server = 10; - * - *
-   * Whether or not the response was validated with the origin server before
-   * being served from cache. This field is only meaningful if cache_hit is
-   * True.
-   * 
- */ - public boolean getValidatedWithOriginServer() { - return validatedWithOriginServer_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getRequestMethodBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, requestMethod_); - } - if (!getRequestUrlBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, requestUrl_); - } - if (requestSize_ != 0L) { - output.writeInt64(3, requestSize_); - } - if (status_ != 0) { - output.writeInt32(4, status_); - } - if (responseSize_ != 0L) { - output.writeInt64(5, responseSize_); - } - if (!getUserAgentBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 6, userAgent_); - } - if (!getRemoteIpBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 7, remoteIp_); - } - if (!getRefererBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 8, referer_); - } - if (cacheHit_ != false) { - output.writeBool(9, cacheHit_); - } - if (validatedWithOriginServer_ != false) { - output.writeBool(10, validatedWithOriginServer_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getRequestMethodBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, requestMethod_); - } - if (!getRequestUrlBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, requestUrl_); - } - if (requestSize_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(3, requestSize_); - } - if (status_ != 0) { - size += com.google.protobuf.CodedOutputStream - 
.computeInt32Size(4, status_); - } - if (responseSize_ != 0L) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(5, responseSize_); - } - if (!getUserAgentBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(6, userAgent_); - } - if (!getRemoteIpBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(7, remoteIp_); - } - if (!getRefererBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(8, referer_); - } - if (cacheHit_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(9, cacheHit_); - } - if (validatedWithOriginServer_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(10, validatedWithOriginServer_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.type.HttpRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.type.HttpRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.type.HttpRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.type.HttpRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.type.HttpRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static 
com.google.logging.type.HttpRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.type.HttpRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.type.HttpRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.type.HttpRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.type.HttpRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.type.HttpRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.type.HttpRequest} - * - *
-   * A common proto for logging HTTP requests.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.type.HttpRequest) - com.google.logging.type.HttpRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.type.HttpRequestProto.internal_static_google_logging_type_HttpRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.type.HttpRequestProto.internal_static_google_logging_type_HttpRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.type.HttpRequest.class, com.google.logging.type.HttpRequest.Builder.class); - } - - // Construct using com.google.logging.type.HttpRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - requestMethod_ = ""; - - requestUrl_ = ""; - - requestSize_ = 0L; - - status_ = 0; - - responseSize_ = 0L; - - userAgent_ = ""; - - remoteIp_ = ""; - - referer_ = ""; - - cacheHit_ = false; - - validatedWithOriginServer_ = false; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.type.HttpRequestProto.internal_static_google_logging_type_HttpRequest_descriptor; - } - - public com.google.logging.type.HttpRequest getDefaultInstanceForType() { - return com.google.logging.type.HttpRequest.getDefaultInstance(); - } - - public com.google.logging.type.HttpRequest build() { - com.google.logging.type.HttpRequest result = buildPartial(); - if (!result.isInitialized()) { - 
throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.type.HttpRequest buildPartial() { - com.google.logging.type.HttpRequest result = new com.google.logging.type.HttpRequest(this); - result.requestMethod_ = requestMethod_; - result.requestUrl_ = requestUrl_; - result.requestSize_ = requestSize_; - result.status_ = status_; - result.responseSize_ = responseSize_; - result.userAgent_ = userAgent_; - result.remoteIp_ = remoteIp_; - result.referer_ = referer_; - result.cacheHit_ = cacheHit_; - result.validatedWithOriginServer_ = validatedWithOriginServer_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.type.HttpRequest) { - return mergeFrom((com.google.logging.type.HttpRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.type.HttpRequest other) { - if (other == com.google.logging.type.HttpRequest.getDefaultInstance()) return this; - if (!other.getRequestMethod().isEmpty()) { - requestMethod_ = other.requestMethod_; - onChanged(); - } - if (!other.getRequestUrl().isEmpty()) { - requestUrl_ = other.requestUrl_; - onChanged(); - } - if (other.getRequestSize() != 0L) { - setRequestSize(other.getRequestSize()); - } - if (other.getStatus() != 0) { - setStatus(other.getStatus()); - } - if (other.getResponseSize() != 0L) { - setResponseSize(other.getResponseSize()); - } - if (!other.getUserAgent().isEmpty()) { - userAgent_ = other.userAgent_; - onChanged(); - } - if (!other.getRemoteIp().isEmpty()) { - remoteIp_ = other.remoteIp_; - onChanged(); - } - if (!other.getReferer().isEmpty()) { - referer_ = other.referer_; - onChanged(); - } - if (other.getCacheHit() != false) { - setCacheHit(other.getCacheHit()); - } - if (other.getValidatedWithOriginServer() != false) { - setValidatedWithOriginServer(other.getValidatedWithOriginServer()); - } - onChanged(); - return 
this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.type.HttpRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.type.HttpRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object requestMethod_ = ""; - /** - * optional string request_method = 1; - * - *
-     * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-     * 
- */ - public java.lang.String getRequestMethod() { - java.lang.Object ref = requestMethod_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - requestMethod_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string request_method = 1; - * - *
-     * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-     * 
- */ - public com.google.protobuf.ByteString - getRequestMethodBytes() { - java.lang.Object ref = requestMethod_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - requestMethod_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string request_method = 1; - * - *
-     * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-     * 
- */ - public Builder setRequestMethod( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - requestMethod_ = value; - onChanged(); - return this; - } - /** - * optional string request_method = 1; - * - *
-     * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-     * 
- */ - public Builder clearRequestMethod() { - - requestMethod_ = getDefaultInstance().getRequestMethod(); - onChanged(); - return this; - } - /** - * optional string request_method = 1; - * - *
-     * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-     * 
- */ - public Builder setRequestMethodBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - requestMethod_ = value; - onChanged(); - return this; - } - - private java.lang.Object requestUrl_ = ""; - /** - * optional string request_url = 2; - * - *
-     * Contains the scheme (http|https), the host name, the path and the query
-     * portion of the URL that was requested.
-     * 
- */ - public java.lang.String getRequestUrl() { - java.lang.Object ref = requestUrl_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - requestUrl_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string request_url = 2; - * - *
-     * Contains the scheme (http|https), the host name, the path and the query
-     * portion of the URL that was requested.
-     * 
- */ - public com.google.protobuf.ByteString - getRequestUrlBytes() { - java.lang.Object ref = requestUrl_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - requestUrl_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string request_url = 2; - * - *
-     * Contains the scheme (http|https), the host name, the path and the query
-     * portion of the URL that was requested.
-     * 
- */ - public Builder setRequestUrl( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - requestUrl_ = value; - onChanged(); - return this; - } - /** - * optional string request_url = 2; - * - *
-     * Contains the scheme (http|https), the host name, the path and the query
-     * portion of the URL that was requested.
-     * 
- */ - public Builder clearRequestUrl() { - - requestUrl_ = getDefaultInstance().getRequestUrl(); - onChanged(); - return this; - } - /** - * optional string request_url = 2; - * - *
-     * Contains the scheme (http|https), the host name, the path and the query
-     * portion of the URL that was requested.
-     * 
- */ - public Builder setRequestUrlBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - requestUrl_ = value; - onChanged(); - return this; - } - - private long requestSize_ ; - /** - * optional int64 request_size = 3; - * - *
-     * Size of the HTTP request message in bytes, including request headers and
-     * the request body.
-     * 
- */ - public long getRequestSize() { - return requestSize_; - } - /** - * optional int64 request_size = 3; - * - *
-     * Size of the HTTP request message in bytes, including request headers and
-     * the request body.
-     * 
- */ - public Builder setRequestSize(long value) { - - requestSize_ = value; - onChanged(); - return this; - } - /** - * optional int64 request_size = 3; - * - *
-     * Size of the HTTP request message in bytes, including request headers and
-     * the request body.
-     * 
- */ - public Builder clearRequestSize() { - - requestSize_ = 0L; - onChanged(); - return this; - } - - private int status_ ; - /** - * optional int32 status = 4; - * - *
-     * A response code indicates the status of response, e.g., 200.
-     * 
- */ - public int getStatus() { - return status_; - } - /** - * optional int32 status = 4; - * - *
-     * A response code indicates the status of response, e.g., 200.
-     * 
- */ - public Builder setStatus(int value) { - - status_ = value; - onChanged(); - return this; - } - /** - * optional int32 status = 4; - * - *
-     * A response code indicates the status of response, e.g., 200.
-     * 
- */ - public Builder clearStatus() { - - status_ = 0; - onChanged(); - return this; - } - - private long responseSize_ ; - /** - * optional int64 response_size = 5; - * - *
-     * Size of the HTTP response message in bytes sent back to the client,
-     * including response headers and response body.
-     * 
- */ - public long getResponseSize() { - return responseSize_; - } - /** - * optional int64 response_size = 5; - * - *
-     * Size of the HTTP response message in bytes sent back to the client,
-     * including response headers and response body.
-     * 
- */ - public Builder setResponseSize(long value) { - - responseSize_ = value; - onChanged(); - return this; - } - /** - * optional int64 response_size = 5; - * - *
-     * Size of the HTTP response message in bytes sent back to the client,
-     * including response headers and response body.
-     * 
- */ - public Builder clearResponseSize() { - - responseSize_ = 0L; - onChanged(); - return this; - } - - private java.lang.Object userAgent_ = ""; - /** - * optional string user_agent = 6; - * - *
-     * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-     * Windows 98; Q312461; .NET CLR 1.0.3705)".
-     * 
- */ - public java.lang.String getUserAgent() { - java.lang.Object ref = userAgent_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - userAgent_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string user_agent = 6; - * - *
-     * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-     * Windows 98; Q312461; .NET CLR 1.0.3705)".
-     * 
- */ - public com.google.protobuf.ByteString - getUserAgentBytes() { - java.lang.Object ref = userAgent_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - userAgent_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string user_agent = 6; - * - *
-     * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-     * Windows 98; Q312461; .NET CLR 1.0.3705)".
-     * 
- */ - public Builder setUserAgent( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - userAgent_ = value; - onChanged(); - return this; - } - /** - * optional string user_agent = 6; - * - *
-     * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-     * Windows 98; Q312461; .NET CLR 1.0.3705)".
-     * 
- */ - public Builder clearUserAgent() { - - userAgent_ = getDefaultInstance().getUserAgent(); - onChanged(); - return this; - } - /** - * optional string user_agent = 6; - * - *
-     * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-     * Windows 98; Q312461; .NET CLR 1.0.3705)".
-     * 
- */ - public Builder setUserAgentBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - userAgent_ = value; - onChanged(); - return this; - } - - private java.lang.Object remoteIp_ = ""; - /** - * optional string remote_ip = 7; - * - *
-     * IP address of the client who issues the HTTP request. Could be either IPv4
-     * or IPv6.
-     * 
- */ - public java.lang.String getRemoteIp() { - java.lang.Object ref = remoteIp_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - remoteIp_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string remote_ip = 7; - * - *
-     * IP address of the client who issues the HTTP request. Could be either IPv4
-     * or IPv6.
-     * 
- */ - public com.google.protobuf.ByteString - getRemoteIpBytes() { - java.lang.Object ref = remoteIp_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - remoteIp_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string remote_ip = 7; - * - *
-     * IP address of the client who issues the HTTP request. Could be either IPv4
-     * or IPv6.
-     * 
- */ - public Builder setRemoteIp( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - remoteIp_ = value; - onChanged(); - return this; - } - /** - * optional string remote_ip = 7; - * - *
-     * IP address of the client who issues the HTTP request. Could be either IPv4
-     * or IPv6.
-     * 
- */ - public Builder clearRemoteIp() { - - remoteIp_ = getDefaultInstance().getRemoteIp(); - onChanged(); - return this; - } - /** - * optional string remote_ip = 7; - * - *
-     * IP address of the client who issues the HTTP request. Could be either IPv4
-     * or IPv6.
-     * 
- */ - public Builder setRemoteIpBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - remoteIp_ = value; - onChanged(); - return this; - } - - private java.lang.Object referer_ = ""; - /** - * optional string referer = 8; - * - *
-     * Referer (a.k.a. referrer) URL of request, as defined in
-     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-     * 
- */ - public java.lang.String getReferer() { - java.lang.Object ref = referer_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - referer_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string referer = 8; - * - *
-     * Referer (a.k.a. referrer) URL of request, as defined in
-     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-     * 
- */ - public com.google.protobuf.ByteString - getRefererBytes() { - java.lang.Object ref = referer_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - referer_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string referer = 8; - * - *
-     * Referer (a.k.a. referrer) URL of request, as defined in
-     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-     * 
- */ - public Builder setReferer( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - referer_ = value; - onChanged(); - return this; - } - /** - * optional string referer = 8; - * - *
-     * Referer (a.k.a. referrer) URL of request, as defined in
-     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-     * 
- */ - public Builder clearReferer() { - - referer_ = getDefaultInstance().getReferer(); - onChanged(); - return this; - } - /** - * optional string referer = 8; - * - *
-     * Referer (a.k.a. referrer) URL of request, as defined in
-     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-     * 
- */ - public Builder setRefererBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - referer_ = value; - onChanged(); - return this; - } - - private boolean cacheHit_ ; - /** - * optional bool cache_hit = 9; - * - *
-     * Whether or not an entity was served from cache
-     * (with or without validation).
-     * 
- */ - public boolean getCacheHit() { - return cacheHit_; - } - /** - * optional bool cache_hit = 9; - * - *
-     * Whether or not an entity was served from cache
-     * (with or without validation).
-     * 
- */ - public Builder setCacheHit(boolean value) { - - cacheHit_ = value; - onChanged(); - return this; - } - /** - * optional bool cache_hit = 9; - * - *
-     * Whether or not an entity was served from cache
-     * (with or without validation).
-     * 
- */ - public Builder clearCacheHit() { - - cacheHit_ = false; - onChanged(); - return this; - } - - private boolean validatedWithOriginServer_ ; - /** - * optional bool validated_with_origin_server = 10; - * - *
-     * Whether or not the response was validated with the origin server before
-     * being served from cache. This field is only meaningful if cache_hit is
-     * True.
-     * 
- */ - public boolean getValidatedWithOriginServer() { - return validatedWithOriginServer_; - } - /** - * optional bool validated_with_origin_server = 10; - * - *
-     * Whether or not the response was validated with the origin server before
-     * being served from cache. This field is only meaningful if cache_hit is
-     * True.
-     * 
- */ - public Builder setValidatedWithOriginServer(boolean value) { - - validatedWithOriginServer_ = value; - onChanged(); - return this; - } - /** - * optional bool validated_with_origin_server = 10; - * - *
-     * Whether or not the response was validated with the origin server before
-     * being served from cache. This field is only meaningful if cache_hit is
-     * True.
-     * 
- */ - public Builder clearValidatedWithOriginServer() { - - validatedWithOriginServer_ = false; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.type.HttpRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.type.HttpRequest) - private static final com.google.logging.type.HttpRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.type.HttpRequest(); - } - - public static com.google.logging.type.HttpRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public HttpRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new HttpRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.type.HttpRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestOrBuilder.java deleted file mode 100644 index c89c1863f697..000000000000 --- 
a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestOrBuilder.java +++ /dev/null @@ -1,157 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/type/http_request.proto - -package com.google.logging.type; - -public interface HttpRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.type.HttpRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string request_method = 1; - * - *
-   * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-   * 
- */ - java.lang.String getRequestMethod(); - /** - * optional string request_method = 1; - * - *
-   * Request method, such as `GET`, `HEAD`, `PUT` or `POST`.
-   * 
- */ - com.google.protobuf.ByteString - getRequestMethodBytes(); - - /** - * optional string request_url = 2; - * - *
-   * Contains the scheme (http|https), the host name, the path and the query
-   * portion of the URL that was requested.
-   * 
- */ - java.lang.String getRequestUrl(); - /** - * optional string request_url = 2; - * - *
-   * Contains the scheme (http|https), the host name, the path and the query
-   * portion of the URL that was requested.
-   * 
- */ - com.google.protobuf.ByteString - getRequestUrlBytes(); - - /** - * optional int64 request_size = 3; - * - *
-   * Size of the HTTP request message in bytes, including request headers and
-   * the request body.
-   * 
- */ - long getRequestSize(); - - /** - * optional int32 status = 4; - * - *
-   * A response code indicates the status of response, e.g., 200.
-   * 
- */ - int getStatus(); - - /** - * optional int64 response_size = 5; - * - *
-   * Size of the HTTP response message in bytes sent back to the client,
-   * including response headers and response body.
-   * 
- */ - long getResponseSize(); - - /** - * optional string user_agent = 6; - * - *
-   * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-   * Windows 98; Q312461; .NET CLR 1.0.3705)".
-   * 
- */ - java.lang.String getUserAgent(); - /** - * optional string user_agent = 6; - * - *
-   * User agent sent by the client, e.g., "Mozilla/4.0 (compatible; MSIE 6.0;
-   * Windows 98; Q312461; .NET CLR 1.0.3705)".
-   * 
- */ - com.google.protobuf.ByteString - getUserAgentBytes(); - - /** - * optional string remote_ip = 7; - * - *
-   * IP address of the client who issues the HTTP request. Could be either IPv4
-   * or IPv6.
-   * 
- */ - java.lang.String getRemoteIp(); - /** - * optional string remote_ip = 7; - * - *
-   * IP address of the client who issues the HTTP request. Could be either IPv4
-   * or IPv6.
-   * 
- */ - com.google.protobuf.ByteString - getRemoteIpBytes(); - - /** - * optional string referer = 8; - * - *
-   * Referer (a.k.a. referrer) URL of request, as defined in
-   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-   * 
- */ - java.lang.String getReferer(); - /** - * optional string referer = 8; - * - *
-   * Referer (a.k.a. referrer) URL of request, as defined in
-   * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html.
-   * 
- */ - com.google.protobuf.ByteString - getRefererBytes(); - - /** - * optional bool cache_hit = 9; - * - *
-   * Whether or not an entity was served from cache
-   * (with or without validation).
-   * 
- */ - boolean getCacheHit(); - - /** - * optional bool validated_with_origin_server = 10; - * - *
-   * Whether or not the response was validated with the origin server before
-   * being served from cache. This field is only meaningful if cache_hit is
-   * True.
-   * 
- */ - boolean getValidatedWithOriginServer(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestProto.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestProto.java deleted file mode 100644 index 8c21f4a9c355..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/HttpRequestProto.java +++ /dev/null @@ -1,56 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/type/http_request.proto - -package com.google.logging.type; - -public final class HttpRequestProto { - private HttpRequestProto() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_type_HttpRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_type_HttpRequest_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n&google/logging/type/http_request.proto" + - "\022\023google.logging.type\"\350\001\n\013HttpRequest\022\026\n" + - "\016request_method\030\001 \001(\t\022\023\n\013request_url\030\002 \001" + - "(\t\022\024\n\014request_size\030\003 \001(\003\022\016\n\006status\030\004 \001(\005" + - "\022\025\n\rresponse_size\030\005 \001(\003\022\022\n\nuser_agent\030\006 " + - "\001(\t\022\021\n\tremote_ip\030\007 \001(\t\022\017\n\007referer\030\010 \001(\t\022" + - "\021\n\tcache_hit\030\t \001(\010\022$\n\034validated_with_ori" + - "gin_server\030\n \001(\010B-\n\027com.google.logging.t" + - "ypeB\020HttpRequestProtoP\001b\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new 
com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - internal_static_google_logging_type_HttpRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_logging_type_HttpRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_type_HttpRequest_descriptor, - new java.lang.String[] { "RequestMethod", "RequestUrl", "RequestSize", "Status", "ResponseSize", "UserAgent", "RemoteIp", "Referer", "CacheHit", "ValidatedWithOriginServer", }); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverity.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverity.java deleted file mode 100644 index bfb7f0635dad..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverity.java +++ /dev/null @@ -1,245 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/type/log_severity.proto - -package com.google.logging.type; - -/** - * Protobuf enum {@code google.logging.type.LogSeverity} - * - *
- * The severity of the event described in a log entry.  These guideline severity
- * levels are ordered, with numerically smaller levels treated as less severe
- * than numerically larger levels. If the source of the log entries uses a
- * different set of severity levels, the client should select the closest
- * corresponding `LogSeverity` value. For example, Java's FINE, FINER, and
- * FINEST levels might all map to `LogSeverity.DEBUG`. If the original severity
- * code must be preserved, it can be stored in the payload.
- * 
- */ -public enum LogSeverity - implements com.google.protobuf.ProtocolMessageEnum { - /** - * DEFAULT = 0; - * - *
-   * The log entry has no assigned severity level.
-   * 
- */ - DEFAULT(0, 0), - /** - * DEBUG = 100; - * - *
-   * Debug or trace information.
-   * 
- */ - DEBUG(1, 100), - /** - * INFO = 200; - * - *
-   * Routine information, such as ongoing status or performance.
-   * 
- */ - INFO(2, 200), - /** - * NOTICE = 300; - * - *
-   * Normal but significant events, such as start up, shut down, or
-   * configuration.
-   * 
- */ - NOTICE(3, 300), - /** - * WARNING = 400; - * - *
-   * Warning events might cause problems.
-   * 
- */ - WARNING(4, 400), - /** - * ERROR = 500; - * - *
-   * Error events are likely to cause problems.
-   * 
- */ - ERROR(5, 500), - /** - * CRITICAL = 600; - * - *
-   * Critical events cause more severe problems or brief outages.
-   * 
- */ - CRITICAL(6, 600), - /** - * ALERT = 700; - * - *
-   * A person must take an action immediately.
-   * 
- */ - ALERT(7, 700), - /** - * EMERGENCY = 800; - * - *
-   * One or more systems are unusable.
-   * 
- */ - EMERGENCY(8, 800), - UNRECOGNIZED(-1, -1), - ; - - /** - * DEFAULT = 0; - * - *
-   * The log entry has no assigned severity level.
-   * 
- */ - public static final int DEFAULT_VALUE = 0; - /** - * DEBUG = 100; - * - *
-   * Debug or trace information.
-   * 
- */ - public static final int DEBUG_VALUE = 100; - /** - * INFO = 200; - * - *
-   * Routine information, such as ongoing status or performance.
-   * 
- */ - public static final int INFO_VALUE = 200; - /** - * NOTICE = 300; - * - *
-   * Normal but significant events, such as start up, shut down, or
-   * configuration.
-   * 
- */ - public static final int NOTICE_VALUE = 300; - /** - * WARNING = 400; - * - *
-   * Warning events might cause problems.
-   * 
- */ - public static final int WARNING_VALUE = 400; - /** - * ERROR = 500; - * - *
-   * Error events are likely to cause problems.
-   * 
- */ - public static final int ERROR_VALUE = 500; - /** - * CRITICAL = 600; - * - *
-   * Critical events cause more severe problems or brief outages.
-   * 
- */ - public static final int CRITICAL_VALUE = 600; - /** - * ALERT = 700; - * - *
-   * A person must take an action immediately.
-   * 
- */ - public static final int ALERT_VALUE = 700; - /** - * EMERGENCY = 800; - * - *
-   * One or more systems are unusable.
-   * 
- */ - public static final int EMERGENCY_VALUE = 800; - - - public final int getNumber() { - if (index == -1) { - throw new java.lang.IllegalArgumentException( - "Can't get the number of an unknown enum value."); - } - return value; - } - - public static LogSeverity valueOf(int value) { - switch (value) { - case 0: return DEFAULT; - case 100: return DEBUG; - case 200: return INFO; - case 300: return NOTICE; - case 400: return WARNING; - case 500: return ERROR; - case 600: return CRITICAL; - case 700: return ALERT; - case 800: return EMERGENCY; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static final com.google.protobuf.Internal.EnumLiteMap< - LogSeverity> internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public LogSeverity findValueByNumber(int number) { - return LogSeverity.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return com.google.logging.type.LogSeverityProto.getDescriptor() - .getEnumTypes().get(0); - } - - private static final LogSeverity[] VALUES = values(); - - public static LogSeverity valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - if (desc.getIndex() == -1) { - return UNRECOGNIZED; - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private LogSeverity(int index, int value) { - this.index = index; - this.value = value; - } - - // 
@@protoc_insertion_point(enum_scope:google.logging.type.LogSeverity) -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverityProto.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverityProto.java deleted file mode 100644 index aaad0508e345..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/type/LogSeverityProto.java +++ /dev/null @@ -1,43 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/type/log_severity.proto - -package com.google.logging.type; - -public final class LogSeverityProto { - private LogSeverityProto() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n&google/logging/type/log_severity.proto" + - "\022\023google.logging.type*\202\001\n\013LogSeverity\022\013\n" + - "\007DEFAULT\020\000\022\t\n\005DEBUG\020d\022\t\n\004INFO\020\310\001\022\013\n\006NOTI" + - "CE\020\254\002\022\014\n\007WARNING\020\220\003\022\n\n\005ERROR\020\364\003\022\r\n\010CRITI" + - "CAL\020\330\004\022\n\n\005ALERT\020\274\005\022\016\n\tEMERGENCY\020\240\006B-\n\027co" + - "m.google.logging.typeB\020LogSeverityProtoP" + - "\001b\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ConfigServiceV2Grpc.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ConfigServiceV2Grpc.java deleted file mode 100644 index 908932183558..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ConfigServiceV2Grpc.java +++ /dev/null @@ -1,356 +0,0 @@ -package com.google.logging.v2; - -import static io.grpc.stub.ClientCalls.asyncUnaryCall; -import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; -import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; -import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; -import static io.grpc.stub.ClientCalls.blockingUnaryCall; -import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; -import static io.grpc.stub.ClientCalls.futureUnaryCall; -import static io.grpc.MethodDescriptor.generateFullMethodName; -import static io.grpc.stub.ServerCalls.asyncUnaryCall; -import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; -import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; -import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; - -@javax.annotation.Generated("by gRPC proto compiler") -public class ConfigServiceV2Grpc { - - private ConfigServiceV2Grpc() {} - - public static final String SERVICE_NAME = "google.logging.v2.ConfigServiceV2"; - - // Static method descriptors that strictly reflect the proto. 
- @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_LIST_SINKS = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.ConfigServiceV2", "ListSinks"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListSinksRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListSinksResponse.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_GET_SINK = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.ConfigServiceV2", "GetSink"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.GetSinkRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogSink.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_CREATE_SINK = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.ConfigServiceV2", "CreateSink"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.CreateSinkRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogSink.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_UPDATE_SINK = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.ConfigServiceV2", "UpdateSink"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.UpdateSinkRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogSink.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_DELETE_SINK = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - 
generateFullMethodName( - "google.logging.v2.ConfigServiceV2", "DeleteSink"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.DeleteSinkRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.getDefaultInstance())); - - public static ConfigServiceV2Stub newStub(io.grpc.Channel channel) { - return new ConfigServiceV2Stub(channel); - } - - public static ConfigServiceV2BlockingStub newBlockingStub( - io.grpc.Channel channel) { - return new ConfigServiceV2BlockingStub(channel); - } - - public static ConfigServiceV2FutureStub newFutureStub( - io.grpc.Channel channel) { - return new ConfigServiceV2FutureStub(channel); - } - - public static interface ConfigServiceV2 { - - public void listSinks(com.google.logging.v2.ListSinksRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void getSink(com.google.logging.v2.GetSinkRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void createSink(com.google.logging.v2.CreateSinkRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void updateSink(com.google.logging.v2.UpdateSinkRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void deleteSink(com.google.logging.v2.DeleteSinkRequest request, - io.grpc.stub.StreamObserver responseObserver); - } - - public static interface ConfigServiceV2BlockingClient { - - public com.google.logging.v2.ListSinksResponse listSinks(com.google.logging.v2.ListSinksRequest request); - - public com.google.logging.v2.LogSink getSink(com.google.logging.v2.GetSinkRequest request); - - public com.google.logging.v2.LogSink createSink(com.google.logging.v2.CreateSinkRequest request); - - public com.google.logging.v2.LogSink updateSink(com.google.logging.v2.UpdateSinkRequest request); - - public com.google.protobuf.Empty deleteSink(com.google.logging.v2.DeleteSinkRequest request); - } - - public static interface ConfigServiceV2FutureClient { - - public 
com.google.common.util.concurrent.ListenableFuture listSinks( - com.google.logging.v2.ListSinksRequest request); - - public com.google.common.util.concurrent.ListenableFuture getSink( - com.google.logging.v2.GetSinkRequest request); - - public com.google.common.util.concurrent.ListenableFuture createSink( - com.google.logging.v2.CreateSinkRequest request); - - public com.google.common.util.concurrent.ListenableFuture updateSink( - com.google.logging.v2.UpdateSinkRequest request); - - public com.google.common.util.concurrent.ListenableFuture deleteSink( - com.google.logging.v2.DeleteSinkRequest request); - } - - public static class ConfigServiceV2Stub extends io.grpc.stub.AbstractStub - implements ConfigServiceV2 { - private ConfigServiceV2Stub(io.grpc.Channel channel) { - super(channel); - } - - private ConfigServiceV2Stub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected ConfigServiceV2Stub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new ConfigServiceV2Stub(channel, callOptions); - } - - @java.lang.Override - public void listSinks(com.google.logging.v2.ListSinksRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_LIST_SINKS, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void getSink(com.google.logging.v2.GetSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_GET_SINK, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void createSink(com.google.logging.v2.CreateSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_CREATE_SINK, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void updateSink(com.google.logging.v2.UpdateSinkRequest request, - 
io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_UPDATE_SINK, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void deleteSink(com.google.logging.v2.DeleteSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_DELETE_SINK, getCallOptions()), request, responseObserver); - } - } - - public static class ConfigServiceV2BlockingStub extends io.grpc.stub.AbstractStub - implements ConfigServiceV2BlockingClient { - private ConfigServiceV2BlockingStub(io.grpc.Channel channel) { - super(channel); - } - - private ConfigServiceV2BlockingStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected ConfigServiceV2BlockingStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new ConfigServiceV2BlockingStub(channel, callOptions); - } - - @java.lang.Override - public com.google.logging.v2.ListSinksResponse listSinks(com.google.logging.v2.ListSinksRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_LIST_SINKS, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.LogSink getSink(com.google.logging.v2.GetSinkRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_GET_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.LogSink createSink(com.google.logging.v2.CreateSinkRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_CREATE_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.LogSink updateSink(com.google.logging.v2.UpdateSinkRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_UPDATE_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.protobuf.Empty 
deleteSink(com.google.logging.v2.DeleteSinkRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_DELETE_SINK, getCallOptions()), request); - } - } - - public static class ConfigServiceV2FutureStub extends io.grpc.stub.AbstractStub - implements ConfigServiceV2FutureClient { - private ConfigServiceV2FutureStub(io.grpc.Channel channel) { - super(channel); - } - - private ConfigServiceV2FutureStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected ConfigServiceV2FutureStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new ConfigServiceV2FutureStub(channel, callOptions); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture listSinks( - com.google.logging.v2.ListSinksRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_LIST_SINKS, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture getSink( - com.google.logging.v2.GetSinkRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_GET_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture createSink( - com.google.logging.v2.CreateSinkRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_CREATE_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture updateSink( - com.google.logging.v2.UpdateSinkRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_UPDATE_SINK, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture deleteSink( - com.google.logging.v2.DeleteSinkRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_DELETE_SINK, getCallOptions()), request); - } - } - - public static 
io.grpc.ServerServiceDefinition bindService( - final ConfigServiceV2 serviceImpl) { - return io.grpc.ServerServiceDefinition.builder(SERVICE_NAME) - .addMethod( - METHOD_LIST_SINKS, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.ListSinksRequest, - com.google.logging.v2.ListSinksResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.ListSinksRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.listSinks(request, responseObserver); - } - })) - .addMethod( - METHOD_GET_SINK, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.GetSinkRequest, - com.google.logging.v2.LogSink>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.GetSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.getSink(request, responseObserver); - } - })) - .addMethod( - METHOD_CREATE_SINK, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.CreateSinkRequest, - com.google.logging.v2.LogSink>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.CreateSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.createSink(request, responseObserver); - } - })) - .addMethod( - METHOD_UPDATE_SINK, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.UpdateSinkRequest, - com.google.logging.v2.LogSink>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.UpdateSinkRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.updateSink(request, responseObserver); - } - })) - .addMethod( - METHOD_DELETE_SINK, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.DeleteSinkRequest, - com.google.protobuf.Empty>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.DeleteSinkRequest request, - io.grpc.stub.StreamObserver 
responseObserver) { - serviceImpl.deleteSink(request, responseObserver); - } - })).build(); - } -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequest.java deleted file mode 100644 index aa5238715d12..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequest.java +++ /dev/null @@ -1,722 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.CreateLogMetricRequest} - * - *
- * The parameters to CreateLogMetric.
- * 
- */ -public final class CreateLogMetricRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.CreateLogMetricRequest) - CreateLogMetricRequestOrBuilder { - // Use CreateLogMetricRequest.newBuilder() to construct. - private CreateLogMetricRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private CreateLogMetricRequest() { - projectName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private CreateLogMetricRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - projectName_ = s; - break; - } - case 18: { - com.google.logging.v2.LogMetric.Builder subBuilder = null; - if (metric_ != null) { - subBuilder = metric_.toBuilder(); - } - metric_ = input.readMessage(com.google.logging.v2.LogMetric.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(metric_); - metric_ = subBuilder.buildPartial(); - } - - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_CreateLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_CreateLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.CreateLogMetricRequest.class, com.google.logging.v2.CreateLogMetricRequest.Builder.class); - } - - public static final int PROJECT_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object projectName_; - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the metric.
-   * Example: `"projects/my-project-id"`.
-   * The new metric must be provided in the request.
-   * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } - } - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the metric.
-   * Example: `"projects/my-project-id"`.
-   * The new metric must be provided in the request.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int METRIC_FIELD_NUMBER = 2; - private com.google.logging.v2.LogMetric metric_; - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - public boolean hasMetric() { - return metric_ != null; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - public com.google.logging.v2.LogMetric getMetric() { - return metric_ == null ? com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder() { - return getMetric(); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getProjectNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectName_); - } - if (metric_ != null) { - output.writeMessage(2, getMetric()); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getProjectNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, projectName_); - } - if (metric_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getMetric()); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.CreateLogMetricRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.CreateLogMetricRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.CreateLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.CreateLogMetricRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.CreateLogMetricRequest} - * - *
-   * The parameters to CreateLogMetric.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.CreateLogMetricRequest) - com.google.logging.v2.CreateLogMetricRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_CreateLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_CreateLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.CreateLogMetricRequest.class, com.google.logging.v2.CreateLogMetricRequest.Builder.class); - } - - // Construct using com.google.logging.v2.CreateLogMetricRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectName_ = ""; - - if (metricBuilder_ == null) { - metric_ = null; - } else { - metric_ = null; - metricBuilder_ = null; - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_CreateLogMetricRequest_descriptor; - } - - public com.google.logging.v2.CreateLogMetricRequest getDefaultInstanceForType() { - return com.google.logging.v2.CreateLogMetricRequest.getDefaultInstance(); - } - - public com.google.logging.v2.CreateLogMetricRequest build() { - com.google.logging.v2.CreateLogMetricRequest result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.CreateLogMetricRequest buildPartial() { - com.google.logging.v2.CreateLogMetricRequest result = new com.google.logging.v2.CreateLogMetricRequest(this); - result.projectName_ = projectName_; - if (metricBuilder_ == null) { - result.metric_ = metric_; - } else { - result.metric_ = metricBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.CreateLogMetricRequest) { - return mergeFrom((com.google.logging.v2.CreateLogMetricRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.CreateLogMetricRequest other) { - if (other == com.google.logging.v2.CreateLogMetricRequest.getDefaultInstance()) return this; - if (!other.getProjectName().isEmpty()) { - projectName_ = other.projectName_; - onChanged(); - } - if (other.hasMetric()) { - mergeMetric(other.getMetric()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.CreateLogMetricRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.CreateLogMetricRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object projectName_ = ""; - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the metric.
-     * Example: `"projects/my-project-id"`.
-     * The new metric must be provided in the request.
-     * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the metric.
-     * Example: `"projects/my-project-id"`.
-     * The new metric must be provided in the request.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the metric.
-     * Example: `"projects/my-project-id"`.
-     * The new metric must be provided in the request.
-     * 
- */ - public Builder setProjectName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - projectName_ = value; - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the metric.
-     * Example: `"projects/my-project-id"`.
-     * The new metric must be provided in the request.
-     * 
- */ - public Builder clearProjectName() { - - projectName_ = getDefaultInstance().getProjectName(); - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the metric.
-     * Example: `"projects/my-project-id"`.
-     * The new metric must be provided in the request.
-     * 
- */ - public Builder setProjectNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - projectName_ = value; - onChanged(); - return this; - } - - private com.google.logging.v2.LogMetric metric_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> metricBuilder_; - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public boolean hasMetric() { - return metricBuilder_ != null || metric_ != null; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public com.google.logging.v2.LogMetric getMetric() { - if (metricBuilder_ == null) { - return metric_ == null ? com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } else { - return metricBuilder_.getMessage(); - } - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public Builder setMetric(com.google.logging.v2.LogMetric value) { - if (metricBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - metric_ = value; - onChanged(); - } else { - metricBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public Builder setMetric( - com.google.logging.v2.LogMetric.Builder builderForValue) { - if (metricBuilder_ == null) { - metric_ = builderForValue.build(); - onChanged(); - } else { - metricBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public Builder mergeMetric(com.google.logging.v2.LogMetric value) { - if (metricBuilder_ == null) { - if (metric_ != null) { - metric_ = - com.google.logging.v2.LogMetric.newBuilder(metric_).mergeFrom(value).buildPartial(); - } else { - metric_ = value; - } - onChanged(); - } else { - metricBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public Builder clearMetric() { - if (metricBuilder_ == null) { - metric_ = null; - onChanged(); - } else { - metric_ = null; - metricBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public com.google.logging.v2.LogMetric.Builder getMetricBuilder() { - - onChanged(); - return getMetricFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder() { - if (metricBuilder_ != null) { - return metricBuilder_.getMessageOrBuilder(); - } else { - return metric_ == null ? - com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The new logs-based metric, which must not have an identifier that
-     * already exists.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> - getMetricFieldBuilder() { - if (metricBuilder_ == null) { - metricBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder>( - getMetric(), - getParentForChildren(), - isClean()); - metric_ = null; - } - return metricBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.CreateLogMetricRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - private static final com.google.logging.v2.CreateLogMetricRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.CreateLogMetricRequest(); - } - - public static com.google.logging.v2.CreateLogMetricRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public CreateLogMetricRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new CreateLogMetricRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser 
getParserForType() { - return PARSER; - } - - public com.google.logging.v2.CreateLogMetricRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequestOrBuilder.java deleted file mode 100644 index b0c0d6103503..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateLogMetricRequestOrBuilder.java +++ /dev/null @@ -1,59 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface CreateLogMetricRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.CreateLogMetricRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the metric.
-   * Example: `"projects/my-project-id"`.
-   * The new metric must be provided in the request.
-   * 
- */ - java.lang.String getProjectName(); - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the metric.
-   * Example: `"projects/my-project-id"`.
-   * The new metric must be provided in the request.
-   * 
- */ - com.google.protobuf.ByteString - getProjectNameBytes(); - - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - boolean hasMetric(); - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - com.google.logging.v2.LogMetric getMetric(); - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The new logs-based metric, which must not have an identifier that
-   * already exists.
-   * 
- */ - com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequest.java deleted file mode 100644 index 3eeb01d2435c..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequest.java +++ /dev/null @@ -1,722 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.CreateSinkRequest} - * - *
- * The parameters to `CreateSink`.
- * 
- */ -public final class CreateSinkRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.CreateSinkRequest) - CreateSinkRequestOrBuilder { - // Use CreateSinkRequest.newBuilder() to construct. - private CreateSinkRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private CreateSinkRequest() { - projectName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private CreateSinkRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - projectName_ = s; - break; - } - case 18: { - com.google.logging.v2.LogSink.Builder subBuilder = null; - if (sink_ != null) { - subBuilder = sink_.toBuilder(); - } - sink_ = input.readMessage(com.google.logging.v2.LogSink.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(sink_); - sink_ = subBuilder.buildPartial(); - } - - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_CreateSinkRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_CreateSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.CreateSinkRequest.class, com.google.logging.v2.CreateSinkRequest.Builder.class); - } - - public static final int PROJECT_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object projectName_; - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the sink.
-   * Example: `"projects/my-project-id"`.
-   * The new sink must be provided in the request.
-   * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } - } - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the sink.
-   * Example: `"projects/my-project-id"`.
-   * The new sink must be provided in the request.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int SINK_FIELD_NUMBER = 2; - private com.google.logging.v2.LogSink sink_; - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - public boolean hasSink() { - return sink_ != null; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - public com.google.logging.v2.LogSink getSink() { - return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() { - return getSink(); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getProjectNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectName_); - } - if (sink_ != null) { - output.writeMessage(2, getSink()); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getProjectNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, projectName_); - } - if (sink_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getSink()); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.CreateSinkRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return 
PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.CreateSinkRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.CreateSinkRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.CreateSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.CreateSinkRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.CreateSinkRequest} - * - *
-   * The parameters to `CreateSink`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.CreateSinkRequest) - com.google.logging.v2.CreateSinkRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_CreateSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_CreateSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.CreateSinkRequest.class, com.google.logging.v2.CreateSinkRequest.Builder.class); - } - - // Construct using com.google.logging.v2.CreateSinkRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectName_ = ""; - - if (sinkBuilder_ == null) { - sink_ = null; - } else { - sink_ = null; - sinkBuilder_ = null; - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_CreateSinkRequest_descriptor; - } - - public com.google.logging.v2.CreateSinkRequest getDefaultInstanceForType() { - return com.google.logging.v2.CreateSinkRequest.getDefaultInstance(); - } - - public com.google.logging.v2.CreateSinkRequest build() { - com.google.logging.v2.CreateSinkRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - 
public com.google.logging.v2.CreateSinkRequest buildPartial() { - com.google.logging.v2.CreateSinkRequest result = new com.google.logging.v2.CreateSinkRequest(this); - result.projectName_ = projectName_; - if (sinkBuilder_ == null) { - result.sink_ = sink_; - } else { - result.sink_ = sinkBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.CreateSinkRequest) { - return mergeFrom((com.google.logging.v2.CreateSinkRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.CreateSinkRequest other) { - if (other == com.google.logging.v2.CreateSinkRequest.getDefaultInstance()) return this; - if (!other.getProjectName().isEmpty()) { - projectName_ = other.projectName_; - onChanged(); - } - if (other.hasSink()) { - mergeSink(other.getSink()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.CreateSinkRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.CreateSinkRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object projectName_ = ""; - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the sink.
-     * Example: `"projects/my-project-id"`.
-     * The new sink must be provided in the request.
-     * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the sink.
-     * Example: `"projects/my-project-id"`.
-     * The new sink must be provided in the request.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the sink.
-     * Example: `"projects/my-project-id"`.
-     * The new sink must be provided in the request.
-     * 
- */ - public Builder setProjectName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - projectName_ = value; - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the sink.
-     * Example: `"projects/my-project-id"`.
-     * The new sink must be provided in the request.
-     * 
- */ - public Builder clearProjectName() { - - projectName_ = getDefaultInstance().getProjectName(); - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * The resource name of the project in which to create the sink.
-     * Example: `"projects/my-project-id"`.
-     * The new sink must be provided in the request.
-     * 
- */ - public Builder setProjectNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - projectName_ = value; - onChanged(); - return this; - } - - private com.google.logging.v2.LogSink sink_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> sinkBuilder_; - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public boolean hasSink() { - return sinkBuilder_ != null || sink_ != null; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public com.google.logging.v2.LogSink getSink() { - if (sinkBuilder_ == null) { - return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } else { - return sinkBuilder_.getMessage(); - } - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public Builder setSink(com.google.logging.v2.LogSink value) { - if (sinkBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - sink_ = value; - onChanged(); - } else { - sinkBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public Builder setSink( - com.google.logging.v2.LogSink.Builder builderForValue) { - if (sinkBuilder_ == null) { - sink_ = builderForValue.build(); - onChanged(); - } else { - sinkBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public Builder mergeSink(com.google.logging.v2.LogSink value) { - if (sinkBuilder_ == null) { - if (sink_ != null) { - sink_ = - com.google.logging.v2.LogSink.newBuilder(sink_).mergeFrom(value).buildPartial(); - } else { - sink_ = value; - } - onChanged(); - } else { - sinkBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public Builder clearSink() { - if (sinkBuilder_ == null) { - sink_ = null; - onChanged(); - } else { - sink_ = null; - sinkBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public com.google.logging.v2.LogSink.Builder getSinkBuilder() { - - onChanged(); - return getSinkFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() { - if (sinkBuilder_ != null) { - return sinkBuilder_.getMessageOrBuilder(); - } else { - return sink_ == null ? - com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The new sink, which must not have an identifier that already
-     * exists.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> - getSinkFieldBuilder() { - if (sinkBuilder_ == null) { - sinkBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder>( - getSink(), - getParentForChildren(), - isClean()); - sink_ = null; - } - return sinkBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.CreateSinkRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - private static final com.google.logging.v2.CreateSinkRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.CreateSinkRequest(); - } - - public static com.google.logging.v2.CreateSinkRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public CreateSinkRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new CreateSinkRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public 
com.google.logging.v2.CreateSinkRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequestOrBuilder.java deleted file mode 100644 index 682d3ffb8b1e..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/CreateSinkRequestOrBuilder.java +++ /dev/null @@ -1,59 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface CreateSinkRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.CreateSinkRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the sink.
-   * Example: `"projects/my-project-id"`.
-   * The new sink must be provided in the request.
-   * 
- */ - java.lang.String getProjectName(); - /** - * optional string project_name = 1; - * - *
-   * The resource name of the project in which to create the sink.
-   * Example: `"projects/my-project-id"`.
-   * The new sink must be provided in the request.
-   * 
- */ - com.google.protobuf.ByteString - getProjectNameBytes(); - - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - boolean hasSink(); - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - com.google.logging.v2.LogSink getSink(); - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The new sink, which must not have an identifier that already
-   * exists.
-   * 
- */ - com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequest.java deleted file mode 100644 index a41bb1d27285..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequest.java +++ /dev/null @@ -1,483 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.DeleteLogMetricRequest} - * - *
- * The parameters to DeleteLogMetric.
- * 
- */ -public final class DeleteLogMetricRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.DeleteLogMetricRequest) - DeleteLogMetricRequestOrBuilder { - // Use DeleteLogMetricRequest.newBuilder() to construct. - private DeleteLogMetricRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private DeleteLogMetricRequest() { - metricName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private DeleteLogMetricRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - metricName_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_DeleteLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteLogMetricRequest.class, 
com.google.logging.v2.DeleteLogMetricRequest.Builder.class); - } - - public static final int METRIC_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object metricName_; - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to delete.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } - } - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to delete.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getMetricNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, metricName_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getMetricNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, metricName_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.DeleteLogMetricRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.DeleteLogMetricRequest} - * - *
-   * The parameters to DeleteLogMetric.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.DeleteLogMetricRequest) - com.google.logging.v2.DeleteLogMetricRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_DeleteLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteLogMetricRequest.class, com.google.logging.v2.DeleteLogMetricRequest.Builder.class); - } - - // Construct using com.google.logging.v2.DeleteLogMetricRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - metricName_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor; - } - - public com.google.logging.v2.DeleteLogMetricRequest getDefaultInstanceForType() { - return com.google.logging.v2.DeleteLogMetricRequest.getDefaultInstance(); - } - - public com.google.logging.v2.DeleteLogMetricRequest build() { - com.google.logging.v2.DeleteLogMetricRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public 
com.google.logging.v2.DeleteLogMetricRequest buildPartial() { - com.google.logging.v2.DeleteLogMetricRequest result = new com.google.logging.v2.DeleteLogMetricRequest(this); - result.metricName_ = metricName_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.DeleteLogMetricRequest) { - return mergeFrom((com.google.logging.v2.DeleteLogMetricRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.DeleteLogMetricRequest other) { - if (other == com.google.logging.v2.DeleteLogMetricRequest.getDefaultInstance()) return this; - if (!other.getMetricName().isEmpty()) { - metricName_ = other.metricName_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.DeleteLogMetricRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.DeleteLogMetricRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object metricName_ = ""; - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to delete.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to delete.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to delete.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder setMetricName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - metricName_ = value; - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to delete.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder clearMetricName() { - - metricName_ = getDefaultInstance().getMetricName(); - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to delete.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder setMetricNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - metricName_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.DeleteLogMetricRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - private static final com.google.logging.v2.DeleteLogMetricRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.DeleteLogMetricRequest(); - } - - public static com.google.logging.v2.DeleteLogMetricRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public DeleteLogMetricRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new DeleteLogMetricRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.DeleteLogMetricRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequestOrBuilder.java deleted file mode 100644 index 8a9f333d1d03..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogMetricRequestOrBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface DeleteLogMetricRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.DeleteLogMetricRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to delete.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - java.lang.String getMetricName(); - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to delete.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getMetricNameBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequest.java deleted file mode 100644 index b27fd4cbd02f..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequest.java +++ /dev/null @@ -1,483 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.DeleteLogRequest} - * - *
- * The parameters to DeleteLog.
- * 
- */ -public final class DeleteLogRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.DeleteLogRequest) - DeleteLogRequestOrBuilder { - // Use DeleteLogRequest.newBuilder() to construct. - private DeleteLogRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private DeleteLogRequest() { - logName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private DeleteLogRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - logName_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_DeleteLogRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_DeleteLogRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteLogRequest.class, com.google.logging.v2.DeleteLogRequest.Builder.class); - } - - public 
static final int LOG_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object logName_; - /** - * optional string log_name = 1; - * - *
-   * Required. The resource name of the log to delete.  Example:
-   * `"projects/my-project/logs/syslog"`.
-   * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } - } - /** - * optional string log_name = 1; - * - *
-   * Required. The resource name of the log to delete.  Example:
-   * `"projects/my-project/logs/syslog"`.
-   * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getLogNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, logName_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getLogNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, logName_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.DeleteLogRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.DeleteLogRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteLogRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.DeleteLogRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.DeleteLogRequest} - * - *
-   * The parameters to DeleteLog.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.DeleteLogRequest) - com.google.logging.v2.DeleteLogRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_DeleteLogRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_DeleteLogRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteLogRequest.class, com.google.logging.v2.DeleteLogRequest.Builder.class); - } - - // Construct using com.google.logging.v2.DeleteLogRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - logName_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_DeleteLogRequest_descriptor; - } - - public com.google.logging.v2.DeleteLogRequest getDefaultInstanceForType() { - return com.google.logging.v2.DeleteLogRequest.getDefaultInstance(); - } - - public com.google.logging.v2.DeleteLogRequest build() { - com.google.logging.v2.DeleteLogRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.DeleteLogRequest buildPartial() { - com.google.logging.v2.DeleteLogRequest result = new 
com.google.logging.v2.DeleteLogRequest(this); - result.logName_ = logName_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.DeleteLogRequest) { - return mergeFrom((com.google.logging.v2.DeleteLogRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.DeleteLogRequest other) { - if (other == com.google.logging.v2.DeleteLogRequest.getDefaultInstance()) return this; - if (!other.getLogName().isEmpty()) { - logName_ = other.logName_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.DeleteLogRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.DeleteLogRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object logName_ = ""; - /** - * optional string log_name = 1; - * - *
-     * Required. The resource name of the log to delete.  Example:
-     * `"projects/my-project/logs/syslog"`.
-     * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string log_name = 1; - * - *
-     * Required. The resource name of the log to delete.  Example:
-     * `"projects/my-project/logs/syslog"`.
-     * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string log_name = 1; - * - *
-     * Required. The resource name of the log to delete.  Example:
-     * `"projects/my-project/logs/syslog"`.
-     * 
- */ - public Builder setLogName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - logName_ = value; - onChanged(); - return this; - } - /** - * optional string log_name = 1; - * - *
-     * Required. The resource name of the log to delete.  Example:
-     * `"projects/my-project/logs/syslog"`.
-     * 
- */ - public Builder clearLogName() { - - logName_ = getDefaultInstance().getLogName(); - onChanged(); - return this; - } - /** - * optional string log_name = 1; - * - *
-     * Required. The resource name of the log to delete.  Example:
-     * `"projects/my-project/logs/syslog"`.
-     * 
- */ - public Builder setLogNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - logName_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.DeleteLogRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - private static final com.google.logging.v2.DeleteLogRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.DeleteLogRequest(); - } - - public static com.google.logging.v2.DeleteLogRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public DeleteLogRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new DeleteLogRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.DeleteLogRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequestOrBuilder.java deleted file mode 100644 index a647aa68759c..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteLogRequestOrBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface DeleteLogRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.DeleteLogRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string log_name = 1; - * - *
-   * Required. The resource name of the log to delete.  Example:
-   * `"projects/my-project/logs/syslog"`.
-   * 
- */ - java.lang.String getLogName(); - /** - * optional string log_name = 1; - * - *
-   * Required. The resource name of the log to delete.  Example:
-   * `"projects/my-project/logs/syslog"`.
-   * 
- */ - com.google.protobuf.ByteString - getLogNameBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequest.java deleted file mode 100644 index c8c100ac0ab9..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequest.java +++ /dev/null @@ -1,483 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.DeleteSinkRequest} - * - *
- * The parameters to `DeleteSink`.
- * 
- */ -public final class DeleteSinkRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.DeleteSinkRequest) - DeleteSinkRequestOrBuilder { - // Use DeleteSinkRequest.newBuilder() to construct. - private DeleteSinkRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private DeleteSinkRequest() { - sinkName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private DeleteSinkRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - sinkName_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_DeleteSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_DeleteSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteSinkRequest.class, com.google.logging.v2.DeleteSinkRequest.Builder.class); - } - 
- public static final int SINK_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object sinkName_; - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to delete.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } - } - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to delete.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getSinkNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, sinkName_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getSinkNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, sinkName_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteSinkRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.DeleteSinkRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.DeleteSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.DeleteSinkRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.DeleteSinkRequest} - * - *
-   * The parameters to `DeleteSink`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.DeleteSinkRequest) - com.google.logging.v2.DeleteSinkRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_DeleteSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_DeleteSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.DeleteSinkRequest.class, com.google.logging.v2.DeleteSinkRequest.Builder.class); - } - - // Construct using com.google.logging.v2.DeleteSinkRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - sinkName_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_DeleteSinkRequest_descriptor; - } - - public com.google.logging.v2.DeleteSinkRequest getDefaultInstanceForType() { - return com.google.logging.v2.DeleteSinkRequest.getDefaultInstance(); - } - - public com.google.logging.v2.DeleteSinkRequest build() { - com.google.logging.v2.DeleteSinkRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.DeleteSinkRequest buildPartial() { - com.google.logging.v2.DeleteSinkRequest 
result = new com.google.logging.v2.DeleteSinkRequest(this); - result.sinkName_ = sinkName_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.DeleteSinkRequest) { - return mergeFrom((com.google.logging.v2.DeleteSinkRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.DeleteSinkRequest other) { - if (other == com.google.logging.v2.DeleteSinkRequest.getDefaultInstance()) return this; - if (!other.getSinkName().isEmpty()) { - sinkName_ = other.sinkName_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.DeleteSinkRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.DeleteSinkRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object sinkName_ = ""; - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to delete.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to delete.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to delete.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder setSinkName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - sinkName_ = value; - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to delete.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder clearSinkName() { - - sinkName_ = getDefaultInstance().getSinkName(); - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to delete.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder setSinkNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - sinkName_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.DeleteSinkRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - private static final com.google.logging.v2.DeleteSinkRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.DeleteSinkRequest(); - } - - public static com.google.logging.v2.DeleteSinkRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public DeleteSinkRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new DeleteSinkRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.DeleteSinkRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequestOrBuilder.java deleted file mode 100644 index 48db090deeac..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/DeleteSinkRequestOrBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface DeleteSinkRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.DeleteSinkRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to delete.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - java.lang.String getSinkName(); - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to delete.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getSinkNameBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequest.java deleted file mode 100644 index 21f08c3011f3..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequest.java +++ /dev/null @@ -1,483 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.GetLogMetricRequest} - * - *
- * The parameters to GetLogMetric.
- * 
- */ -public final class GetLogMetricRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.GetLogMetricRequest) - GetLogMetricRequestOrBuilder { - // Use GetLogMetricRequest.newBuilder() to construct. - private GetLogMetricRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private GetLogMetricRequest() { - metricName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private GetLogMetricRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - metricName_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_GetLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_GetLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.GetLogMetricRequest.class, 
com.google.logging.v2.GetLogMetricRequest.Builder.class); - } - - public static final int METRIC_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object metricName_; - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the desired metric.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } - } - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the desired metric.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getMetricNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, metricName_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getMetricNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, metricName_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.GetLogMetricRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.GetLogMetricRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.GetLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.GetLogMetricRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.GetLogMetricRequest} - * - *
-   * The parameters to GetLogMetric.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.GetLogMetricRequest) - com.google.logging.v2.GetLogMetricRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_GetLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_GetLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.GetLogMetricRequest.class, com.google.logging.v2.GetLogMetricRequest.Builder.class); - } - - // Construct using com.google.logging.v2.GetLogMetricRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - metricName_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_GetLogMetricRequest_descriptor; - } - - public com.google.logging.v2.GetLogMetricRequest getDefaultInstanceForType() { - return com.google.logging.v2.GetLogMetricRequest.getDefaultInstance(); - } - - public com.google.logging.v2.GetLogMetricRequest build() { - com.google.logging.v2.GetLogMetricRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.GetLogMetricRequest buildPartial() { - 
com.google.logging.v2.GetLogMetricRequest result = new com.google.logging.v2.GetLogMetricRequest(this); - result.metricName_ = metricName_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.GetLogMetricRequest) { - return mergeFrom((com.google.logging.v2.GetLogMetricRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.GetLogMetricRequest other) { - if (other == com.google.logging.v2.GetLogMetricRequest.getDefaultInstance()) return this; - if (!other.getMetricName().isEmpty()) { - metricName_ = other.metricName_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.GetLogMetricRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.GetLogMetricRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object metricName_ = ""; - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the desired metric.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the desired metric.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the desired metric.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder setMetricName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - metricName_ = value; - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the desired metric.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder clearMetricName() { - - metricName_ = getDefaultInstance().getMetricName(); - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the desired metric.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * 
- */ - public Builder setMetricNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - metricName_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.GetLogMetricRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - private static final com.google.logging.v2.GetLogMetricRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.GetLogMetricRequest(); - } - - public static com.google.logging.v2.GetLogMetricRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public GetLogMetricRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new GetLogMetricRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.GetLogMetricRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequestOrBuilder.java deleted file mode 100644 index 4c4a07d8919a..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetLogMetricRequestOrBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface GetLogMetricRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.GetLogMetricRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the desired metric.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - java.lang.String getMetricName(); - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the desired metric.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getMetricNameBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequest.java deleted file mode 100644 index 18899f0f7898..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequest.java +++ /dev/null @@ -1,483 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.GetSinkRequest} - * - *
- * The parameters to `GetSink`.
- * 
- */ -public final class GetSinkRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.GetSinkRequest) - GetSinkRequestOrBuilder { - // Use GetSinkRequest.newBuilder() to construct. - private GetSinkRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private GetSinkRequest() { - sinkName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private GetSinkRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - sinkName_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_GetSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_GetSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.GetSinkRequest.class, com.google.logging.v2.GetSinkRequest.Builder.class); - } - - public static final int 
SINK_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object sinkName_; - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to return.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } - } - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to return.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getSinkNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, sinkName_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getSinkNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, sinkName_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.GetSinkRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.GetSinkRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.GetSinkRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.GetSinkRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.GetSinkRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.GetSinkRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.GetSinkRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.GetSinkRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.GetSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.GetSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.GetSinkRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.GetSinkRequest} - * - *
-   * The parameters to `GetSink`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.GetSinkRequest) - com.google.logging.v2.GetSinkRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_GetSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_GetSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.GetSinkRequest.class, com.google.logging.v2.GetSinkRequest.Builder.class); - } - - // Construct using com.google.logging.v2.GetSinkRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - sinkName_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_GetSinkRequest_descriptor; - } - - public com.google.logging.v2.GetSinkRequest getDefaultInstanceForType() { - return com.google.logging.v2.GetSinkRequest.getDefaultInstance(); - } - - public com.google.logging.v2.GetSinkRequest build() { - com.google.logging.v2.GetSinkRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.GetSinkRequest buildPartial() { - com.google.logging.v2.GetSinkRequest result = new 
com.google.logging.v2.GetSinkRequest(this); - result.sinkName_ = sinkName_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.GetSinkRequest) { - return mergeFrom((com.google.logging.v2.GetSinkRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.GetSinkRequest other) { - if (other == com.google.logging.v2.GetSinkRequest.getDefaultInstance()) return this; - if (!other.getSinkName().isEmpty()) { - sinkName_ = other.sinkName_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.GetSinkRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.GetSinkRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object sinkName_ = ""; - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to return.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to return.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to return.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder setSinkName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - sinkName_ = value; - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to return.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder clearSinkName() { - - sinkName_ = getDefaultInstance().getSinkName(); - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to return.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * 
- */ - public Builder setSinkNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - sinkName_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.GetSinkRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - private static final com.google.logging.v2.GetSinkRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.GetSinkRequest(); - } - - public static com.google.logging.v2.GetSinkRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public GetSinkRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new GetSinkRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.GetSinkRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequestOrBuilder.java deleted file mode 100644 index 76bce80841a5..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/GetSinkRequestOrBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface GetSinkRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.GetSinkRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to return.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - java.lang.String getSinkName(); - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to return.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getSinkNameBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequest.java deleted file mode 100644 index 0c092432cd05..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequest.java +++ /dev/null @@ -1,1182 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListLogEntriesRequest} - * - *
- * The parameters to `ListLogEntries`.
- * 
- */ -public final class ListLogEntriesRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListLogEntriesRequest) - ListLogEntriesRequestOrBuilder { - // Use ListLogEntriesRequest.newBuilder() to construct. - private ListLogEntriesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListLogEntriesRequest() { - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - filter_ = ""; - orderBy_ = ""; - pageSize_ = 0; - pageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListLogEntriesRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - projectIds_.add(s); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - filter_ = s; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - - orderBy_ = s; - break; - } - case 32: { - - pageSize_ = input.readInt32(); - break; - } - case 42: { - String s = input.readStringRequireUtf8(); - - pageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = projectIds_.getUnmodifiableView(); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogEntriesRequest.class, com.google.logging.v2.ListLogEntriesRequest.Builder.class); - } - - private int bitField0_; - public static final int PROJECT_IDS_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList projectIds_; - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - public com.google.protobuf.ProtocolStringList - getProjectIdsList() { - return projectIds_; - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - public int getProjectIdsCount() { - return projectIds_.size(); - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - public java.lang.String getProjectIds(int index) { - return projectIds_.get(index); - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectIdsBytes(int index) { - return projectIds_.getByteString(index); - } - - public static final int FILTER_FIELD_NUMBER = 2; - private volatile java.lang.Object filter_; - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The filter is compared against all log entries in the projects specified by
-   * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-   * filter matches all log entries.
-   * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } - } - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The filter is compared against all log entries in the projects specified by
-   * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-   * filter matches all log entries.
-   * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int ORDER_BY_FIELD_NUMBER = 3; - private volatile java.lang.Object orderBy_; - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps are returned in order of `LogEntry.insertId`.
-   * 
- */ - public java.lang.String getOrderBy() { - java.lang.Object ref = orderBy_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - orderBy_ = s; - return s; - } - } - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps are returned in order of `LogEntry.insertId`.
-   * 
- */ - public com.google.protobuf.ByteString - getOrderByBytes() { - java.lang.Object ref = orderBy_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - orderBy_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PAGE_SIZE_FIELD_NUMBER = 4; - private int pageSize_; - /** - * optional int32 page_size = 4; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - public int getPageSize() { - return pageSize_; - } - - public static final int PAGE_TOKEN_FIELD_NUMBER = 5; - private volatile java.lang.Object pageToken_; - /** - * optional string page_token = 5; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-   * be the same as in the previous request.
-   * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } - } - /** - * optional string page_token = 5; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-   * be the same as in the previous request.
-   * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < projectIds_.size(); i++) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectIds_.getRaw(i)); - } - if (!getFilterBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, filter_); - } - if (!getOrderByBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 3, orderBy_); - } - if (pageSize_ != 0) { - output.writeInt32(4, pageSize_); - } - if (!getPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 5, pageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < projectIds_.size(); i++) { - dataSize += computeStringSizeNoTag(projectIds_.getRaw(i)); - } - size += dataSize; - size += 1 * getProjectIdsList().size(); - } - if (!getFilterBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, filter_); - } - if (!getOrderByBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(3, orderBy_); - } - if (pageSize_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(4, pageSize_); - } - if (!getPageTokenBytes().isEmpty()) { - size += 
com.google.protobuf.GeneratedMessage.computeStringSize(5, pageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListLogEntriesRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public 
static com.google.logging.v2.ListLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListLogEntriesRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListLogEntriesRequest} - * - *
-   * The parameters to `ListLogEntries`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListLogEntriesRequest) - com.google.logging.v2.ListLogEntriesRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogEntriesRequest.class, com.google.logging.v2.ListLogEntriesRequest.Builder.class); - } - - // Construct using com.google.logging.v2.ListLogEntriesRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - filter_ = ""; - - orderBy_ = ""; - - pageSize_ = 0; - - pageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesRequest_descriptor; - } - - public com.google.logging.v2.ListLogEntriesRequest getDefaultInstanceForType() { - return com.google.logging.v2.ListLogEntriesRequest.getDefaultInstance(); - } - - public com.google.logging.v2.ListLogEntriesRequest build() { - com.google.logging.v2.ListLogEntriesRequest result = buildPartial(); - if 
(!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListLogEntriesRequest buildPartial() { - com.google.logging.v2.ListLogEntriesRequest result = new com.google.logging.v2.ListLogEntriesRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = projectIds_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.projectIds_ = projectIds_; - result.filter_ = filter_; - result.orderBy_ = orderBy_; - result.pageSize_ = pageSize_; - result.pageToken_ = pageToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListLogEntriesRequest) { - return mergeFrom((com.google.logging.v2.ListLogEntriesRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListLogEntriesRequest other) { - if (other == com.google.logging.v2.ListLogEntriesRequest.getDefaultInstance()) return this; - if (!other.projectIds_.isEmpty()) { - if (projectIds_.isEmpty()) { - projectIds_ = other.projectIds_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureProjectIdsIsMutable(); - projectIds_.addAll(other.projectIds_); - } - onChanged(); - } - if (!other.getFilter().isEmpty()) { - filter_ = other.filter_; - onChanged(); - } - if (!other.getOrderBy().isEmpty()) { - orderBy_ = other.orderBy_; - onChanged(); - } - if (other.getPageSize() != 0) { - setPageSize(other.getPageSize()); - } - if (!other.getPageToken().isEmpty()) { - pageToken_ = other.pageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - 
throws java.io.IOException { - com.google.logging.v2.ListLogEntriesRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListLogEntriesRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private com.google.protobuf.LazyStringList projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureProjectIdsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = new com.google.protobuf.LazyStringArrayList(projectIds_); - bitField0_ |= 0x00000001; - } - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public com.google.protobuf.ProtocolStringList - getProjectIdsList() { - return projectIds_.getUnmodifiableView(); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public int getProjectIdsCount() { - return projectIds_.size(); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public java.lang.String getProjectIds(int index) { - return projectIds_.get(index); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectIdsBytes(int index) { - return projectIds_.getByteString(index); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public Builder setProjectIds( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureProjectIdsIsMutable(); - projectIds_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public Builder addProjectIds( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureProjectIdsIsMutable(); - projectIds_.add(value); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public Builder addAllProjectIds( - java.lang.Iterable values) { - ensureProjectIdsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, projectIds_); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public Builder clearProjectIds() { - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. One or more project IDs or project numbers from which to retrieve
-     * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-     * 
- */ - public Builder addProjectIdsBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - ensureProjectIdsIsMutable(); - projectIds_.add(value); - onChanged(); - return this; - } - - private java.lang.Object filter_ = ""; - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The filter is compared against all log entries in the projects specified by
-     * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-     * filter matches all log entries.
-     * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The filter is compared against all log entries in the projects specified by
-     * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-     * filter matches all log entries.
-     * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The filter is compared against all log entries in the projects specified by
-     * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-     * filter matches all log entries.
-     * 
- */ - public Builder setFilter( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - filter_ = value; - onChanged(); - return this; - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The filter is compared against all log entries in the projects specified by
-     * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-     * filter matches all log entries.
-     * 
- */ - public Builder clearFilter() { - - filter_ = getDefaultInstance().getFilter(); - onChanged(); - return this; - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The filter is compared against all log entries in the projects specified by
-     * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-     * filter matches all log entries.
-     * 
- */ - public Builder setFilterBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - filter_ = value; - onChanged(); - return this; - } - - private java.lang.Object orderBy_ = ""; - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps are returned in order of `LogEntry.insertId`.
-     * 
- */ - public java.lang.String getOrderBy() { - java.lang.Object ref = orderBy_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - orderBy_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps are returned in order of `LogEntry.insertId`.
-     * 
- */ - public com.google.protobuf.ByteString - getOrderByBytes() { - java.lang.Object ref = orderBy_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - orderBy_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps are returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder setOrderBy( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - orderBy_ = value; - onChanged(); - return this; - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps are returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder clearOrderBy() { - - orderBy_ = getDefaultInstance().getOrderBy(); - onChanged(); - return this; - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps are returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder setOrderByBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - orderBy_ = value; - onChanged(); - return this; - } - - private int pageSize_ ; - /** - * optional int32 page_size = 4; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public int getPageSize() { - return pageSize_; - } - /** - * optional int32 page_size = 4; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder setPageSize(int value) { - - pageSize_ = value; - onChanged(); - return this; - } - /** - * optional int32 page_size = 4; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder clearPageSize() { - - pageSize_ = 0; - onChanged(); - return this; - } - - private java.lang.Object pageToken_ = ""; - /** - * optional string page_token = 5; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-     * be the same as in the previous request.
-     * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string page_token = 5; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-     * be the same as in the previous request.
-     * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string page_token = 5; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder setPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - pageToken_ = value; - onChanged(); - return this; - } - /** - * optional string page_token = 5; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder clearPageToken() { - - pageToken_ = getDefaultInstance().getPageToken(); - onChanged(); - return this; - } - /** - * optional string page_token = 5; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder setPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - pageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListLogEntriesRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - private static final com.google.logging.v2.ListLogEntriesRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListLogEntriesRequest(); - } - - public static com.google.logging.v2.ListLogEntriesRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListLogEntriesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListLogEntriesRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListLogEntriesRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequestOrBuilder.java deleted file mode 100644 index 9b10254aaf5d..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesRequestOrBuilder.java +++ /dev/null @@ -1,139 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ListLogEntriesRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListLogEntriesRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - com.google.protobuf.ProtocolStringList - getProjectIdsList(); - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - int getProjectIdsCount(); - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - java.lang.String getProjectIds(int index); - /** - * repeated string project_ids = 1; - * - *
-   * Required. One or more project IDs or project numbers from which to retrieve
-   * log entries.  Examples of a project ID: `"my-project-1A"`, `"1234567890"`.
-   * 
- */ - com.google.protobuf.ByteString - getProjectIdsBytes(int index); - - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The filter is compared against all log entries in the projects specified by
-   * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-   * filter matches all log entries.
-   * 
- */ - java.lang.String getFilter(); - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The filter is compared against all log entries in the projects specified by
-   * `projectIds`.  Only entries that match the filter are retrieved.  An empty
-   * filter matches all log entries.
-   * 
- */ - com.google.protobuf.ByteString - getFilterBytes(); - - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps are returned in order of `LogEntry.insertId`.
-   * 
- */ - java.lang.String getOrderBy(); - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps are returned in order of `LogEntry.insertId`.
-   * 
- */ - com.google.protobuf.ByteString - getOrderByBytes(); - - /** - * optional int32 page_size = 4; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - int getPageSize(); - - /** - * optional string page_token = 5; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-   * be the same as in the previous request.
-   * 
- */ - java.lang.String getPageToken(); - /** - * optional string page_token = 5; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The values of `projectIds`, `filter`, and `orderBy` must
-   * be the same as in the previous request.
-   * 
- */ - com.google.protobuf.ByteString - getPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponse.java deleted file mode 100644 index 9974ae132ae1..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponse.java +++ /dev/null @@ -1,923 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListLogEntriesResponse} - * - *
- * Result returned from `ListLogEntries`.
- * 
- */ -public final class ListLogEntriesResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListLogEntriesResponse) - ListLogEntriesResponseOrBuilder { - // Use ListLogEntriesResponse.newBuilder() to construct. - private ListLogEntriesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListLogEntriesResponse() { - entries_ = java.util.Collections.emptyList(); - nextPageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListLogEntriesResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - entries_.add(input.readMessage(com.google.logging.v2.LogEntry.parser(), extensionRegistry)); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - nextPageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - 
return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogEntriesResponse.class, com.google.logging.v2.ListLogEntriesResponse.Builder.class); - } - - private int bitField0_; - public static final int ENTRIES_FIELD_NUMBER = 1; - private java.util.List entries_; - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - public java.util.List getEntriesList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - public java.util.List - getEntriesOrBuilderList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - public int getEntriesCount() { - return entries_.size(); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - return entries_.get(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - return entries_.get(index); - } - - public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object nextPageToken_; - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call
-   * this method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } - } - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call
-   * this method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < entries_.size(); i++) { - output.writeMessage(1, entries_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < entries_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, entries_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesResponse 
parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListLogEntriesResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListLogEntriesResponse prototype) { - return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListLogEntriesResponse} - * - *
-   * Result returned from `ListLogEntries`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListLogEntriesResponse) - com.google.logging.v2.ListLogEntriesResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogEntriesResponse.class, com.google.logging.v2.ListLogEntriesResponse.Builder.class); - } - - // Construct using com.google.logging.v2.ListLogEntriesResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getEntriesFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - entriesBuilder_.clear(); - } - nextPageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListLogEntriesResponse_descriptor; - } - - public com.google.logging.v2.ListLogEntriesResponse getDefaultInstanceForType() { - return com.google.logging.v2.ListLogEntriesResponse.getDefaultInstance(); - } - - public com.google.logging.v2.ListLogEntriesResponse build() { - 
com.google.logging.v2.ListLogEntriesResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListLogEntriesResponse buildPartial() { - com.google.logging.v2.ListLogEntriesResponse result = new com.google.logging.v2.ListLogEntriesResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (entriesBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.entries_ = entries_; - } else { - result.entries_ = entriesBuilder_.build(); - } - result.nextPageToken_ = nextPageToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListLogEntriesResponse) { - return mergeFrom((com.google.logging.v2.ListLogEntriesResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListLogEntriesResponse other) { - if (other == com.google.logging.v2.ListLogEntriesResponse.getDefaultInstance()) return this; - if (entriesBuilder_ == null) { - if (!other.entries_.isEmpty()) { - if (entries_.isEmpty()) { - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureEntriesIsMutable(); - entries_.addAll(other.entries_); - } - onChanged(); - } - } else { - if (!other.entries_.isEmpty()) { - if (entriesBuilder_.isEmpty()) { - entriesBuilder_.dispose(); - entriesBuilder_ = null; - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000001); - entriesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getEntriesFieldBuilder() : null; - } else { - entriesBuilder_.addAllMessages(other.entries_); - } - } - } - if (!other.getNextPageToken().isEmpty()) { - nextPageToken_ = other.nextPageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListLogEntriesResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListLogEntriesResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List entries_ = - java.util.Collections.emptyList(); - private void ensureEntriesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = new java.util.ArrayList(entries_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> entriesBuilder_; - - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public java.util.List getEntriesList() { - if (entriesBuilder_ == null) { - return java.util.Collections.unmodifiableList(entries_); - } else { - return entriesBuilder_.getMessageList(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public int getEntriesCount() { - if (entriesBuilder_ == null) { - return entries_.size(); - } else { - return entriesBuilder_.getCount(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); - } else { - return entriesBuilder_.getMessage(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.set(index, value); - onChanged(); - } else { - entriesBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.set(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder addEntries(com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(value); - onChanged(); - } else { - entriesBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(index, value); - onChanged(); - } else { - entriesBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder addEntries( - com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder addAllEntries( - java.lang.Iterable values) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, entries_); - onChanged(); - } else { - entriesBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder clearEntries() { - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - entriesBuilder_.clear(); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public Builder removeEntries(int index) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.remove(index); - onChanged(); - } else { - entriesBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder getEntriesBuilder( - int index) { - return getEntriesFieldBuilder().getBuilder(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); } else { - return entriesBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public java.util.List - getEntriesOrBuilderList() { - if (entriesBuilder_ != null) { - return entriesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(entries_); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder() { - return getEntriesFieldBuilder().addBuilder( - com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder( - int index) { - return getEntriesFieldBuilder().addBuilder( - index, com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries.
-     * 
- */ - public java.util.List - getEntriesBuilderList() { - return getEntriesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> - getEntriesFieldBuilder() { - if (entriesBuilder_ == null) { - entriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder>( - entries_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - entries_ = null; - } - return entriesBuilder_; - } - - private java.lang.Object nextPageToken_ = ""; - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call
-     * this method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call
-     * this method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call
-     * this method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - nextPageToken_ = value; - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call
-     * this method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder clearNextPageToken() { - - nextPageToken_ = getDefaultInstance().getNextPageToken(); - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call
-     * this method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - nextPageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListLogEntriesResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - private static final com.google.logging.v2.ListLogEntriesResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListLogEntriesResponse(); - } - - public static com.google.logging.v2.ListLogEntriesResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListLogEntriesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListLogEntriesResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListLogEntriesResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git 
a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponseOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponseOrBuilder.java deleted file mode 100644 index 781993075527..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogEntriesResponseOrBuilder.java +++ /dev/null @@ -1,75 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ListLogEntriesResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListLogEntriesResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - java.util.List - getEntriesList(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - com.google.logging.v2.LogEntry getEntries(int index); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - int getEntriesCount(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - java.util.List - getEntriesOrBuilderList(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries.
-   * 
- */ - com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index); - - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call
-   * this method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - java.lang.String getNextPageToken(); - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call
-   * this method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - com.google.protobuf.ByteString - getNextPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequest.java deleted file mode 100644 index 7bae460529dc..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequest.java +++ /dev/null @@ -1,748 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListLogMetricsRequest} - * - *
- * The parameters to ListLogMetrics.
- * 
- */ -public final class ListLogMetricsRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListLogMetricsRequest) - ListLogMetricsRequestOrBuilder { - // Use ListLogMetricsRequest.newBuilder() to construct. - private ListLogMetricsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListLogMetricsRequest() { - projectName_ = ""; - pageToken_ = ""; - pageSize_ = 0; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListLogMetricsRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - projectName_ = s; - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - pageToken_ = s; - break; - } - case 24: { - - pageSize_ = input.readInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogMetricsRequest.class, com.google.logging.v2.ListLogMetricsRequest.Builder.class); - } - - public static final int PROJECT_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object projectName_; - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name for the project whose metrics are wanted.
-   * Example: `"projects/my-project-id"`.
-   * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } - } - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name for the project whose metrics are wanted.
-   * Example: `"projects/my-project-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object pageToken_; - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The value of `projectName` must
-   * be the same as in the previous request.
-   * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } - } - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The value of `projectName` must
-   * be the same as in the previous request.
-   * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PAGE_SIZE_FIELD_NUMBER = 3; - private int pageSize_; - /** - * optional int32 page_size = 3; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - public int getPageSize() { - return pageSize_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getProjectNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectName_); - } - if (!getPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, pageToken_); - } - if (pageSize_ != 0) { - output.writeInt32(3, pageSize_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getProjectNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, projectName_); - } - if (!getPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, pageToken_); - } - if (pageSize_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(3, pageSize_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListLogMetricsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
com.google.logging.v2.ListLogMetricsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListLogMetricsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogMetricsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListLogMetricsRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListLogMetricsRequest} - * - *
-   * The parameters to ListLogMetrics.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListLogMetricsRequest) - com.google.logging.v2.ListLogMetricsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogMetricsRequest.class, com.google.logging.v2.ListLogMetricsRequest.Builder.class); - } - - // Construct using com.google.logging.v2.ListLogMetricsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectName_ = ""; - - pageToken_ = ""; - - pageSize_ = 0; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsRequest_descriptor; - } - - public com.google.logging.v2.ListLogMetricsRequest getDefaultInstanceForType() { - return com.google.logging.v2.ListLogMetricsRequest.getDefaultInstance(); - } - - public com.google.logging.v2.ListLogMetricsRequest build() { - com.google.logging.v2.ListLogMetricsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public 
com.google.logging.v2.ListLogMetricsRequest buildPartial() { - com.google.logging.v2.ListLogMetricsRequest result = new com.google.logging.v2.ListLogMetricsRequest(this); - result.projectName_ = projectName_; - result.pageToken_ = pageToken_; - result.pageSize_ = pageSize_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListLogMetricsRequest) { - return mergeFrom((com.google.logging.v2.ListLogMetricsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListLogMetricsRequest other) { - if (other == com.google.logging.v2.ListLogMetricsRequest.getDefaultInstance()) return this; - if (!other.getProjectName().isEmpty()) { - projectName_ = other.projectName_; - onChanged(); - } - if (!other.getPageToken().isEmpty()) { - pageToken_ = other.pageToken_; - onChanged(); - } - if (other.getPageSize() != 0) { - setPageSize(other.getPageSize()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListLogMetricsRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListLogMetricsRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object projectName_ = ""; - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name for the project whose metrics are wanted.
-     * Example: `"projects/my-project-id"`.
-     * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name for the project whose metrics are wanted.
-     * Example: `"projects/my-project-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name for the project whose metrics are wanted.
-     * Example: `"projects/my-project-id"`.
-     * 
- */ - public Builder setProjectName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - projectName_ = value; - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name for the project whose metrics are wanted.
-     * Example: `"projects/my-project-id"`.
-     * 
- */ - public Builder clearProjectName() { - - projectName_ = getDefaultInstance().getProjectName(); - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name for the project whose metrics are wanted.
-     * Example: `"projects/my-project-id"`.
-     * 
- */ - public Builder setProjectNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - projectName_ = value; - onChanged(); - return this; - } - - private java.lang.Object pageToken_ = ""; - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The value of `projectName` must
-     * be the same as in the previous request.
-     * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The value of `projectName` must
-     * be the same as in the previous request.
-     * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The value of `projectName` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder setPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - pageToken_ = value; - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The value of `projectName` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder clearPageToken() { - - pageToken_ = getDefaultInstance().getPageToken(); - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.  The value of `projectName` must
-     * be the same as in the previous request.
-     * 
- */ - public Builder setPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - pageToken_ = value; - onChanged(); - return this; - } - - private int pageSize_ ; - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public int getPageSize() { - return pageSize_; - } - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder setPageSize(int value) { - - pageSize_ = value; - onChanged(); - return this; - } - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder clearPageSize() { - - pageSize_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListLogMetricsRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - private static final com.google.logging.v2.ListLogMetricsRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListLogMetricsRequest(); - } - - public static com.google.logging.v2.ListLogMetricsRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListLogMetricsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListLogMetricsRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListLogMetricsRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequestOrBuilder.java deleted file mode 100644 index 
c393cc4186d4..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsRequestOrBuilder.java +++ /dev/null @@ -1,68 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface ListLogMetricsRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListLogMetricsRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name for the project whose metrics are wanted.
-   * Example: `"projects/my-project-id"`.
-   * 
- */ - java.lang.String getProjectName(); - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name for the project whose metrics are wanted.
-   * Example: `"projects/my-project-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getProjectNameBytes(); - - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The value of `projectName` must
-   * be the same as in the previous request.
-   * 
- */ - java.lang.String getPageToken(); - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.  The value of `projectName` must
-   * be the same as in the previous request.
-   * 
- */ - com.google.protobuf.ByteString - getPageTokenBytes(); - - /** - * optional int32 page_size = 3; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - int getPageSize(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponse.java deleted file mode 100644 index 2cebbb193b82..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponse.java +++ /dev/null @@ -1,923 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListLogMetricsResponse} - * - *
- * Result returned from ListLogMetrics.
- * 
- */ -public final class ListLogMetricsResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListLogMetricsResponse) - ListLogMetricsResponseOrBuilder { - // Use ListLogMetricsResponse.newBuilder() to construct. - private ListLogMetricsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListLogMetricsResponse() { - metrics_ = java.util.Collections.emptyList(); - nextPageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListLogMetricsResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - metrics_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - metrics_.add(input.readMessage(com.google.logging.v2.LogMetric.parser(), extensionRegistry)); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - nextPageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - metrics_ = java.util.Collections.unmodifiableList(metrics_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { 
- return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogMetricsResponse.class, com.google.logging.v2.ListLogMetricsResponse.Builder.class); - } - - private int bitField0_; - public static final int METRICS_FIELD_NUMBER = 1; - private java.util.List metrics_; - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - public java.util.List getMetricsList() { - return metrics_; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - public java.util.List - getMetricsOrBuilderList() { - return metrics_; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - public int getMetricsCount() { - return metrics_.size(); - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - public com.google.logging.v2.LogMetric getMetrics(int index) { - return metrics_.get(index); - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricsOrBuilder( - int index) { - return metrics_.get(index); - } - - public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object nextPageToken_; - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is given
-   * a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } - } - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is given
-   * a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < metrics_.size(); i++) { - output.writeMessage(1, metrics_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < metrics_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, metrics_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsResponse 
parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListLogMetricsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListLogMetricsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListLogMetricsResponse prototype) { - return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListLogMetricsResponse} - * - *
-   * Result returned from ListLogMetrics.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListLogMetricsResponse) - com.google.logging.v2.ListLogMetricsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListLogMetricsResponse.class, com.google.logging.v2.ListLogMetricsResponse.Builder.class); - } - - // Construct using com.google.logging.v2.ListLogMetricsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getMetricsFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (metricsBuilder_ == null) { - metrics_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - metricsBuilder_.clear(); - } - nextPageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; - } - - public com.google.logging.v2.ListLogMetricsResponse getDefaultInstanceForType() { - return com.google.logging.v2.ListLogMetricsResponse.getDefaultInstance(); - } - - public com.google.logging.v2.ListLogMetricsResponse build() { - 
com.google.logging.v2.ListLogMetricsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListLogMetricsResponse buildPartial() { - com.google.logging.v2.ListLogMetricsResponse result = new com.google.logging.v2.ListLogMetricsResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (metricsBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - metrics_ = java.util.Collections.unmodifiableList(metrics_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.metrics_ = metrics_; - } else { - result.metrics_ = metricsBuilder_.build(); - } - result.nextPageToken_ = nextPageToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListLogMetricsResponse) { - return mergeFrom((com.google.logging.v2.ListLogMetricsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListLogMetricsResponse other) { - if (other == com.google.logging.v2.ListLogMetricsResponse.getDefaultInstance()) return this; - if (metricsBuilder_ == null) { - if (!other.metrics_.isEmpty()) { - if (metrics_.isEmpty()) { - metrics_ = other.metrics_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureMetricsIsMutable(); - metrics_.addAll(other.metrics_); - } - onChanged(); - } - } else { - if (!other.metrics_.isEmpty()) { - if (metricsBuilder_.isEmpty()) { - metricsBuilder_.dispose(); - metricsBuilder_ = null; - metrics_ = other.metrics_; - bitField0_ = (bitField0_ & ~0x00000001); - metricsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getMetricsFieldBuilder() : null; - } else { - metricsBuilder_.addAllMessages(other.metrics_); - } - } - } - if (!other.getNextPageToken().isEmpty()) { - nextPageToken_ = other.nextPageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListLogMetricsResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListLogMetricsResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List metrics_ = - java.util.Collections.emptyList(); - private void ensureMetricsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - metrics_ = new java.util.ArrayList(metrics_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> metricsBuilder_; - - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public java.util.List getMetricsList() { - if (metricsBuilder_ == null) { - return java.util.Collections.unmodifiableList(metrics_); - } else { - return metricsBuilder_.getMessageList(); - } - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public int getMetricsCount() { - if (metricsBuilder_ == null) { - return metrics_.size(); - } else { - return metricsBuilder_.getCount(); - } - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public com.google.logging.v2.LogMetric getMetrics(int index) { - if (metricsBuilder_ == null) { - return metrics_.get(index); - } else { - return metricsBuilder_.getMessage(index); - } - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder setMetrics( - int index, com.google.logging.v2.LogMetric value) { - if (metricsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMetricsIsMutable(); - metrics_.set(index, value); - onChanged(); - } else { - metricsBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder setMetrics( - int index, com.google.logging.v2.LogMetric.Builder builderForValue) { - if (metricsBuilder_ == null) { - ensureMetricsIsMutable(); - metrics_.set(index, builderForValue.build()); - onChanged(); - } else { - metricsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder addMetrics(com.google.logging.v2.LogMetric value) { - if (metricsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMetricsIsMutable(); - metrics_.add(value); - onChanged(); - } else { - metricsBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder addMetrics( - int index, com.google.logging.v2.LogMetric value) { - if (metricsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureMetricsIsMutable(); - metrics_.add(index, value); - onChanged(); - } else { - metricsBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder addMetrics( - com.google.logging.v2.LogMetric.Builder builderForValue) { - if (metricsBuilder_ == null) { - ensureMetricsIsMutable(); - metrics_.add(builderForValue.build()); - onChanged(); - } else { - metricsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder addMetrics( - int index, com.google.logging.v2.LogMetric.Builder builderForValue) { - if (metricsBuilder_ == null) { - ensureMetricsIsMutable(); - metrics_.add(index, builderForValue.build()); - onChanged(); - } else { - metricsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder addAllMetrics( - java.lang.Iterable values) { - if (metricsBuilder_ == null) { - ensureMetricsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, metrics_); - onChanged(); - } else { - metricsBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder clearMetrics() { - if (metricsBuilder_ == null) { - metrics_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - metricsBuilder_.clear(); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public Builder removeMetrics(int index) { - if (metricsBuilder_ == null) { - ensureMetricsIsMutable(); - metrics_.remove(index); - onChanged(); - } else { - metricsBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public com.google.logging.v2.LogMetric.Builder getMetricsBuilder( - int index) { - return getMetricsFieldBuilder().getBuilder(index); - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricsOrBuilder( - int index) { - if (metricsBuilder_ == null) { - return metrics_.get(index); } else { - return metricsBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public java.util.List - getMetricsOrBuilderList() { - if (metricsBuilder_ != null) { - return metricsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(metrics_); - } - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public com.google.logging.v2.LogMetric.Builder addMetricsBuilder() { - return getMetricsFieldBuilder().addBuilder( - com.google.logging.v2.LogMetric.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public com.google.logging.v2.LogMetric.Builder addMetricsBuilder( - int index) { - return getMetricsFieldBuilder().addBuilder( - index, com.google.logging.v2.LogMetric.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-     * A list of logs-based metrics.
-     * 
- */ - public java.util.List - getMetricsBuilderList() { - return getMetricsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> - getMetricsFieldBuilder() { - if (metricsBuilder_ == null) { - metricsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder>( - metrics_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - metrics_ = null; - } - return metricsBuilder_; - } - - private java.lang.Object nextPageToken_ = ""; - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is given
-     * a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is given
-     * a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is given
-     * a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - nextPageToken_ = value; - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is given
-     * a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder clearNextPageToken() { - - nextPageToken_ = getDefaultInstance().getNextPageToken(); - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is given
-     * a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - nextPageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListLogMetricsResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - private static final com.google.logging.v2.ListLogMetricsResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListLogMetricsResponse(); - } - - public static com.google.logging.v2.ListLogMetricsResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListLogMetricsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListLogMetricsResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListLogMetricsResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git 
a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponseOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponseOrBuilder.java deleted file mode 100644 index 64a6a9d2c5d5..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListLogMetricsResponseOrBuilder.java +++ /dev/null @@ -1,75 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface ListLogMetricsResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListLogMetricsResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - java.util.List - getMetricsList(); - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - com.google.logging.v2.LogMetric getMetrics(int index); - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - int getMetricsCount(); - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - java.util.List - getMetricsOrBuilderList(); - /** - * repeated .google.logging.v2.LogMetric metrics = 1; - * - *
-   * A list of logs-based metrics.
-   * 
- */ - com.google.logging.v2.LogMetricOrBuilder getMetricsOrBuilder( - int index); - - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is given
-   * a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - java.lang.String getNextPageToken(); - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is given
-   * a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - com.google.protobuf.ByteString - getNextPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequest.java deleted file mode 100644 index 414a42a309d4..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequest.java +++ /dev/null @@ -1,583 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListMonitoredResourceDescriptorsRequest} - * - *
- * The parameters to ListMonitoredResourceDescriptors
- * 
- */ -public final class ListMonitoredResourceDescriptorsRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - ListMonitoredResourceDescriptorsRequestOrBuilder { - // Use ListMonitoredResourceDescriptorsRequest.newBuilder() to construct. - private ListMonitoredResourceDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListMonitoredResourceDescriptorsRequest() { - pageSize_ = 0; - pageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListMonitoredResourceDescriptorsRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 8: { - - pageSize_ = input.readInt32(); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - pageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.class, com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.Builder.class); - } - - public static final int PAGE_SIZE_FIELD_NUMBER = 1; - private int pageSize_; - /** - * optional int32 page_size = 1; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - public int getPageSize() { - return pageSize_; - } - - public static final int PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object pageToken_; - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.
-   * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } - } - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.
-   * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (pageSize_ != 0) { - output.writeInt32(1, pageSize_); - } - if (!getPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, pageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (pageSize_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, pageSize_); - } - if (!getPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, pageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom(byte[] data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder 
newBuilder(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListMonitoredResourceDescriptorsRequest} - * - *
-   * The parameters to ListMonitoredResourceDescriptors
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - com.google.logging.v2.ListMonitoredResourceDescriptorsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.class, com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.Builder.class); - } - - // Construct using com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - pageSize_ = 0; - - pageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsRequest getDefaultInstanceForType() { - return com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.getDefaultInstance(); - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsRequest build() { - 
com.google.logging.v2.ListMonitoredResourceDescriptorsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsRequest buildPartial() { - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest result = new com.google.logging.v2.ListMonitoredResourceDescriptorsRequest(this); - result.pageSize_ = pageSize_; - result.pageToken_ = pageToken_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListMonitoredResourceDescriptorsRequest) { - return mergeFrom((com.google.logging.v2.ListMonitoredResourceDescriptorsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest other) { - if (other == com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.getDefaultInstance()) return this; - if (other.getPageSize() != 0) { - setPageSize(other.getPageSize()); - } - if (!other.getPageToken().isEmpty()) { - pageToken_ = other.pageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListMonitoredResourceDescriptorsRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private int pageSize_ ; - /** - * optional int32 page_size = 1; - 
* - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public int getPageSize() { - return pageSize_; - } - /** - * optional int32 page_size = 1; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder setPageSize(int value) { - - pageSize_ = value; - onChanged(); - return this; - } - /** - * optional int32 page_size = 1; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder clearPageSize() { - - pageSize_ = 0; - onChanged(); - return this; - } - - private java.lang.Object pageToken_ = ""; - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.
-     * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.
-     * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.
-     * 
- */ - public Builder setPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - pageToken_ = value; - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.
-     * 
- */ - public Builder clearPageToken() { - - pageToken_ = getDefaultInstance().getPageToken(); - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request.
-     * 
- */ - public Builder setPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - pageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - private static final com.google.logging.v2.ListMonitoredResourceDescriptorsRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListMonitoredResourceDescriptorsRequest(); - } - - public static com.google.logging.v2.ListMonitoredResourceDescriptorsRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListMonitoredResourceDescriptorsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListMonitoredResourceDescriptorsRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsRequest getDefaultInstanceForType() { - return 
DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequestOrBuilder.java deleted file mode 100644 index 81e37b7b2294..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsRequestOrBuilder.java +++ /dev/null @@ -1,46 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ListMonitoredResourceDescriptorsRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional int32 page_size = 1; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - int getPageSize(); - - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.
-   * 
- */ - java.lang.String getPageToken(); - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request.
-   * 
- */ - com.google.protobuf.ByteString - getPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponse.java deleted file mode 100644 index 2a22b7fd3109..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponse.java +++ /dev/null @@ -1,923 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListMonitoredResourceDescriptorsResponse} - * - *
- * Result returned from ListMonitoredResourceDescriptors.
- * 
- */ -public final class ListMonitoredResourceDescriptorsResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - ListMonitoredResourceDescriptorsResponseOrBuilder { - // Use ListMonitoredResourceDescriptorsResponse.newBuilder() to construct. - private ListMonitoredResourceDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListMonitoredResourceDescriptorsResponse() { - resourceDescriptors_ = java.util.Collections.emptyList(); - nextPageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListMonitoredResourceDescriptorsResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - resourceDescriptors_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - resourceDescriptors_.add(input.readMessage(com.google.api.MonitoredResourceDescriptor.parser(), extensionRegistry)); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - nextPageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - 
resourceDescriptors_ = java.util.Collections.unmodifiableList(resourceDescriptors_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.class, com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.Builder.class); - } - - private int bitField0_; - public static final int RESOURCE_DESCRIPTORS_FIELD_NUMBER = 1; - private java.util.List resourceDescriptors_; - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - public java.util.List getResourceDescriptorsList() { - return resourceDescriptors_; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - public java.util.List - getResourceDescriptorsOrBuilderList() { - return resourceDescriptors_; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - public int getResourceDescriptorsCount() { - return resourceDescriptors_.size(); - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - public com.google.api.MonitoredResourceDescriptor getResourceDescriptors(int index) { - return resourceDescriptors_.get(index); - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - public com.google.api.MonitoredResourceDescriptorOrBuilder getResourceDescriptorsOrBuilder( - int index) { - return resourceDescriptors_.get(index); - } - - public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object nextPageToken_; - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * returned in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } - } - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * returned in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < resourceDescriptors_.size(); i++) { - output.writeMessage(1, resourceDescriptors_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < resourceDescriptors_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, resourceDescriptors_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, 
extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return 
newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListMonitoredResourceDescriptorsResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListMonitoredResourceDescriptorsResponse} - * - *
-   * Result returned from ListMonitoredResourceDescriptors.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - com.google.logging.v2.ListMonitoredResourceDescriptorsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.class, com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.Builder.class); - } - - // Construct using com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getResourceDescriptorsFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (resourceDescriptorsBuilder_ == null) { - resourceDescriptors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - resourceDescriptorsBuilder_.clear(); - } - nextPageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse 
getDefaultInstanceForType() { - return com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.getDefaultInstance(); - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse build() { - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse buildPartial() { - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse result = new com.google.logging.v2.ListMonitoredResourceDescriptorsResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (resourceDescriptorsBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - resourceDescriptors_ = java.util.Collections.unmodifiableList(resourceDescriptors_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.resourceDescriptors_ = resourceDescriptors_; - } else { - result.resourceDescriptors_ = resourceDescriptorsBuilder_.build(); - } - result.nextPageToken_ = nextPageToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListMonitoredResourceDescriptorsResponse) { - return mergeFrom((com.google.logging.v2.ListMonitoredResourceDescriptorsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListMonitoredResourceDescriptorsResponse other) { - if (other == com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.getDefaultInstance()) return this; - if (resourceDescriptorsBuilder_ == null) { - if (!other.resourceDescriptors_.isEmpty()) { - if (resourceDescriptors_.isEmpty()) { - resourceDescriptors_ = other.resourceDescriptors_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureResourceDescriptorsIsMutable(); - 
resourceDescriptors_.addAll(other.resourceDescriptors_); - } - onChanged(); - } - } else { - if (!other.resourceDescriptors_.isEmpty()) { - if (resourceDescriptorsBuilder_.isEmpty()) { - resourceDescriptorsBuilder_.dispose(); - resourceDescriptorsBuilder_ = null; - resourceDescriptors_ = other.resourceDescriptors_; - bitField0_ = (bitField0_ & ~0x00000001); - resourceDescriptorsBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getResourceDescriptorsFieldBuilder() : null; - } else { - resourceDescriptorsBuilder_.addAllMessages(other.resourceDescriptors_); - } - } - } - if (!other.getNextPageToken().isEmpty()) { - nextPageToken_ = other.nextPageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListMonitoredResourceDescriptorsResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List resourceDescriptors_ = - java.util.Collections.emptyList(); - private void ensureResourceDescriptorsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - resourceDescriptors_ = new java.util.ArrayList(resourceDescriptors_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.api.MonitoredResourceDescriptor, com.google.api.MonitoredResourceDescriptor.Builder, com.google.api.MonitoredResourceDescriptorOrBuilder> resourceDescriptorsBuilder_; - - /** - * 
repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public java.util.List getResourceDescriptorsList() { - if (resourceDescriptorsBuilder_ == null) { - return java.util.Collections.unmodifiableList(resourceDescriptors_); - } else { - return resourceDescriptorsBuilder_.getMessageList(); - } - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public int getResourceDescriptorsCount() { - if (resourceDescriptorsBuilder_ == null) { - return resourceDescriptors_.size(); - } else { - return resourceDescriptorsBuilder_.getCount(); - } - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public com.google.api.MonitoredResourceDescriptor getResourceDescriptors(int index) { - if (resourceDescriptorsBuilder_ == null) { - return resourceDescriptors_.get(index); - } else { - return resourceDescriptorsBuilder_.getMessage(index); - } - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder setResourceDescriptors( - int index, com.google.api.MonitoredResourceDescriptor value) { - if (resourceDescriptorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.set(index, value); - onChanged(); - } else { - resourceDescriptorsBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder setResourceDescriptors( - int index, com.google.api.MonitoredResourceDescriptor.Builder builderForValue) { - if (resourceDescriptorsBuilder_ == null) { - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.set(index, builderForValue.build()); - onChanged(); - } else { - resourceDescriptorsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder addResourceDescriptors(com.google.api.MonitoredResourceDescriptor value) { - if (resourceDescriptorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.add(value); - onChanged(); - } else { - resourceDescriptorsBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder addResourceDescriptors( - int index, com.google.api.MonitoredResourceDescriptor value) { - if (resourceDescriptorsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.add(index, value); - onChanged(); - } else { - resourceDescriptorsBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder addResourceDescriptors( - com.google.api.MonitoredResourceDescriptor.Builder builderForValue) { - if (resourceDescriptorsBuilder_ == null) { - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.add(builderForValue.build()); - onChanged(); - } else { - resourceDescriptorsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder addResourceDescriptors( - int index, com.google.api.MonitoredResourceDescriptor.Builder builderForValue) { - if (resourceDescriptorsBuilder_ == null) { - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.add(index, builderForValue.build()); - onChanged(); - } else { - resourceDescriptorsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder addAllResourceDescriptors( - java.lang.Iterable values) { - if (resourceDescriptorsBuilder_ == null) { - ensureResourceDescriptorsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, resourceDescriptors_); - onChanged(); - } else { - resourceDescriptorsBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder clearResourceDescriptors() { - if (resourceDescriptorsBuilder_ == null) { - resourceDescriptors_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - resourceDescriptorsBuilder_.clear(); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public Builder removeResourceDescriptors(int index) { - if (resourceDescriptorsBuilder_ == null) { - ensureResourceDescriptorsIsMutable(); - resourceDescriptors_.remove(index); - onChanged(); - } else { - resourceDescriptorsBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public com.google.api.MonitoredResourceDescriptor.Builder getResourceDescriptorsBuilder( - int index) { - return getResourceDescriptorsFieldBuilder().getBuilder(index); - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public com.google.api.MonitoredResourceDescriptorOrBuilder getResourceDescriptorsOrBuilder( - int index) { - if (resourceDescriptorsBuilder_ == null) { - return resourceDescriptors_.get(index); } else { - return resourceDescriptorsBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public java.util.List - getResourceDescriptorsOrBuilderList() { - if (resourceDescriptorsBuilder_ != null) { - return resourceDescriptorsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(resourceDescriptors_); - } - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public com.google.api.MonitoredResourceDescriptor.Builder addResourceDescriptorsBuilder() { - return getResourceDescriptorsFieldBuilder().addBuilder( - com.google.api.MonitoredResourceDescriptor.getDefaultInstance()); - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public com.google.api.MonitoredResourceDescriptor.Builder addResourceDescriptorsBuilder( - int index) { - return getResourceDescriptorsFieldBuilder().addBuilder( - index, com.google.api.MonitoredResourceDescriptor.getDefaultInstance()); - } - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-     * A list of resource descriptors.
-     * 
- */ - public java.util.List - getResourceDescriptorsBuilderList() { - return getResourceDescriptorsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.api.MonitoredResourceDescriptor, com.google.api.MonitoredResourceDescriptor.Builder, com.google.api.MonitoredResourceDescriptorOrBuilder> - getResourceDescriptorsFieldBuilder() { - if (resourceDescriptorsBuilder_ == null) { - resourceDescriptorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.api.MonitoredResourceDescriptor, com.google.api.MonitoredResourceDescriptor.Builder, com.google.api.MonitoredResourceDescriptorOrBuilder>( - resourceDescriptors_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - resourceDescriptors_ = null; - } - return resourceDescriptorsBuilder_; - } - - private java.lang.Object nextPageToken_ = ""; - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * returned in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * returned in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * returned in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - nextPageToken_ = value; - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * returned in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder clearNextPageToken() { - - nextPageToken_ = getDefaultInstance().getNextPageToken(); - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * returned in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - nextPageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - private static final com.google.logging.v2.ListMonitoredResourceDescriptorsResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListMonitoredResourceDescriptorsResponse(); - } - - public static com.google.logging.v2.ListMonitoredResourceDescriptorsResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListMonitoredResourceDescriptorsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListMonitoredResourceDescriptorsResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse getDefaultInstanceForType() { - 
return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponseOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponseOrBuilder.java deleted file mode 100644 index f582b8256eca..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListMonitoredResourceDescriptorsResponseOrBuilder.java +++ /dev/null @@ -1,75 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ListMonitoredResourceDescriptorsResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - java.util.List - getResourceDescriptorsList(); - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - com.google.api.MonitoredResourceDescriptor getResourceDescriptors(int index); - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - int getResourceDescriptorsCount(); - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - java.util.List - getResourceDescriptorsOrBuilderList(); - /** - * repeated .google.api.MonitoredResourceDescriptor resource_descriptors = 1; - * - *
-   * A list of resource descriptors.
-   * 
- */ - com.google.api.MonitoredResourceDescriptorOrBuilder getResourceDescriptorsOrBuilder( - int index); - - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * returned in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - java.lang.String getNextPageToken(); - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * returned in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - com.google.protobuf.ByteString - getNextPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequest.java deleted file mode 100644 index 3c1400d169ff..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequest.java +++ /dev/null @@ -1,748 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListSinksRequest} - * - *
- * The parameters to `ListSinks`.
- * 
- */ -public final class ListSinksRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListSinksRequest) - ListSinksRequestOrBuilder { - // Use ListSinksRequest.newBuilder() to construct. - private ListSinksRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListSinksRequest() { - projectName_ = ""; - pageToken_ = ""; - pageSize_ = 0; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListSinksRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - projectName_ = s; - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - pageToken_ = s; - break; - } - case 24: { - - pageSize_ = input.readInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListSinksRequest.class, com.google.logging.v2.ListSinksRequest.Builder.class); - } - - public static final int PROJECT_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object projectName_; - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name of the project owning the sinks.
-   * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-   * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } - } - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name of the project owning the sinks.
-   * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object pageToken_; - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request. The value of `projectName` must be the same as in the
-   * previous request.
-   * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } - } - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request. The value of `projectName` must be the same as in the
-   * previous request.
-   * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PAGE_SIZE_FIELD_NUMBER = 3; - private int pageSize_; - /** - * optional int32 page_size = 3; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - public int getPageSize() { - return pageSize_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getProjectNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectName_); - } - if (!getPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, pageToken_); - } - if (pageSize_ != 0) { - output.writeInt32(3, pageSize_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getProjectNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, projectName_); - } - if (!getPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, pageToken_); - } - if (pageSize_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(3, pageSize_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListSinksRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListSinksRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListSinksRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
com.google.logging.v2.ListSinksRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListSinksRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListSinksRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListSinksRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListSinksRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListSinksRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListSinksRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListSinksRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListSinksRequest} - * - *
-   * The parameters to `ListSinks`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListSinksRequest) - com.google.logging.v2.ListSinksRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListSinksRequest.class, com.google.logging.v2.ListSinksRequest.Builder.class); - } - - // Construct using com.google.logging.v2.ListSinksRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectName_ = ""; - - pageToken_ = ""; - - pageSize_ = 0; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksRequest_descriptor; - } - - public com.google.logging.v2.ListSinksRequest getDefaultInstanceForType() { - return com.google.logging.v2.ListSinksRequest.getDefaultInstance(); - } - - public com.google.logging.v2.ListSinksRequest build() { - com.google.logging.v2.ListSinksRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListSinksRequest buildPartial() { - 
com.google.logging.v2.ListSinksRequest result = new com.google.logging.v2.ListSinksRequest(this); - result.projectName_ = projectName_; - result.pageToken_ = pageToken_; - result.pageSize_ = pageSize_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListSinksRequest) { - return mergeFrom((com.google.logging.v2.ListSinksRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListSinksRequest other) { - if (other == com.google.logging.v2.ListSinksRequest.getDefaultInstance()) return this; - if (!other.getProjectName().isEmpty()) { - projectName_ = other.projectName_; - onChanged(); - } - if (!other.getPageToken().isEmpty()) { - pageToken_ = other.pageToken_; - onChanged(); - } - if (other.getPageSize() != 0) { - setPageSize(other.getPageSize()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListSinksRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListSinksRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object projectName_ = ""; - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name of the project owning the sinks.
-     * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-     * 
- */ - public java.lang.String getProjectName() { - java.lang.Object ref = projectName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - projectName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name of the project owning the sinks.
-     * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectNameBytes() { - java.lang.Object ref = projectName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - projectName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name of the project owning the sinks.
-     * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-     * 
- */ - public Builder setProjectName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - projectName_ = value; - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name of the project owning the sinks.
-     * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-     * 
- */ - public Builder clearProjectName() { - - projectName_ = getDefaultInstance().getProjectName(); - onChanged(); - return this; - } - /** - * optional string project_name = 1; - * - *
-     * Required. The resource name of the project owning the sinks.
-     * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-     * 
- */ - public Builder setProjectNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - projectName_ = value; - onChanged(); - return this; - } - - private java.lang.Object pageToken_ = ""; - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request. The value of `projectName` must be the same as in the
-     * previous request.
-     * 
- */ - public java.lang.String getPageToken() { - java.lang.Object ref = pageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - pageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request. The value of `projectName` must be the same as in the
-     * previous request.
-     * 
- */ - public com.google.protobuf.ByteString - getPageTokenBytes() { - java.lang.Object ref = pageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - pageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request. The value of `projectName` must be the same as in the
-     * previous request.
-     * 
- */ - public Builder setPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - pageToken_ = value; - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request. The value of `projectName` must be the same as in the
-     * previous request.
-     * 
- */ - public Builder clearPageToken() { - - pageToken_ = getDefaultInstance().getPageToken(); - onChanged(); - return this; - } - /** - * optional string page_token = 2; - * - *
-     * Optional. If the `pageToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `pageToken` parameter must
-     * be set with the value of the `nextPageToken` result parameter from the
-     * previous request. The value of `projectName` must be the same as in the
-     * previous request.
-     * 
- */ - public Builder setPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - pageToken_ = value; - onChanged(); - return this; - } - - private int pageSize_ ; - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public int getPageSize() { - return pageSize_; - } - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder setPageSize(int value) { - - pageSize_ = value; - onChanged(); - return this; - } - /** - * optional int32 page_size = 3; - * - *
-     * Optional. The maximum number of results to return from this request.  Fewer
-     * results might be returned. You must check for the 'nextPageToken` result to
-     * determine if additional results are available, which you can retrieve by
-     * passing the `nextPageToken` value in the `pageToken` parameter to the next
-     * request.
-     * 
- */ - public Builder clearPageSize() { - - pageSize_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListSinksRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - private static final com.google.logging.v2.ListSinksRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListSinksRequest(); - } - - public static com.google.logging.v2.ListSinksRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListSinksRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListSinksRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListSinksRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequestOrBuilder.java deleted file mode 100644 index 906d04b854f1..000000000000 --- 
a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksRequestOrBuilder.java +++ /dev/null @@ -1,68 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface ListSinksRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListSinksRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name of the project owning the sinks.
-   * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-   * 
- */ - java.lang.String getProjectName(); - /** - * optional string project_name = 1; - * - *
-   * Required. The resource name of the project owning the sinks.
-   * Example: `"projects/my-logging-project"`, `"projects/01234567890"`.
-   * 
- */ - com.google.protobuf.ByteString - getProjectNameBytes(); - - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request. The value of `projectName` must be the same as in the
-   * previous request.
-   * 
- */ - java.lang.String getPageToken(); - /** - * optional string page_token = 2; - * - *
-   * Optional. If the `pageToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `pageToken` parameter must
-   * be set with the value of the `nextPageToken` result parameter from the
-   * previous request. The value of `projectName` must be the same as in the
-   * previous request.
-   * 
- */ - com.google.protobuf.ByteString - getPageTokenBytes(); - - /** - * optional int32 page_size = 3; - * - *
-   * Optional. The maximum number of results to return from this request.  Fewer
-   * results might be returned. You must check for the 'nextPageToken` result to
-   * determine if additional results are available, which you can retrieve by
-   * passing the `nextPageToken` value in the `pageToken` parameter to the next
-   * request.
-   * 
- */ - int getPageSize(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponse.java deleted file mode 100644 index 34fe7374d53c..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponse.java +++ /dev/null @@ -1,923 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ListSinksResponse} - * - *
- * Result returned from `ListSinks`.
- * 
- */ -public final class ListSinksResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ListSinksResponse) - ListSinksResponseOrBuilder { - // Use ListSinksResponse.newBuilder() to construct. - private ListSinksResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ListSinksResponse() { - sinks_ = java.util.Collections.emptyList(); - nextPageToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ListSinksResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - sinks_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - sinks_.add(input.readMessage(com.google.logging.v2.LogSink.parser(), extensionRegistry)); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - nextPageToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - sinks_ = java.util.Collections.unmodifiableList(sinks_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListSinksResponse.class, com.google.logging.v2.ListSinksResponse.Builder.class); - } - - private int bitField0_; - public static final int SINKS_FIELD_NUMBER = 1; - private java.util.List sinks_; - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - public java.util.List getSinksList() { - return sinks_; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - public java.util.List - getSinksOrBuilderList() { - return sinks_; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - public int getSinksCount() { - return sinks_.size(); - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - public com.google.logging.v2.LogSink getSinks(int index) { - return sinks_.get(index); - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinksOrBuilder( - int index) { - return sinks_.get(index); - } - - public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object nextPageToken_; - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } - } - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < sinks_.size(); i++) { - output.writeMessage(1, sinks_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, nextPageToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < sinks_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, sinks_.get(i)); - } - if (!getNextPageTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, nextPageToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ListSinksResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListSinksResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListSinksResponse parseFrom(byte[] data) - 
throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ListSinksResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ListSinksResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListSinksResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListSinksResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ListSinksResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ListSinksResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ListSinksResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ListSinksResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return 
this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ListSinksResponse} - * - *
-   * Result returned from `ListSinks`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ListSinksResponse) - com.google.logging.v2.ListSinksResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ListSinksResponse.class, com.google.logging.v2.ListSinksResponse.Builder.class); - } - - // Construct using com.google.logging.v2.ListSinksResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getSinksFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (sinksBuilder_ == null) { - sinks_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - sinksBuilder_.clear(); - } - nextPageToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_ListSinksResponse_descriptor; - } - - public com.google.logging.v2.ListSinksResponse getDefaultInstanceForType() { - return com.google.logging.v2.ListSinksResponse.getDefaultInstance(); - } - - public com.google.logging.v2.ListSinksResponse build() { - com.google.logging.v2.ListSinksResponse result = buildPartial(); - if 
(!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ListSinksResponse buildPartial() { - com.google.logging.v2.ListSinksResponse result = new com.google.logging.v2.ListSinksResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (sinksBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - sinks_ = java.util.Collections.unmodifiableList(sinks_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.sinks_ = sinks_; - } else { - result.sinks_ = sinksBuilder_.build(); - } - result.nextPageToken_ = nextPageToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ListSinksResponse) { - return mergeFrom((com.google.logging.v2.ListSinksResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ListSinksResponse other) { - if (other == com.google.logging.v2.ListSinksResponse.getDefaultInstance()) return this; - if (sinksBuilder_ == null) { - if (!other.sinks_.isEmpty()) { - if (sinks_.isEmpty()) { - sinks_ = other.sinks_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureSinksIsMutable(); - sinks_.addAll(other.sinks_); - } - onChanged(); - } - } else { - if (!other.sinks_.isEmpty()) { - if (sinksBuilder_.isEmpty()) { - sinksBuilder_.dispose(); - sinksBuilder_ = null; - sinks_ = other.sinks_; - bitField0_ = (bitField0_ & ~0x00000001); - sinksBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getSinksFieldBuilder() : null; - } else { - sinksBuilder_.addAllMessages(other.sinks_); - } - } - } - if (!other.getNextPageToken().isEmpty()) { - nextPageToken_ = other.nextPageToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ListSinksResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ListSinksResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List sinks_ = - java.util.Collections.emptyList(); - private void ensureSinksIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - sinks_ = new java.util.ArrayList(sinks_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> sinksBuilder_; - - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public java.util.List getSinksList() { - if (sinksBuilder_ == null) { - return java.util.Collections.unmodifiableList(sinks_); - } else { - return sinksBuilder_.getMessageList(); - } - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public int getSinksCount() { - if (sinksBuilder_ == null) { - return sinks_.size(); - } else { - return sinksBuilder_.getCount(); - } - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public com.google.logging.v2.LogSink getSinks(int index) { - if (sinksBuilder_ == null) { - return sinks_.get(index); - } else { - return sinksBuilder_.getMessage(index); - } - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder setSinks( - int index, com.google.logging.v2.LogSink value) { - if (sinksBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureSinksIsMutable(); - sinks_.set(index, value); - onChanged(); - } else { - sinksBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder setSinks( - int index, com.google.logging.v2.LogSink.Builder builderForValue) { - if (sinksBuilder_ == null) { - ensureSinksIsMutable(); - sinks_.set(index, builderForValue.build()); - onChanged(); - } else { - sinksBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder addSinks(com.google.logging.v2.LogSink value) { - if (sinksBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureSinksIsMutable(); - sinks_.add(value); - onChanged(); - } else { - sinksBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder addSinks( - int index, com.google.logging.v2.LogSink value) { - if (sinksBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureSinksIsMutable(); - sinks_.add(index, value); - onChanged(); - } else { - sinksBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder addSinks( - com.google.logging.v2.LogSink.Builder builderForValue) { - if (sinksBuilder_ == null) { - ensureSinksIsMutable(); - sinks_.add(builderForValue.build()); - onChanged(); - } else { - sinksBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder addSinks( - int index, com.google.logging.v2.LogSink.Builder builderForValue) { - if (sinksBuilder_ == null) { - ensureSinksIsMutable(); - sinks_.add(index, builderForValue.build()); - onChanged(); - } else { - sinksBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder addAllSinks( - java.lang.Iterable values) { - if (sinksBuilder_ == null) { - ensureSinksIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, sinks_); - onChanged(); - } else { - sinksBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder clearSinks() { - if (sinksBuilder_ == null) { - sinks_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - sinksBuilder_.clear(); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public Builder removeSinks(int index) { - if (sinksBuilder_ == null) { - ensureSinksIsMutable(); - sinks_.remove(index); - onChanged(); - } else { - sinksBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public com.google.logging.v2.LogSink.Builder getSinksBuilder( - int index) { - return getSinksFieldBuilder().getBuilder(index); - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinksOrBuilder( - int index) { - if (sinksBuilder_ == null) { - return sinks_.get(index); } else { - return sinksBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public java.util.List - getSinksOrBuilderList() { - if (sinksBuilder_ != null) { - return sinksBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(sinks_); - } - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public com.google.logging.v2.LogSink.Builder addSinksBuilder() { - return getSinksFieldBuilder().addBuilder( - com.google.logging.v2.LogSink.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public com.google.logging.v2.LogSink.Builder addSinksBuilder( - int index) { - return getSinksFieldBuilder().addBuilder( - index, com.google.logging.v2.LogSink.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-     * A list of sinks.
-     * 
- */ - public java.util.List - getSinksBuilderList() { - return getSinksFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> - getSinksFieldBuilder() { - if (sinksBuilder_ == null) { - sinksBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder>( - sinks_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - sinks_ = null; - } - return sinksBuilder_; - } - - private java.lang.Object nextPageToken_ = ""; - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public java.lang.String getNextPageToken() { - java.lang.Object ref = nextPageToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nextPageToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public com.google.protobuf.ByteString - getNextPageTokenBytes() { - java.lang.Object ref = nextPageToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nextPageToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - nextPageToken_ = value; - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder clearNextPageToken() { - - nextPageToken_ = getDefaultInstance().getNextPageToken(); - onChanged(); - return this; - } - /** - * optional string next_page_token = 2; - * - *
-     * If there are more results than were returned, then `nextPageToken` is
-     * given a value in the response.  To get the next batch of results, call this
-     * method again using the value of `nextPageToken` as `pageToken`.
-     * 
- */ - public Builder setNextPageTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - nextPageToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ListSinksResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - private static final com.google.logging.v2.ListSinksResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ListSinksResponse(); - } - - public static com.google.logging.v2.ListSinksResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ListSinksResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ListSinksResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ListSinksResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponseOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponseOrBuilder.java deleted file mode 100644 index a3b19888ff59..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ListSinksResponseOrBuilder.java +++ /dev/null @@ -1,75 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface ListSinksResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ListSinksResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - java.util.List - getSinksList(); - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - com.google.logging.v2.LogSink getSinks(int index); - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - int getSinksCount(); - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - java.util.List - getSinksOrBuilderList(); - /** - * repeated .google.logging.v2.LogSink sinks = 1; - * - *
-   * A list of sinks.
-   * 
- */ - com.google.logging.v2.LogSinkOrBuilder getSinksOrBuilder( - int index); - - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - java.lang.String getNextPageToken(); - /** - * optional string next_page_token = 2; - * - *
-   * If there are more results than were returned, then `nextPageToken` is
-   * given a value in the response.  To get the next batch of results, call this
-   * method again using the value of `nextPageToken` as `pageToken`.
-   * 
- */ - com.google.protobuf.ByteString - getNextPageTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntry.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntry.java deleted file mode 100644 index 960515d3c347..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntry.java +++ /dev/null @@ -1,2648 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/log_entry.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.LogEntry} - * - *
- * An individual entry in a log.
- * 
- */ -public final class LogEntry extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.LogEntry) - LogEntryOrBuilder { - // Use LogEntry.newBuilder() to construct. - private LogEntry(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private LogEntry() { - logName_ = ""; - severity_ = 0; - insertId_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private LogEntry( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 18: { - com.google.protobuf.Any.Builder subBuilder = null; - if (payloadCase_ == 2) { - subBuilder = ((com.google.protobuf.Any) payload_).toBuilder(); - } - payload_ = - input.readMessage(com.google.protobuf.Any.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom((com.google.protobuf.Any) payload_); - payload_ = subBuilder.buildPartial(); - } - payloadCase_ = 2; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - payloadCase_ = 3; - payload_ = s; - break; - } - case 34: { - String s = input.readStringRequireUtf8(); - - insertId_ = s; - break; - } - case 50: { - com.google.protobuf.Struct.Builder subBuilder = null; - if (payloadCase_ == 6) { - subBuilder = ((com.google.protobuf.Struct) payload_).toBuilder(); - } - payload_ = - input.readMessage(com.google.protobuf.Struct.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom((com.google.protobuf.Struct) payload_); - payload_ = subBuilder.buildPartial(); - } - payloadCase_ = 6; - break; - } - case 
58: { - com.google.logging.type.HttpRequest.Builder subBuilder = null; - if (httpRequest_ != null) { - subBuilder = httpRequest_.toBuilder(); - } - httpRequest_ = input.readMessage(com.google.logging.type.HttpRequest.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(httpRequest_); - httpRequest_ = subBuilder.buildPartial(); - } - - break; - } - case 66: { - com.google.api.MonitoredResource.Builder subBuilder = null; - if (resource_ != null) { - subBuilder = resource_.toBuilder(); - } - resource_ = input.readMessage(com.google.api.MonitoredResource.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(resource_); - resource_ = subBuilder.buildPartial(); - } - - break; - } - case 74: { - com.google.protobuf.Timestamp.Builder subBuilder = null; - if (timestamp_ != null) { - subBuilder = timestamp_.toBuilder(); - } - timestamp_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(timestamp_); - timestamp_ = subBuilder.buildPartial(); - } - - break; - } - case 80: { - int rawValue = input.readEnum(); - - severity_ = rawValue; - break; - } - case 90: { - if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { - labels_ = com.google.protobuf.MapField.newMapField( - LabelsDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000200; - } - com.google.protobuf.MapEntry - labels = input.readMessage( - LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); - labels_.getMutableMap().put(labels.getKey(), labels.getValue()); - break; - } - case 98: { - String s = input.readStringRequireUtf8(); - - logName_ = s; - break; - } - case 122: { - com.google.logging.v2.LogEntryOperation.Builder subBuilder = null; - if (operation_ != null) { - subBuilder = operation_.toBuilder(); - } - operation_ = input.readMessage(com.google.logging.v2.LogEntryOperation.parser(), extensionRegistry); - if (subBuilder != null) { - 
subBuilder.mergeFrom(operation_); - operation_ = subBuilder.buildPartial(); - } - - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 11: - return internalGetLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogEntry.class, com.google.logging.v2.LogEntry.Builder.class); - } - - private int bitField0_; - private int payloadCase_ = 0; - private java.lang.Object payload_; - public enum PayloadCase - implements com.google.protobuf.Internal.EnumLite { - PROTO_PAYLOAD(2), - TEXT_PAYLOAD(3), - JSON_PAYLOAD(6), - PAYLOAD_NOT_SET(0); - private int value = 0; - private PayloadCase(int value) { - this.value = value; - } - public static PayloadCase valueOf(int value) { - switch (value) { - case 2: return PROTO_PAYLOAD; - case 3: return TEXT_PAYLOAD; - case 6: return JSON_PAYLOAD; - case 0: return PAYLOAD_NOT_SET; - default: throw new java.lang.IllegalArgumentException( - "Value is undefined for this oneof enum."); - } - } - public int getNumber() { - return this.value; - } - }; - - public PayloadCase - getPayloadCase() { - 
return PayloadCase.valueOf( - payloadCase_); - } - - public static final int LOG_NAME_FIELD_NUMBER = 12; - private volatile java.lang.Object logName_; - /** - * optional string log_name = 12; - * - *
-   * Required. The resource name of the log to which this log entry
-   * belongs. The format of the name is
-   * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-   * `"projects/my-projectid/logs/syslog"`,
-   * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-   * The log ID part of resource name must be less than 512 characters
-   * long and can only include the following characters: upper and
-   * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-   * characters: forward-slash, underscore, hyphen, and period.
-   * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-   * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } - } - /** - * optional string log_name = 12; - * - *
-   * Required. The resource name of the log to which this log entry
-   * belongs. The format of the name is
-   * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-   * `"projects/my-projectid/logs/syslog"`,
-   * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-   * The log ID part of resource name must be less than 512 characters
-   * long and can only include the following characters: upper and
-   * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-   * characters: forward-slash, underscore, hyphen, and period.
-   * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-   * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int RESOURCE_FIELD_NUMBER = 8; - private com.google.api.MonitoredResource resource_; - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - public boolean hasResource() { - return resource_ != null; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - public com.google.api.MonitoredResource getResource() { - return resource_ == null ? com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - public com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder() { - return getResource(); - } - - public static final int PROTO_PAYLOAD_FIELD_NUMBER = 2; - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-   * The log entry payload, represented as a protocol buffer.
-   * You can only use `protoPayload` values that belong to a set of approved
-   * types.
-   * 
- */ - public com.google.protobuf.Any getProtoPayload() { - if (payloadCase_ == 2) { - return (com.google.protobuf.Any) payload_; - } - return com.google.protobuf.Any.getDefaultInstance(); - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-   * The log entry payload, represented as a protocol buffer.
-   * You can only use `protoPayload` values that belong to a set of approved
-   * types.
-   * 
- */ - public com.google.protobuf.AnyOrBuilder getProtoPayloadOrBuilder() { - if (payloadCase_ == 2) { - return (com.google.protobuf.Any) payload_; - } - return com.google.protobuf.Any.getDefaultInstance(); - } - - public static final int TEXT_PAYLOAD_FIELD_NUMBER = 3; - /** - * optional string text_payload = 3; - * - *
-   * The log entry payload, represented as a Unicode string (UTF-8).
-   * 
- */ - public java.lang.String getTextPayload() { - java.lang.Object ref = ""; - if (payloadCase_ == 3) { - ref = payload_; - } - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (payloadCase_ == 3) { - payload_ = s; - } - return s; - } - } - /** - * optional string text_payload = 3; - * - *
-   * The log entry payload, represented as a Unicode string (UTF-8).
-   * 
- */ - public com.google.protobuf.ByteString - getTextPayloadBytes() { - java.lang.Object ref = ""; - if (payloadCase_ == 3) { - ref = payload_; - } - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - if (payloadCase_ == 3) { - payload_ = b; - } - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int JSON_PAYLOAD_FIELD_NUMBER = 6; - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-   * The log entry payload, represented as a structure that
-   * is expressed as a JSON object.
-   * 
- */ - public com.google.protobuf.Struct getJsonPayload() { - if (payloadCase_ == 6) { - return (com.google.protobuf.Struct) payload_; - } - return com.google.protobuf.Struct.getDefaultInstance(); - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-   * The log entry payload, represented as a structure that
-   * is expressed as a JSON object.
-   * 
- */ - public com.google.protobuf.StructOrBuilder getJsonPayloadOrBuilder() { - if (payloadCase_ == 6) { - return (com.google.protobuf.Struct) payload_; - } - return com.google.protobuf.Struct.getDefaultInstance(); - } - - public static final int TIMESTAMP_FIELD_NUMBER = 9; - private com.google.protobuf.Timestamp timestamp_; - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - public boolean hasTimestamp() { - return timestamp_ != null; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - public com.google.protobuf.Timestamp getTimestamp() { - return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() { - return getTimestamp(); - } - - public static final int SEVERITY_FIELD_NUMBER = 10; - private int severity_; - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-   * Optional. The severity of the log entry. The default value is
-   * `LogSeverity.DEFAULT`.
-   * 
- */ - public int getSeverityValue() { - return severity_; - } - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-   * Optional. The severity of the log entry. The default value is
-   * `LogSeverity.DEFAULT`.
-   * 
- */ - public com.google.logging.type.LogSeverity getSeverity() { - com.google.logging.type.LogSeverity result = com.google.logging.type.LogSeverity.valueOf(severity_); - return result == null ? com.google.logging.type.LogSeverity.UNRECOGNIZED : result; - } - - public static final int INSERT_ID_FIELD_NUMBER = 4; - private volatile java.lang.Object insertId_; - /** - * optional string insert_id = 4; - * - *
-   * Optional. A unique ID for the log entry. If you provide this field, the
-   * logging service considers other log entries in the same log with the same
-   * ID as duplicates which can be removed.
-   * If omitted, Cloud Logging will generate a unique ID for this log entry.
-   * 
- */ - public java.lang.String getInsertId() { - java.lang.Object ref = insertId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - insertId_ = s; - return s; - } - } - /** - * optional string insert_id = 4; - * - *
-   * Optional. A unique ID for the log entry. If you provide this field, the
-   * logging service considers other log entries in the same log with the same
-   * ID as duplicates which can be removed.
-   * If omitted, Cloud Logging will generate a unique ID for this log entry.
-   * 
- */ - public com.google.protobuf.ByteString - getInsertIdBytes() { - java.lang.Object ref = insertId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - insertId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int HTTP_REQUEST_FIELD_NUMBER = 7; - private com.google.logging.type.HttpRequest httpRequest_; - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - public boolean hasHttpRequest() { - return httpRequest_ != null; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - public com.google.logging.type.HttpRequest getHttpRequest() { - return httpRequest_ == null ? com.google.logging.type.HttpRequest.getDefaultInstance() : httpRequest_; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - public com.google.logging.type.HttpRequestOrBuilder getHttpRequestOrBuilder() { - return getHttpRequest(); - } - - public static final int LABELS_FIELD_NUMBER = 11; - private static final class LabelsDefaultEntryHolder { - static final com.google.protobuf.MapEntry< - java.lang.String, java.lang.String> defaultEntry = - com.google.protobuf.MapEntry - .newDefaultInstance( - com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_LabelsEntry_descriptor, - com.google.protobuf.WireFormat.FieldType.STRING, - "", - com.google.protobuf.WireFormat.FieldType.STRING, - ""); - } - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> labels_; - private com.google.protobuf.MapField - internalGetLabels() { - if (labels_ == null) { - return com.google.protobuf.MapField.emptyMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - return labels_; - } - /** - * map<string, string> labels = 11; - * - *
-   * Optional. A set of user-defined (key, value) data that provides additional
-   * information about the log entry.
-   * 
- */ - - public java.util.Map getLabels() { - return internalGetLabels().getMap(); - } - - public static final int OPERATION_FIELD_NUMBER = 15; - private com.google.logging.v2.LogEntryOperation operation_; - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - public boolean hasOperation() { - return operation_ != null; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - public com.google.logging.v2.LogEntryOperation getOperation() { - return operation_ == null ? com.google.logging.v2.LogEntryOperation.getDefaultInstance() : operation_; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - public com.google.logging.v2.LogEntryOperationOrBuilder getOperationOrBuilder() { - return getOperation(); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (payloadCase_ == 2) { - output.writeMessage(2, (com.google.protobuf.Any) payload_); - } - if (payloadCase_ == 3) { - com.google.protobuf.GeneratedMessage.writeString(output, 3, payload_); - } - if (!getInsertIdBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 4, insertId_); - } - if (payloadCase_ == 6) { - output.writeMessage(6, (com.google.protobuf.Struct) payload_); - } - if (httpRequest_ != null) { - output.writeMessage(7, getHttpRequest()); - } - if (resource_ != null) { - output.writeMessage(8, getResource()); - } - if (timestamp_ != null) { - output.writeMessage(9, getTimestamp()); - } - if (severity_ != com.google.logging.type.LogSeverity.DEFAULT.getNumber()) { - output.writeEnum(10, severity_); - } - for (java.util.Map.Entry entry - : internalGetLabels().getMap().entrySet()) { - com.google.protobuf.MapEntry - labels = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() - .setKey(entry.getKey()) - .setValue(entry.getValue()) - .build(); - output.writeMessage(11, labels); - } - if (!getLogNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 12, logName_); - } - if (operation_ != null) { - output.writeMessage(15, getOperation()); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (payloadCase_ == 2) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, (com.google.protobuf.Any) payload_); - } - if (payloadCase_ == 3) { - size += 
com.google.protobuf.GeneratedMessage.computeStringSize(3, payload_); - } - if (!getInsertIdBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(4, insertId_); - } - if (payloadCase_ == 6) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, (com.google.protobuf.Struct) payload_); - } - if (httpRequest_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(7, getHttpRequest()); - } - if (resource_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(8, getResource()); - } - if (timestamp_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(9, getTimestamp()); - } - if (severity_ != com.google.logging.type.LogSeverity.DEFAULT.getNumber()) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(10, severity_); - } - for (java.util.Map.Entry entry - : internalGetLabels().getMap().entrySet()) { - com.google.protobuf.MapEntry - labels = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() - .setKey(entry.getKey()) - .setValue(entry.getValue()) - .build(); - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(11, labels); - } - if (!getLogNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(12, logName_); - } - if (operation_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(15, getOperation()); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.LogEntry parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogEntry parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, 
extensionRegistry); - } - public static com.google.logging.v2.LogEntry parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogEntry parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogEntry parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogEntry parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogEntry parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.LogEntry parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogEntry parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogEntry parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.LogEntry prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder 
toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.LogEntry} - * - *
-   * An individual entry in a log.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.LogEntry) - com.google.logging.v2.LogEntryOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 11: - return internalGetLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField( - int number) { - switch (number) { - case 11: - return internalGetMutableLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogEntry.class, com.google.logging.v2.LogEntry.Builder.class); - } - - // Construct using com.google.logging.v2.LogEntry.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - logName_ = ""; - - if (resourceBuilder_ == null) { - resource_ = null; - } else { - resource_ = null; - resourceBuilder_ = null; - } - if (timestampBuilder_ == null) { - timestamp_ = null; - } else { - timestamp_ = null; - timestampBuilder_ = null; - } - 
severity_ = 0; - - insertId_ = ""; - - if (httpRequestBuilder_ == null) { - httpRequest_ = null; - } else { - httpRequest_ = null; - httpRequestBuilder_ = null; - } - internalGetMutableLabels().clear(); - if (operationBuilder_ == null) { - operation_ = null; - } else { - operation_ = null; - operationBuilder_ = null; - } - payloadCase_ = 0; - payload_ = null; - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntry_descriptor; - } - - public com.google.logging.v2.LogEntry getDefaultInstanceForType() { - return com.google.logging.v2.LogEntry.getDefaultInstance(); - } - - public com.google.logging.v2.LogEntry build() { - com.google.logging.v2.LogEntry result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.LogEntry buildPartial() { - com.google.logging.v2.LogEntry result = new com.google.logging.v2.LogEntry(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - result.logName_ = logName_; - if (resourceBuilder_ == null) { - result.resource_ = resource_; - } else { - result.resource_ = resourceBuilder_.build(); - } - if (payloadCase_ == 2) { - if (protoPayloadBuilder_ == null) { - result.payload_ = payload_; - } else { - result.payload_ = protoPayloadBuilder_.build(); - } - } - if (payloadCase_ == 3) { - result.payload_ = payload_; - } - if (payloadCase_ == 6) { - if (jsonPayloadBuilder_ == null) { - result.payload_ = payload_; - } else { - result.payload_ = jsonPayloadBuilder_.build(); - } - } - if (timestampBuilder_ == null) { - result.timestamp_ = timestamp_; - } else { - result.timestamp_ = timestampBuilder_.build(); - } - result.severity_ = severity_; - result.insertId_ = insertId_; - if (httpRequestBuilder_ == null) { - result.httpRequest_ = httpRequest_; - } else { - result.httpRequest_ = 
httpRequestBuilder_.build(); - } - result.labels_ = internalGetLabels(); - result.labels_.makeImmutable(); - if (operationBuilder_ == null) { - result.operation_ = operation_; - } else { - result.operation_ = operationBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - result.payloadCase_ = payloadCase_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.LogEntry) { - return mergeFrom((com.google.logging.v2.LogEntry)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.LogEntry other) { - if (other == com.google.logging.v2.LogEntry.getDefaultInstance()) return this; - if (!other.getLogName().isEmpty()) { - logName_ = other.logName_; - onChanged(); - } - if (other.hasResource()) { - mergeResource(other.getResource()); - } - if (other.hasTimestamp()) { - mergeTimestamp(other.getTimestamp()); - } - if (other.severity_ != 0) { - setSeverityValue(other.getSeverityValue()); - } - if (!other.getInsertId().isEmpty()) { - insertId_ = other.insertId_; - onChanged(); - } - if (other.hasHttpRequest()) { - mergeHttpRequest(other.getHttpRequest()); - } - internalGetMutableLabels().mergeFrom( - other.internalGetLabels()); - if (other.hasOperation()) { - mergeOperation(other.getOperation()); - } - switch (other.getPayloadCase()) { - case PROTO_PAYLOAD: { - mergeProtoPayload(other.getProtoPayload()); - break; - } - case TEXT_PAYLOAD: { - payloadCase_ = 3; - payload_ = other.payload_; - onChanged(); - break; - } - case JSON_PAYLOAD: { - mergeJsonPayload(other.getJsonPayload()); - break; - } - case PAYLOAD_NOT_SET: { - break; - } - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - 
com.google.logging.v2.LogEntry parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.LogEntry) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int payloadCase_ = 0; - private java.lang.Object payload_; - public PayloadCase - getPayloadCase() { - return PayloadCase.valueOf( - payloadCase_); - } - - public Builder clearPayload() { - payloadCase_ = 0; - payload_ = null; - onChanged(); - return this; - } - - private int bitField0_; - - private java.lang.Object logName_ = ""; - /** - * optional string log_name = 12; - * - *
-     * Required. The resource name of the log to which this log entry
-     * belongs. The format of the name is
-     * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-     * `"projects/my-projectid/logs/syslog"`,
-     * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-     * The log ID part of resource name must be less than 512 characters
-     * long and can only include the following characters: upper and
-     * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-     * characters: forward-slash, underscore, hyphen, and period.
-     * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-     * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string log_name = 12; - * - *
-     * Required. The resource name of the log to which this log entry
-     * belongs. The format of the name is
-     * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-     * `"projects/my-projectid/logs/syslog"`,
-     * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-     * The log ID part of resource name must be less than 512 characters
-     * long and can only include the following characters: upper and
-     * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-     * characters: forward-slash, underscore, hyphen, and period.
-     * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-     * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string log_name = 12; - * - *
-     * Required. The resource name of the log to which this log entry
-     * belongs. The format of the name is
-     * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-     * `"projects/my-projectid/logs/syslog"`,
-     * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-     * The log ID part of resource name must be less than 512 characters
-     * long and can only include the following characters: upper and
-     * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-     * characters: forward-slash, underscore, hyphen, and period.
-     * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-     * 
- */ - public Builder setLogName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - logName_ = value; - onChanged(); - return this; - } - /** - * optional string log_name = 12; - * - *
-     * Required. The resource name of the log to which this log entry
-     * belongs. The format of the name is
-     * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-     * `"projects/my-projectid/logs/syslog"`,
-     * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-     * The log ID part of resource name must be less than 512 characters
-     * long and can only include the following characters: upper and
-     * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-     * characters: forward-slash, underscore, hyphen, and period.
-     * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-     * 
- */ - public Builder clearLogName() { - - logName_ = getDefaultInstance().getLogName(); - onChanged(); - return this; - } - /** - * optional string log_name = 12; - * - *
-     * Required. The resource name of the log to which this log entry
-     * belongs. The format of the name is
-     * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-     * `"projects/my-projectid/logs/syslog"`,
-     * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-     * The log ID part of resource name must be less than 512 characters
-     * long and can only include the following characters: upper and
-     * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-     * characters: forward-slash, underscore, hyphen, and period.
-     * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-     * 
- */ - public Builder setLogNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - logName_ = value; - onChanged(); - return this; - } - - private com.google.api.MonitoredResource resource_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> resourceBuilder_; - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public boolean hasResource() { - return resourceBuilder_ != null || resource_ != null; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public com.google.api.MonitoredResource getResource() { - if (resourceBuilder_ == null) { - return resource_ == null ? com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } else { - return resourceBuilder_.getMessage(); - } - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public Builder setResource(com.google.api.MonitoredResource value) { - if (resourceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - resource_ = value; - onChanged(); - } else { - resourceBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public Builder setResource( - com.google.api.MonitoredResource.Builder builderForValue) { - if (resourceBuilder_ == null) { - resource_ = builderForValue.build(); - onChanged(); - } else { - resourceBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public Builder mergeResource(com.google.api.MonitoredResource value) { - if (resourceBuilder_ == null) { - if (resource_ != null) { - resource_ = - com.google.api.MonitoredResource.newBuilder(resource_).mergeFrom(value).buildPartial(); - } else { - resource_ = value; - } - onChanged(); - } else { - resourceBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public Builder clearResource() { - if (resourceBuilder_ == null) { - resource_ = null; - onChanged(); - } else { - resource_ = null; - resourceBuilder_ = null; - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public com.google.api.MonitoredResource.Builder getResourceBuilder() { - - onChanged(); - return getResourceFieldBuilder().getBuilder(); - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - public com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder() { - if (resourceBuilder_ != null) { - return resourceBuilder_.getMessageOrBuilder(); - } else { - return resource_ == null ? - com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } - } - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-     * Required. The monitored resource associated with this log entry.
-     * Example: a log entry that reports a database error would be
-     * associated with the monitored resource designating the particular
-     * database that reported the error.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> - getResourceFieldBuilder() { - if (resourceBuilder_ == null) { - resourceBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder>( - getResource(), - getParentForChildren(), - isClean()); - resource_ = null; - } - return resourceBuilder_; - } - - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> protoPayloadBuilder_; - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public com.google.protobuf.Any getProtoPayload() { - if (protoPayloadBuilder_ == null) { - if (payloadCase_ == 2) { - return (com.google.protobuf.Any) payload_; - } - return com.google.protobuf.Any.getDefaultInstance(); - } else { - if (payloadCase_ == 2) { - return protoPayloadBuilder_.getMessage(); - } - return com.google.protobuf.Any.getDefaultInstance(); - } - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public Builder setProtoPayload(com.google.protobuf.Any value) { - if (protoPayloadBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - payload_ = value; - onChanged(); - } else { - protoPayloadBuilder_.setMessage(value); - } - payloadCase_ = 2; - return this; - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public Builder setProtoPayload( - com.google.protobuf.Any.Builder builderForValue) { - if (protoPayloadBuilder_ == null) { - payload_ = builderForValue.build(); - onChanged(); - } else { - protoPayloadBuilder_.setMessage(builderForValue.build()); - } - payloadCase_ = 2; - return this; - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public Builder mergeProtoPayload(com.google.protobuf.Any value) { - if (protoPayloadBuilder_ == null) { - if (payloadCase_ == 2 && - payload_ != com.google.protobuf.Any.getDefaultInstance()) { - payload_ = com.google.protobuf.Any.newBuilder((com.google.protobuf.Any) payload_) - .mergeFrom(value).buildPartial(); - } else { - payload_ = value; - } - onChanged(); - } else { - if (payloadCase_ == 2) { - protoPayloadBuilder_.mergeFrom(value); - } - protoPayloadBuilder_.setMessage(value); - } - payloadCase_ = 2; - return this; - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public Builder clearProtoPayload() { - if (protoPayloadBuilder_ == null) { - if (payloadCase_ == 2) { - payloadCase_ = 0; - payload_ = null; - onChanged(); - } - } else { - if (payloadCase_ == 2) { - payloadCase_ = 0; - payload_ = null; - } - protoPayloadBuilder_.clear(); - } - return this; - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public com.google.protobuf.Any.Builder getProtoPayloadBuilder() { - return getProtoPayloadFieldBuilder().getBuilder(); - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - public com.google.protobuf.AnyOrBuilder getProtoPayloadOrBuilder() { - if ((payloadCase_ == 2) && (protoPayloadBuilder_ != null)) { - return protoPayloadBuilder_.getMessageOrBuilder(); - } else { - if (payloadCase_ == 2) { - return (com.google.protobuf.Any) payload_; - } - return com.google.protobuf.Any.getDefaultInstance(); - } - } - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-     * The log entry payload, represented as a protocol buffer.
-     * You can only use `protoPayload` values that belong to a set of approved
-     * types.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> - getProtoPayloadFieldBuilder() { - if (protoPayloadBuilder_ == null) { - if (!(payloadCase_ == 2)) { - payload_ = com.google.protobuf.Any.getDefaultInstance(); - } - protoPayloadBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder>( - (com.google.protobuf.Any) payload_, - getParentForChildren(), - isClean()); - payload_ = null; - } - payloadCase_ = 2; - onChanged();; - return protoPayloadBuilder_; - } - - /** - * optional string text_payload = 3; - * - *
-     * The log entry payload, represented as a Unicode string (UTF-8).
-     * 
- */ - public java.lang.String getTextPayload() { - java.lang.Object ref = ""; - if (payloadCase_ == 3) { - ref = payload_; - } - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (payloadCase_ == 3) { - payload_ = s; - } - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string text_payload = 3; - * - *
-     * The log entry payload, represented as a Unicode string (UTF-8).
-     * 
- */ - public com.google.protobuf.ByteString - getTextPayloadBytes() { - java.lang.Object ref = ""; - if (payloadCase_ == 3) { - ref = payload_; - } - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - if (payloadCase_ == 3) { - payload_ = b; - } - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string text_payload = 3; - * - *
-     * The log entry payload, represented as a Unicode string (UTF-8).
-     * 
- */ - public Builder setTextPayload( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - payloadCase_ = 3; - payload_ = value; - onChanged(); - return this; - } - /** - * optional string text_payload = 3; - * - *
-     * The log entry payload, represented as a Unicode string (UTF-8).
-     * 
- */ - public Builder clearTextPayload() { - if (payloadCase_ == 3) { - payloadCase_ = 0; - payload_ = null; - onChanged(); - } - return this; - } - /** - * optional string text_payload = 3; - * - *
-     * The log entry payload, represented as a Unicode string (UTF-8).
-     * 
- */ - public Builder setTextPayloadBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - payloadCase_ = 3; - payload_ = value; - onChanged(); - return this; - } - - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> jsonPayloadBuilder_; - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public com.google.protobuf.Struct getJsonPayload() { - if (jsonPayloadBuilder_ == null) { - if (payloadCase_ == 6) { - return (com.google.protobuf.Struct) payload_; - } - return com.google.protobuf.Struct.getDefaultInstance(); - } else { - if (payloadCase_ == 6) { - return jsonPayloadBuilder_.getMessage(); - } - return com.google.protobuf.Struct.getDefaultInstance(); - } - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public Builder setJsonPayload(com.google.protobuf.Struct value) { - if (jsonPayloadBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - payload_ = value; - onChanged(); - } else { - jsonPayloadBuilder_.setMessage(value); - } - payloadCase_ = 6; - return this; - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public Builder setJsonPayload( - com.google.protobuf.Struct.Builder builderForValue) { - if (jsonPayloadBuilder_ == null) { - payload_ = builderForValue.build(); - onChanged(); - } else { - jsonPayloadBuilder_.setMessage(builderForValue.build()); - } - payloadCase_ = 6; - return this; - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public Builder mergeJsonPayload(com.google.protobuf.Struct value) { - if (jsonPayloadBuilder_ == null) { - if (payloadCase_ == 6 && - payload_ != com.google.protobuf.Struct.getDefaultInstance()) { - payload_ = com.google.protobuf.Struct.newBuilder((com.google.protobuf.Struct) payload_) - .mergeFrom(value).buildPartial(); - } else { - payload_ = value; - } - onChanged(); - } else { - if (payloadCase_ == 6) { - jsonPayloadBuilder_.mergeFrom(value); - } - jsonPayloadBuilder_.setMessage(value); - } - payloadCase_ = 6; - return this; - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public Builder clearJsonPayload() { - if (jsonPayloadBuilder_ == null) { - if (payloadCase_ == 6) { - payloadCase_ = 0; - payload_ = null; - onChanged(); - } - } else { - if (payloadCase_ == 6) { - payloadCase_ = 0; - payload_ = null; - } - jsonPayloadBuilder_.clear(); - } - return this; - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public com.google.protobuf.Struct.Builder getJsonPayloadBuilder() { - return getJsonPayloadFieldBuilder().getBuilder(); - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - public com.google.protobuf.StructOrBuilder getJsonPayloadOrBuilder() { - if ((payloadCase_ == 6) && (jsonPayloadBuilder_ != null)) { - return jsonPayloadBuilder_.getMessageOrBuilder(); - } else { - if (payloadCase_ == 6) { - return (com.google.protobuf.Struct) payload_; - } - return com.google.protobuf.Struct.getDefaultInstance(); - } - } - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-     * The log entry payload, represented as a structure that
-     * is expressed as a JSON object.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> - getJsonPayloadFieldBuilder() { - if (jsonPayloadBuilder_ == null) { - if (!(payloadCase_ == 6)) { - payload_ = com.google.protobuf.Struct.getDefaultInstance(); - } - jsonPayloadBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder>( - (com.google.protobuf.Struct) payload_, - getParentForChildren(), - isClean()); - payload_ = null; - } - payloadCase_ = 6; - onChanged();; - return jsonPayloadBuilder_; - } - - private com.google.protobuf.Timestamp timestamp_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timestampBuilder_; - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public boolean hasTimestamp() { - return timestampBuilder_ != null || timestamp_ != null; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public com.google.protobuf.Timestamp getTimestamp() { - if (timestampBuilder_ == null) { - return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_; - } else { - return timestampBuilder_.getMessage(); - } - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public Builder setTimestamp(com.google.protobuf.Timestamp value) { - if (timestampBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - timestamp_ = value; - onChanged(); - } else { - timestampBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public Builder setTimestamp( - com.google.protobuf.Timestamp.Builder builderForValue) { - if (timestampBuilder_ == null) { - timestamp_ = builderForValue.build(); - onChanged(); - } else { - timestampBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public Builder mergeTimestamp(com.google.protobuf.Timestamp value) { - if (timestampBuilder_ == null) { - if (timestamp_ != null) { - timestamp_ = - com.google.protobuf.Timestamp.newBuilder(timestamp_).mergeFrom(value).buildPartial(); - } else { - timestamp_ = value; - } - onChanged(); - } else { - timestampBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public Builder clearTimestamp() { - if (timestampBuilder_ == null) { - timestamp_ = null; - onChanged(); - } else { - timestamp_ = null; - timestampBuilder_ = null; - } - - return this; - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public com.google.protobuf.Timestamp.Builder getTimestampBuilder() { - - onChanged(); - return getTimestampFieldBuilder().getBuilder(); - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() { - if (timestampBuilder_ != null) { - return timestampBuilder_.getMessageOrBuilder(); - } else { - return timestamp_ == null ? - com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_; - } - } - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-     * Optional. The time the event described by the log entry occurred.  If
-     * omitted, Cloud Logging will use the time the log entry is written.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> - getTimestampFieldBuilder() { - if (timestampBuilder_ == null) { - timestampBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( - getTimestamp(), - getParentForChildren(), - isClean()); - timestamp_ = null; - } - return timestampBuilder_; - } - - private int severity_ = 0; - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-     * Optional. The severity of the log entry. The default value is
-     * `LogSeverity.DEFAULT`.
-     * 
- */ - public int getSeverityValue() { - return severity_; - } - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-     * Optional. The severity of the log entry. The default value is
-     * `LogSeverity.DEFAULT`.
-     * 
- */ - public Builder setSeverityValue(int value) { - severity_ = value; - onChanged(); - return this; - } - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-     * Optional. The severity of the log entry. The default value is
-     * `LogSeverity.DEFAULT`.
-     * 
- */ - public com.google.logging.type.LogSeverity getSeverity() { - com.google.logging.type.LogSeverity result = com.google.logging.type.LogSeverity.valueOf(severity_); - return result == null ? com.google.logging.type.LogSeverity.UNRECOGNIZED : result; - } - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-     * Optional. The severity of the log entry. The default value is
-     * `LogSeverity.DEFAULT`.
-     * 
- */ - public Builder setSeverity(com.google.logging.type.LogSeverity value) { - if (value == null) { - throw new NullPointerException(); - } - - severity_ = value.getNumber(); - onChanged(); - return this; - } - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-     * Optional. The severity of the log entry. The default value is
-     * `LogSeverity.DEFAULT`.
-     * 
- */ - public Builder clearSeverity() { - - severity_ = 0; - onChanged(); - return this; - } - - private java.lang.Object insertId_ = ""; - /** - * optional string insert_id = 4; - * - *
-     * Optional. A unique ID for the log entry. If you provide this field, the
-     * logging service considers other log entries in the same log with the same
-     * ID as duplicates which can be removed.
-     * If omitted, Cloud Logging will generate a unique ID for this log entry.
-     * 
- */ - public java.lang.String getInsertId() { - java.lang.Object ref = insertId_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - insertId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string insert_id = 4; - * - *
-     * Optional. A unique ID for the log entry. If you provide this field, the
-     * logging service considers other log entries in the same log with the same
-     * ID as duplicates which can be removed.
-     * If omitted, Cloud Logging will generate a unique ID for this log entry.
-     * 
- */ - public com.google.protobuf.ByteString - getInsertIdBytes() { - java.lang.Object ref = insertId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - insertId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string insert_id = 4; - * - *
-     * Optional. A unique ID for the log entry. If you provide this field, the
-     * logging service considers other log entries in the same log with the same
-     * ID as duplicates which can be removed.
-     * If omitted, Cloud Logging will generate a unique ID for this log entry.
-     * 
- */ - public Builder setInsertId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - insertId_ = value; - onChanged(); - return this; - } - /** - * optional string insert_id = 4; - * - *
-     * Optional. A unique ID for the log entry. If you provide this field, the
-     * logging service considers other log entries in the same log with the same
-     * ID as duplicates which can be removed.
-     * If omitted, Cloud Logging will generate a unique ID for this log entry.
-     * 
- */ - public Builder clearInsertId() { - - insertId_ = getDefaultInstance().getInsertId(); - onChanged(); - return this; - } - /** - * optional string insert_id = 4; - * - *
-     * Optional. A unique ID for the log entry. If you provide this field, the
-     * logging service considers other log entries in the same log with the same
-     * ID as duplicates which can be removed.
-     * If omitted, Cloud Logging will generate a unique ID for this log entry.
-     * 
- */ - public Builder setInsertIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - insertId_ = value; - onChanged(); - return this; - } - - private com.google.logging.type.HttpRequest httpRequest_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.type.HttpRequest, com.google.logging.type.HttpRequest.Builder, com.google.logging.type.HttpRequestOrBuilder> httpRequestBuilder_; - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public boolean hasHttpRequest() { - return httpRequestBuilder_ != null || httpRequest_ != null; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public com.google.logging.type.HttpRequest getHttpRequest() { - if (httpRequestBuilder_ == null) { - return httpRequest_ == null ? com.google.logging.type.HttpRequest.getDefaultInstance() : httpRequest_; - } else { - return httpRequestBuilder_.getMessage(); - } - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public Builder setHttpRequest(com.google.logging.type.HttpRequest value) { - if (httpRequestBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - httpRequest_ = value; - onChanged(); - } else { - httpRequestBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public Builder setHttpRequest( - com.google.logging.type.HttpRequest.Builder builderForValue) { - if (httpRequestBuilder_ == null) { - httpRequest_ = builderForValue.build(); - onChanged(); - } else { - httpRequestBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public Builder mergeHttpRequest(com.google.logging.type.HttpRequest value) { - if (httpRequestBuilder_ == null) { - if (httpRequest_ != null) { - httpRequest_ = - com.google.logging.type.HttpRequest.newBuilder(httpRequest_).mergeFrom(value).buildPartial(); - } else { - httpRequest_ = value; - } - onChanged(); - } else { - httpRequestBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public Builder clearHttpRequest() { - if (httpRequestBuilder_ == null) { - httpRequest_ = null; - onChanged(); - } else { - httpRequest_ = null; - httpRequestBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public com.google.logging.type.HttpRequest.Builder getHttpRequestBuilder() { - - onChanged(); - return getHttpRequestFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - public com.google.logging.type.HttpRequestOrBuilder getHttpRequestOrBuilder() { - if (httpRequestBuilder_ != null) { - return httpRequestBuilder_.getMessageOrBuilder(); - } else { - return httpRequest_ == null ? - com.google.logging.type.HttpRequest.getDefaultInstance() : httpRequest_; - } - } - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-     * Optional. Information about the HTTP request associated with this log entry,
-     * if applicable.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.type.HttpRequest, com.google.logging.type.HttpRequest.Builder, com.google.logging.type.HttpRequestOrBuilder> - getHttpRequestFieldBuilder() { - if (httpRequestBuilder_ == null) { - httpRequestBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.type.HttpRequest, com.google.logging.type.HttpRequest.Builder, com.google.logging.type.HttpRequestOrBuilder>( - getHttpRequest(), - getParentForChildren(), - isClean()); - httpRequest_ = null; - } - return httpRequestBuilder_; - } - - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> labels_; - private com.google.protobuf.MapField - internalGetLabels() { - if (labels_ == null) { - return com.google.protobuf.MapField.emptyMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - return labels_; - } - private com.google.protobuf.MapField - internalGetMutableLabels() { - onChanged();; - if (labels_ == null) { - labels_ = com.google.protobuf.MapField.newMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - if (!labels_.isMutable()) { - labels_ = labels_.copy(); - } - return labels_; - } - /** - * map<string, string> labels = 11; - * - *
-     * Optional. A set of user-defined (key, value) data that provides additional
-     * information about the log entry.
-     * 
- */ - public java.util.Map getLabels() { - return internalGetLabels().getMap(); - } - /** - * map<string, string> labels = 11; - * - *
-     * Optional. A set of user-defined (key, value) data that provides additional
-     * information about the log entry.
-     * 
- */ - public java.util.Map - getMutableLabels() { - return internalGetMutableLabels().getMutableMap(); - } - /** - * map<string, string> labels = 11; - * - *
-     * Optional. A set of user-defined (key, value) data that provides additional
-     * information about the log entry.
-     * 
- */ - public Builder putAllLabels( - java.util.Map values) { - getMutableLabels().putAll(values); - return this; - } - - private com.google.logging.v2.LogEntryOperation operation_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogEntryOperation, com.google.logging.v2.LogEntryOperation.Builder, com.google.logging.v2.LogEntryOperationOrBuilder> operationBuilder_; - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public boolean hasOperation() { - return operationBuilder_ != null || operation_ != null; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public com.google.logging.v2.LogEntryOperation getOperation() { - if (operationBuilder_ == null) { - return operation_ == null ? com.google.logging.v2.LogEntryOperation.getDefaultInstance() : operation_; - } else { - return operationBuilder_.getMessage(); - } - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public Builder setOperation(com.google.logging.v2.LogEntryOperation value) { - if (operationBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - operation_ = value; - onChanged(); - } else { - operationBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public Builder setOperation( - com.google.logging.v2.LogEntryOperation.Builder builderForValue) { - if (operationBuilder_ == null) { - operation_ = builderForValue.build(); - onChanged(); - } else { - operationBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public Builder mergeOperation(com.google.logging.v2.LogEntryOperation value) { - if (operationBuilder_ == null) { - if (operation_ != null) { - operation_ = - com.google.logging.v2.LogEntryOperation.newBuilder(operation_).mergeFrom(value).buildPartial(); - } else { - operation_ = value; - } - onChanged(); - } else { - operationBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public Builder clearOperation() { - if (operationBuilder_ == null) { - operation_ = null; - onChanged(); - } else { - operation_ = null; - operationBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public com.google.logging.v2.LogEntryOperation.Builder getOperationBuilder() { - - onChanged(); - return getOperationFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - public com.google.logging.v2.LogEntryOperationOrBuilder getOperationOrBuilder() { - if (operationBuilder_ != null) { - return operationBuilder_.getMessageOrBuilder(); - } else { - return operation_ == null ? - com.google.logging.v2.LogEntryOperation.getDefaultInstance() : operation_; - } - } - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-     * Optional. Information about an operation associated with the log entry, if
-     * applicable.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogEntryOperation, com.google.logging.v2.LogEntryOperation.Builder, com.google.logging.v2.LogEntryOperationOrBuilder> - getOperationFieldBuilder() { - if (operationBuilder_ == null) { - operationBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogEntryOperation, com.google.logging.v2.LogEntryOperation.Builder, com.google.logging.v2.LogEntryOperationOrBuilder>( - getOperation(), - getParentForChildren(), - isClean()); - operation_ = null; - } - return operationBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.LogEntry) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - private static final com.google.logging.v2.LogEntry DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.LogEntry(); - } - - public static com.google.logging.v2.LogEntry getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public LogEntry parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new LogEntry(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return 
PARSER; - } - - public com.google.logging.v2.LogEntry getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperation.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperation.java deleted file mode 100644 index abbb0fd5619f..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperation.java +++ /dev/null @@ -1,790 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/log_entry.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.LogEntryOperation} - * - *
- * Additional information about a potentially long-running operation with which
- * a log entry is associated.
- * 
- */ -public final class LogEntryOperation extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.LogEntryOperation) - LogEntryOperationOrBuilder { - // Use LogEntryOperation.newBuilder() to construct. - private LogEntryOperation(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private LogEntryOperation() { - id_ = ""; - producer_ = ""; - first_ = false; - last_ = false; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private LogEntryOperation( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - id_ = s; - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - producer_ = s; - break; - } - case 24: { - - first_ = input.readBool(); - break; - } - case 32: { - - last_ = input.readBool(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntryOperation_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntryOperation_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogEntryOperation.class, com.google.logging.v2.LogEntryOperation.Builder.class); - } - - public static final int ID_FIELD_NUMBER = 1; - private volatile java.lang.Object id_; - /** - * optional string id = 1; - * - *
-   * Required. An arbitrary operation identifier. Log entries with the
-   * same identifier are assumed to be part of the same operation.
-   * 
- */ - public java.lang.String getId() { - java.lang.Object ref = id_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - id_ = s; - return s; - } - } - /** - * optional string id = 1; - * - *
-   * Required. An arbitrary operation identifier. Log entries with the
-   * same identifier are assumed to be part of the same operation.
-   * 
- */ - public com.google.protobuf.ByteString - getIdBytes() { - java.lang.Object ref = id_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - id_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PRODUCER_FIELD_NUMBER = 2; - private volatile java.lang.Object producer_; - /** - * optional string producer = 2; - * - *
-   * Required. A arbitrary producer identifier. The combination of
-   * `id` and `producer` must be globally unique.  Examples for `producer`:
-   * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-   * 
- */ - public java.lang.String getProducer() { - java.lang.Object ref = producer_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - producer_ = s; - return s; - } - } - /** - * optional string producer = 2; - * - *
-   * Required. A arbitrary producer identifier. The combination of
-   * `id` and `producer` must be globally unique.  Examples for `producer`:
-   * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-   * 
- */ - public com.google.protobuf.ByteString - getProducerBytes() { - java.lang.Object ref = producer_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - producer_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int FIRST_FIELD_NUMBER = 3; - private boolean first_; - /** - * optional bool first = 3; - * - *
-   * Optional. Set this to True if this is the first log entry in the operation.
-   * 
- */ - public boolean getFirst() { - return first_; - } - - public static final int LAST_FIELD_NUMBER = 4; - private boolean last_; - /** - * optional bool last = 4; - * - *
-   * Optional. Set this to True if this is the last log entry in the operation.
-   * 
- */ - public boolean getLast() { - return last_; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getIdBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, id_); - } - if (!getProducerBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, producer_); - } - if (first_ != false) { - output.writeBool(3, first_); - } - if (last_ != false) { - output.writeBool(4, last_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getIdBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, id_); - } - if (!getProducerBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, producer_); - } - if (first_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, first_); - } - if (last_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(4, last_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.LogEntryOperation parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogEntryOperation parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogEntryOperation parseFrom(byte[] data) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogEntryOperation parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogEntryOperation parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogEntryOperation parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogEntryOperation parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.LogEntryOperation parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogEntryOperation parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogEntryOperation parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.LogEntryOperation prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == 
DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.LogEntryOperation} - * - *
-   * Additional information about a potentially long-running operation with which
-   * a log entry is associated.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.LogEntryOperation) - com.google.logging.v2.LogEntryOperationOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntryOperation_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntryOperation_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogEntryOperation.class, com.google.logging.v2.LogEntryOperation.Builder.class); - } - - // Construct using com.google.logging.v2.LogEntryOperation.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - id_ = ""; - - producer_ = ""; - - first_ = false; - - last_ = false; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LogEntryProto.internal_static_google_logging_v2_LogEntryOperation_descriptor; - } - - public com.google.logging.v2.LogEntryOperation getDefaultInstanceForType() { - return com.google.logging.v2.LogEntryOperation.getDefaultInstance(); - } - - public com.google.logging.v2.LogEntryOperation build() { - com.google.logging.v2.LogEntryOperation result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.LogEntryOperation 
buildPartial() { - com.google.logging.v2.LogEntryOperation result = new com.google.logging.v2.LogEntryOperation(this); - result.id_ = id_; - result.producer_ = producer_; - result.first_ = first_; - result.last_ = last_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.LogEntryOperation) { - return mergeFrom((com.google.logging.v2.LogEntryOperation)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.LogEntryOperation other) { - if (other == com.google.logging.v2.LogEntryOperation.getDefaultInstance()) return this; - if (!other.getId().isEmpty()) { - id_ = other.id_; - onChanged(); - } - if (!other.getProducer().isEmpty()) { - producer_ = other.producer_; - onChanged(); - } - if (other.getFirst() != false) { - setFirst(other.getFirst()); - } - if (other.getLast() != false) { - setLast(other.getLast()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.LogEntryOperation parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.LogEntryOperation) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object id_ = ""; - /** - * optional string id = 1; - * - *
-     * Required. An arbitrary operation identifier. Log entries with the
-     * same identifier are assumed to be part of the same operation.
-     * 
- */ - public java.lang.String getId() { - java.lang.Object ref = id_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - id_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string id = 1; - * - *
-     * Required. An arbitrary operation identifier. Log entries with the
-     * same identifier are assumed to be part of the same operation.
-     * 
- */ - public com.google.protobuf.ByteString - getIdBytes() { - java.lang.Object ref = id_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - id_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string id = 1; - * - *
-     * Required. An arbitrary operation identifier. Log entries with the
-     * same identifier are assumed to be part of the same operation.
-     * 
- */ - public Builder setId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - id_ = value; - onChanged(); - return this; - } - /** - * optional string id = 1; - * - *
-     * Required. An arbitrary operation identifier. Log entries with the
-     * same identifier are assumed to be part of the same operation.
-     * 
- */ - public Builder clearId() { - - id_ = getDefaultInstance().getId(); - onChanged(); - return this; - } - /** - * optional string id = 1; - * - *
-     * Required. An arbitrary operation identifier. Log entries with the
-     * same identifier are assumed to be part of the same operation.
-     * 
- */ - public Builder setIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - id_ = value; - onChanged(); - return this; - } - - private java.lang.Object producer_ = ""; - /** - * optional string producer = 2; - * - *
-     * Required. A arbitrary producer identifier. The combination of
-     * `id` and `producer` must be globally unique.  Examples for `producer`:
-     * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-     * 
- */ - public java.lang.String getProducer() { - java.lang.Object ref = producer_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - producer_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string producer = 2; - * - *
-     * Required. A arbitrary producer identifier. The combination of
-     * `id` and `producer` must be globally unique.  Examples for `producer`:
-     * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-     * 
- */ - public com.google.protobuf.ByteString - getProducerBytes() { - java.lang.Object ref = producer_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - producer_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string producer = 2; - * - *
-     * Required. A arbitrary producer identifier. The combination of
-     * `id` and `producer` must be globally unique.  Examples for `producer`:
-     * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-     * 
- */ - public Builder setProducer( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - producer_ = value; - onChanged(); - return this; - } - /** - * optional string producer = 2; - * - *
-     * Required. A arbitrary producer identifier. The combination of
-     * `id` and `producer` must be globally unique.  Examples for `producer`:
-     * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-     * 
- */ - public Builder clearProducer() { - - producer_ = getDefaultInstance().getProducer(); - onChanged(); - return this; - } - /** - * optional string producer = 2; - * - *
-     * Required. A arbitrary producer identifier. The combination of
-     * `id` and `producer` must be globally unique.  Examples for `producer`:
-     * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-     * 
- */ - public Builder setProducerBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - producer_ = value; - onChanged(); - return this; - } - - private boolean first_ ; - /** - * optional bool first = 3; - * - *
-     * Optional. Set this to True if this is the first log entry in the operation.
-     * 
- */ - public boolean getFirst() { - return first_; - } - /** - * optional bool first = 3; - * - *
-     * Optional. Set this to True if this is the first log entry in the operation.
-     * 
- */ - public Builder setFirst(boolean value) { - - first_ = value; - onChanged(); - return this; - } - /** - * optional bool first = 3; - * - *
-     * Optional. Set this to True if this is the first log entry in the operation.
-     * 
- */ - public Builder clearFirst() { - - first_ = false; - onChanged(); - return this; - } - - private boolean last_ ; - /** - * optional bool last = 4; - * - *
-     * Optional. Set this to True if this is the last log entry in the operation.
-     * 
- */ - public boolean getLast() { - return last_; - } - /** - * optional bool last = 4; - * - *
-     * Optional. Set this to True if this is the last log entry in the operation.
-     * 
- */ - public Builder setLast(boolean value) { - - last_ = value; - onChanged(); - return this; - } - /** - * optional bool last = 4; - * - *
-     * Optional. Set this to True if this is the last log entry in the operation.
-     * 
- */ - public Builder clearLast() { - - last_ = false; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.LogEntryOperation) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - private static final com.google.logging.v2.LogEntryOperation DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.LogEntryOperation(); - } - - public static com.google.logging.v2.LogEntryOperation getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public LogEntryOperation parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new LogEntryOperation(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.LogEntryOperation getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperationOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperationOrBuilder.java deleted file mode 100644 index d3a2c769313e..000000000000 --- 
a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOperationOrBuilder.java +++ /dev/null @@ -1,69 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/log_entry.proto - -package com.google.logging.v2; - -public interface LogEntryOperationOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.LogEntryOperation) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string id = 1; - * - *
-   * Required. An arbitrary operation identifier. Log entries with the
-   * same identifier are assumed to be part of the same operation.
-   * 
- */ - java.lang.String getId(); - /** - * optional string id = 1; - * - *
-   * Required. An arbitrary operation identifier. Log entries with the
-   * same identifier are assumed to be part of the same operation.
-   * 
- */ - com.google.protobuf.ByteString - getIdBytes(); - - /** - * optional string producer = 2; - * - *
-   * Required. A arbitrary producer identifier. The combination of
-   * `id` and `producer` must be globally unique.  Examples for `producer`:
-   * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-   * 
- */ - java.lang.String getProducer(); - /** - * optional string producer = 2; - * - *
-   * Required. A arbitrary producer identifier. The combination of
-   * `id` and `producer` must be globally unique.  Examples for `producer`:
-   * `"MyDivision.MyBigCompany.com"`, "github.com/MyProject/MyApplication"`.
-   * 
- */ - com.google.protobuf.ByteString - getProducerBytes(); - - /** - * optional bool first = 3; - * - *
-   * Optional. Set this to True if this is the first log entry in the operation.
-   * 
- */ - boolean getFirst(); - - /** - * optional bool last = 4; - * - *
-   * Optional. Set this to True if this is the last log entry in the operation.
-   * 
- */ - boolean getLast(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOrBuilder.java deleted file mode 100644 index 7ad92e438faa..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryOrBuilder.java +++ /dev/null @@ -1,277 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/log_entry.proto - -package com.google.logging.v2; - -public interface LogEntryOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.LogEntry) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string log_name = 12; - * - *
-   * Required. The resource name of the log to which this log entry
-   * belongs. The format of the name is
-   * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-   * `"projects/my-projectid/logs/syslog"`,
-   * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-   * The log ID part of resource name must be less than 512 characters
-   * long and can only include the following characters: upper and
-   * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-   * characters: forward-slash, underscore, hyphen, and period.
-   * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-   * 
- */ - java.lang.String getLogName(); - /** - * optional string log_name = 12; - * - *
-   * Required. The resource name of the log to which this log entry
-   * belongs. The format of the name is
-   * `projects/&lt;project-id&gt;/logs/&lt;log-id%gt;`.  Examples:
-   * `"projects/my-projectid/logs/syslog"`,
-   * `"projects/1234567890/logs/library.googleapis.com%2Fbook_log"`.
-   * The log ID part of resource name must be less than 512 characters
-   * long and can only include the following characters: upper and
-   * lower case alphanumeric characters: [A-Za-z0-9]; and punctuation
-   * characters: forward-slash, underscore, hyphen, and period.
-   * Forward-slash (`/`) characters in the log ID must be URL-encoded.
-   * 
- */ - com.google.protobuf.ByteString - getLogNameBytes(); - - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - boolean hasResource(); - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - com.google.api.MonitoredResource getResource(); - /** - * optional .google.api.MonitoredResource resource = 8; - * - *
-   * Required. The monitored resource associated with this log entry.
-   * Example: a log entry that reports a database error would be
-   * associated with the monitored resource designating the particular
-   * database that reported the error.
-   * 
- */ - com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder(); - - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-   * The log entry payload, represented as a protocol buffer.
-   * You can only use `protoPayload` values that belong to a set of approved
-   * types.
-   * 
- */ - com.google.protobuf.Any getProtoPayload(); - /** - * optional .google.protobuf.Any proto_payload = 2; - * - *
-   * The log entry payload, represented as a protocol buffer.
-   * You can only use `protoPayload` values that belong to a set of approved
-   * types.
-   * 
- */ - com.google.protobuf.AnyOrBuilder getProtoPayloadOrBuilder(); - - /** - * optional string text_payload = 3; - * - *
-   * The log entry payload, represented as a Unicode string (UTF-8).
-   * 
- */ - java.lang.String getTextPayload(); - /** - * optional string text_payload = 3; - * - *
-   * The log entry payload, represented as a Unicode string (UTF-8).
-   * 
- */ - com.google.protobuf.ByteString - getTextPayloadBytes(); - - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-   * The log entry payload, represented as a structure that
-   * is expressed as a JSON object.
-   * 
- */ - com.google.protobuf.Struct getJsonPayload(); - /** - * optional .google.protobuf.Struct json_payload = 6; - * - *
-   * The log entry payload, represented as a structure that
-   * is expressed as a JSON object.
-   * 
- */ - com.google.protobuf.StructOrBuilder getJsonPayloadOrBuilder(); - - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - boolean hasTimestamp(); - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - com.google.protobuf.Timestamp getTimestamp(); - /** - * optional .google.protobuf.Timestamp timestamp = 9; - * - *
-   * Optional. The time the event described by the log entry occurred.  If
-   * omitted, Cloud Logging will use the time the log entry is written.
-   * 
- */ - com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder(); - - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-   * Optional. The severity of the log entry. The default value is
-   * `LogSeverity.DEFAULT`.
-   * 
- */ - int getSeverityValue(); - /** - * optional .google.logging.type.LogSeverity severity = 10; - * - *
-   * Optional. The severity of the log entry. The default value is
-   * `LogSeverity.DEFAULT`.
-   * 
- */ - com.google.logging.type.LogSeverity getSeverity(); - - /** - * optional string insert_id = 4; - * - *
-   * Optional. A unique ID for the log entry. If you provide this field, the
-   * logging service considers other log entries in the same log with the same
-   * ID as duplicates which can be removed.
-   * If omitted, Cloud Logging will generate a unique ID for this log entry.
-   * 
- */ - java.lang.String getInsertId(); - /** - * optional string insert_id = 4; - * - *
-   * Optional. A unique ID for the log entry. If you provide this field, the
-   * logging service considers other log entries in the same log with the same
-   * ID as duplicates which can be removed.
-   * If omitted, Cloud Logging will generate a unique ID for this log entry.
-   * 
- */ - com.google.protobuf.ByteString - getInsertIdBytes(); - - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - boolean hasHttpRequest(); - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - com.google.logging.type.HttpRequest getHttpRequest(); - /** - * optional .google.logging.type.HttpRequest http_request = 7; - * - *
-   * Optional. Information about the HTTP request associated with this log entry,
-   * if applicable.
-   * 
- */ - com.google.logging.type.HttpRequestOrBuilder getHttpRequestOrBuilder(); - - /** - * map<string, string> labels = 11; - * - *
-   * Optional. A set of user-defined (key, value) data that provides additional
-   * information about the log entry.
-   * 
- */ - java.util.Map - getLabels(); - - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - boolean hasOperation(); - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - com.google.logging.v2.LogEntryOperation getOperation(); - /** - * optional .google.logging.v2.LogEntryOperation operation = 15; - * - *
-   * Optional. Information about an operation associated with the log entry, if
-   * applicable.
-   * 
- */ - com.google.logging.v2.LogEntryOperationOrBuilder getOperationOrBuilder(); - - public com.google.logging.v2.LogEntry.PayloadCase getPayloadCase(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryProto.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryProto.java deleted file mode 100644 index 3be0a91f6922..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogEntryProto.java +++ /dev/null @@ -1,108 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/log_entry.proto - -package com.google.logging.v2; - -public final class LogEntryProto { - private LogEntryProto() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_LogEntry_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_LogEntry_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_LogEntry_LabelsEntry_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_LogEntry_LabelsEntry_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_LogEntryOperation_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_LogEntryOperation_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n!google/logging/v2/log_entry.proto\022\021goo" + - "gle.logging.v2\032\034google/api/annotations.p" + - "roto\032#google/api/monitored_resource.prot" + - 
"o\032&google/logging/type/http_request.prot" + - "o\032&google/logging/type/log_severity.prot" + - "o\032\031google/protobuf/any.proto\032\034google/pro" + - "tobuf/struct.proto\032\037google/protobuf/time" + - "stamp.proto\"\237\004\n\010LogEntry\022\020\n\010log_name\030\014 \001" + - "(\t\022/\n\010resource\030\010 \001(\0132\035.google.api.Monito" + - "redResource\022-\n\rproto_payload\030\002 \001(\0132\024.goo", - "gle.protobuf.AnyH\000\022\026\n\014text_payload\030\003 \001(\t" + - "H\000\022/\n\014json_payload\030\006 \001(\0132\027.google.protob" + - "uf.StructH\000\022-\n\ttimestamp\030\t \001(\0132\032.google." + - "protobuf.Timestamp\0222\n\010severity\030\n \001(\0162 .g" + - "oogle.logging.type.LogSeverity\022\021\n\tinsert" + - "_id\030\004 \001(\t\0226\n\014http_request\030\007 \001(\0132 .google" + - ".logging.type.HttpRequest\0227\n\006labels\030\013 \003(" + - "\0132\'.google.logging.v2.LogEntry.LabelsEnt" + - "ry\0227\n\toperation\030\017 \001(\0132$.google.logging.v" + - "2.LogEntryOperation\032-\n\013LabelsEntry\022\013\n\003ke", - "y\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001B\t\n\007payload\"N\n" + - "\021LogEntryOperation\022\n\n\002id\030\001 \001(\t\022\020\n\010produc" + - "er\030\002 \001(\t\022\r\n\005first\030\003 \001(\010\022\014\n\004last\030\004 \001(\010B(\n" + - "\025com.google.logging.v2B\rLogEntryProtoP\001b" + - "\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.api.AnnotationsProto.getDescriptor(), - com.google.api.MonitoredResourceProto.getDescriptor(), - com.google.logging.type.HttpRequestProto.getDescriptor(), - com.google.logging.type.LogSeverityProto.getDescriptor(), - com.google.protobuf.AnyProto.getDescriptor(), - com.google.protobuf.StructProto.getDescriptor(), - com.google.protobuf.TimestampProto.getDescriptor(), - }, assigner); - internal_static_google_logging_v2_LogEntry_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_logging_v2_LogEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_LogEntry_descriptor, - new java.lang.String[] { "LogName", "Resource", "ProtoPayload", "TextPayload", "JsonPayload", "Timestamp", "Severity", "InsertId", "HttpRequest", "Labels", "Operation", "Payload", }); - internal_static_google_logging_v2_LogEntry_LabelsEntry_descriptor = - internal_static_google_logging_v2_LogEntry_descriptor.getNestedTypes().get(0); - internal_static_google_logging_v2_LogEntry_LabelsEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_LogEntry_LabelsEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - internal_static_google_logging_v2_LogEntryOperation_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_google_logging_v2_LogEntryOperation_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_LogEntryOperation_descriptor, - new java.lang.String[] { "Id", "Producer", 
"First", "Last", }); - com.google.api.AnnotationsProto.getDescriptor(); - com.google.api.MonitoredResourceProto.getDescriptor(); - com.google.logging.type.HttpRequestProto.getDescriptor(); - com.google.logging.type.LogSeverityProto.getDescriptor(); - com.google.protobuf.AnyProto.getDescriptor(); - com.google.protobuf.StructProto.getDescriptor(); - com.google.protobuf.TimestampProto.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetric.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetric.java deleted file mode 100644 index 7b5439627e7b..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetric.java +++ /dev/null @@ -1,934 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.LogMetric} - * - *
- * Describes a logs-based metric.  The value of the metric is the
- * number of log entries that match a logs filter.
- * 
- */ -public final class LogMetric extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.LogMetric) - LogMetricOrBuilder { - // Use LogMetric.newBuilder() to construct. - private LogMetric(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private LogMetric() { - name_ = ""; - description_ = ""; - filter_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private LogMetric( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - name_ = s; - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - description_ = s; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - - filter_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_LogMetric_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_LogMetric_fieldAccessorTable - 
.ensureFieldAccessorsInitialized( - com.google.logging.v2.LogMetric.class, com.google.logging.v2.LogMetric.Builder.class); - } - - /** - * Protobuf enum {@code google.logging.v2.LogMetric.ApiVersion} - * - *
-   * Cloud Logging API version.
-   * 
- */ - public enum ApiVersion - implements com.google.protobuf.ProtocolMessageEnum { - /** - * V2 = 0; - * - *
-     * Cloud Logging API V2.
-     * 
- */ - V2(0, 0), - /** - * V1 = 1; - * - *
-     * Cloud Logging API V1.
-     * 
- */ - V1(1, 1), - UNRECOGNIZED(-1, -1), - ; - - /** - * V2 = 0; - * - *
-     * Cloud Logging API V2.
-     * 
- */ - public static final int V2_VALUE = 0; - /** - * V1 = 1; - * - *
-     * Cloud Logging API V1.
-     * 
- */ - public static final int V1_VALUE = 1; - - - public final int getNumber() { - if (index == -1) { - throw new java.lang.IllegalArgumentException( - "Can't get the number of an unknown enum value."); - } - return value; - } - - public static ApiVersion valueOf(int value) { - switch (value) { - case 0: return V2; - case 1: return V1; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static final com.google.protobuf.Internal.EnumLiteMap< - ApiVersion> internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public ApiVersion findValueByNumber(int number) { - return ApiVersion.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return com.google.logging.v2.LogMetric.getDescriptor().getEnumTypes().get(0); - } - - private static final ApiVersion[] VALUES = values(); - - public static ApiVersion valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - if (desc.getIndex() == -1) { - return UNRECOGNIZED; - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private ApiVersion(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:google.logging.v2.LogMetric.ApiVersion) - } - - public static final int NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object name_; - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned metric identifier. Example:
-   * `"severe_errors"`.  Metric identifiers are limited to 1000
-   * characters and can include only the following characters: `A-Z`,
-   * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-   * forward-slash character (`/`) denotes a hierarchy of name pieces,
-   * and it cannot be the first character of the name.
-   * 
- */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } - } - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned metric identifier. Example:
-   * `"severe_errors"`.  Metric identifiers are limited to 1000
-   * characters and can include only the following characters: `A-Z`,
-   * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-   * forward-slash character (`/`) denotes a hierarchy of name pieces,
-   * and it cannot be the first character of the name.
-   * 
- */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int DESCRIPTION_FIELD_NUMBER = 2; - private volatile java.lang.Object description_; - /** - * optional string description = 2; - * - *
-   * A description of this metric, which is used in documentation.
-   * 
- */ - public java.lang.String getDescription() { - java.lang.Object ref = description_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - description_ = s; - return s; - } - } - /** - * optional string description = 2; - * - *
-   * A description of this metric, which is used in documentation.
-   * 
- */ - public com.google.protobuf.ByteString - getDescriptionBytes() { - java.lang.Object ref = description_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - description_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int FILTER_FIELD_NUMBER = 3; - private volatile java.lang.Object filter_; - /** - * optional string filter = 3; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } - } - /** - * optional string filter = 3; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); - } - if (!getDescriptionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, description_); - } - if (!getFilterBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 3, filter_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); - } - if (!getDescriptionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, description_); - } - if (!getFilterBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(3, filter_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.LogMetric parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogMetric parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogMetric parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogMetric parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogMetric parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogMetric parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogMetric parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.LogMetric parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogMetric parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogMetric parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.LogMetric 
prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.LogMetric} - * - *
-   * Describes a logs-based metric.  The value of the metric is the
-   * number of log entries that match a logs filter.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.LogMetric) - com.google.logging.v2.LogMetricOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_LogMetric_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_LogMetric_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogMetric.class, com.google.logging.v2.LogMetric.Builder.class); - } - - // Construct using com.google.logging.v2.LogMetric.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - name_ = ""; - - description_ = ""; - - filter_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_LogMetric_descriptor; - } - - public com.google.logging.v2.LogMetric getDefaultInstanceForType() { - return com.google.logging.v2.LogMetric.getDefaultInstance(); - } - - public com.google.logging.v2.LogMetric build() { - com.google.logging.v2.LogMetric result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.LogMetric buildPartial() { - com.google.logging.v2.LogMetric result = new com.google.logging.v2.LogMetric(this); - result.name_ = 
name_; - result.description_ = description_; - result.filter_ = filter_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.LogMetric) { - return mergeFrom((com.google.logging.v2.LogMetric)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.LogMetric other) { - if (other == com.google.logging.v2.LogMetric.getDefaultInstance()) return this; - if (!other.getName().isEmpty()) { - name_ = other.name_; - onChanged(); - } - if (!other.getDescription().isEmpty()) { - description_ = other.description_; - onChanged(); - } - if (!other.getFilter().isEmpty()) { - filter_ = other.filter_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.LogMetric parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.LogMetric) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object name_ = ""; - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned metric identifier. Example:
-     * `"severe_errors"`.  Metric identifiers are limited to 1000
-     * characters and can include only the following characters: `A-Z`,
-     * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-     * forward-slash character (`/`) denotes a hierarchy of name pieces,
-     * and it cannot be the first character of the name.
-     * 
- */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned metric identifier. Example:
-     * `"severe_errors"`.  Metric identifiers are limited to 1000
-     * characters and can include only the following characters: `A-Z`,
-     * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-     * forward-slash character (`/`) denotes a hierarchy of name pieces,
-     * and it cannot be the first character of the name.
-     * 
- */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned metric identifier. Example:
-     * `"severe_errors"`.  Metric identifiers are limited to 1000
-     * characters and can include only the following characters: `A-Z`,
-     * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-     * forward-slash character (`/`) denotes a hierarchy of name pieces,
-     * and it cannot be the first character of the name.
-     * 
- */ - public Builder setName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - name_ = value; - onChanged(); - return this; - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned metric identifier. Example:
-     * `"severe_errors"`.  Metric identifiers are limited to 1000
-     * characters and can include only the following characters: `A-Z`,
-     * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-     * forward-slash character (`/`) denotes a hierarchy of name pieces,
-     * and it cannot be the first character of the name.
-     * 
- */ - public Builder clearName() { - - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned metric identifier. Example:
-     * `"severe_errors"`.  Metric identifiers are limited to 1000
-     * characters and can include only the following characters: `A-Z`,
-     * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-     * forward-slash character (`/`) denotes a hierarchy of name pieces,
-     * and it cannot be the first character of the name.
-     * 
- */ - public Builder setNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - name_ = value; - onChanged(); - return this; - } - - private java.lang.Object description_ = ""; - /** - * optional string description = 2; - * - *
-     * A description of this metric, which is used in documentation.
-     * 
- */ - public java.lang.String getDescription() { - java.lang.Object ref = description_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - description_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string description = 2; - * - *
-     * A description of this metric, which is used in documentation.
-     * 
- */ - public com.google.protobuf.ByteString - getDescriptionBytes() { - java.lang.Object ref = description_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - description_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string description = 2; - * - *
-     * A description of this metric, which is used in documentation.
-     * 
- */ - public Builder setDescription( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - description_ = value; - onChanged(); - return this; - } - /** - * optional string description = 2; - * - *
-     * A description of this metric, which is used in documentation.
-     * 
- */ - public Builder clearDescription() { - - description_ = getDefaultInstance().getDescription(); - onChanged(); - return this; - } - /** - * optional string description = 2; - * - *
-     * A description of this metric, which is used in documentation.
-     * 
- */ - public Builder setDescriptionBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - description_ = value; - onChanged(); - return this; - } - - private java.lang.Object filter_ = ""; - /** - * optional string filter = 3; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string filter = 3; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string filter = 3; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder setFilter( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - filter_ = value; - onChanged(); - return this; - } - /** - * optional string filter = 3; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder clearFilter() { - - filter_ = getDefaultInstance().getFilter(); - onChanged(); - return this; - } - /** - * optional string filter = 3; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder setFilterBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - filter_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.LogMetric) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - private static final com.google.logging.v2.LogMetric DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.LogMetric(); - } - - public static com.google.logging.v2.LogMetric getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public LogMetric parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new LogMetric(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.LogMetric getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetricOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetricOrBuilder.java deleted file mode 100644 index 
19b324038e38..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogMetricOrBuilder.java +++ /dev/null @@ -1,75 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface LogMetricOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.LogMetric) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned metric identifier. Example:
-   * `"severe_errors"`.  Metric identifiers are limited to 1000
-   * characters and can include only the following characters: `A-Z`,
-   * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-   * forward-slash character (`/`) denotes a hierarchy of name pieces,
-   * and it cannot be the first character of the name.
-   * 
- */ - java.lang.String getName(); - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned metric identifier. Example:
-   * `"severe_errors"`.  Metric identifiers are limited to 1000
-   * characters and can include only the following characters: `A-Z`,
-   * `a-z`, `0-9`, and the special characters `_-.,+!*',()%/\`.  The
-   * forward-slash character (`/`) denotes a hierarchy of name pieces,
-   * and it cannot be the first character of the name.
-   * 
- */ - com.google.protobuf.ByteString - getNameBytes(); - - /** - * optional string description = 2; - * - *
-   * A description of this metric, which is used in documentation.
-   * 
- */ - java.lang.String getDescription(); - /** - * optional string description = 2; - * - *
-   * A description of this metric, which is used in documentation.
-   * 
- */ - com.google.protobuf.ByteString - getDescriptionBytes(); - - /** - * optional string filter = 3; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - java.lang.String getFilter(); - /** - * optional string filter = 3; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - com.google.protobuf.ByteString - getFilterBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSink.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSink.java deleted file mode 100644 index 0edaa4c3a704..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSink.java +++ /dev/null @@ -1,1115 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.LogSink} - * - *
- * Describes a sink used to export log entries outside Cloud Logging.
- * 
- */ -public final class LogSink extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.LogSink) - LogSinkOrBuilder { - // Use LogSink.newBuilder() to construct. - private LogSink(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private LogSink() { - name_ = ""; - destination_ = ""; - filter_ = ""; - outputVersionFormat_ = 0; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private LogSink( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - name_ = s; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - - destination_ = s; - break; - } - case 42: { - String s = input.readStringRequireUtf8(); - - filter_ = s; - break; - } - case 48: { - int rawValue = input.readEnum(); - - outputVersionFormat_ = rawValue; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_LogSink_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_LogSink_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogSink.class, com.google.logging.v2.LogSink.Builder.class); - } - - /** - * Protobuf enum {@code google.logging.v2.LogSink.VersionFormat} - * - *
-   * Available log entry formats. Log entries can be written to Cloud
-   * Logging in either format and can be exported in either format.
-   * Version 2 is the preferred format.
-   * 
- */ - public enum VersionFormat - implements com.google.protobuf.ProtocolMessageEnum { - /** - * VERSION_FORMAT_UNSPECIFIED = 0; - * - *
-     * An unspecified version format will default to V2.
-     * 
- */ - VERSION_FORMAT_UNSPECIFIED(0, 0), - /** - * V2 = 1; - * - *
-     * `LogEntry` version 2 format.
-     * 
- */ - V2(1, 1), - /** - * V1 = 2; - * - *
-     * `LogEntry` version 1 format.
-     * 
- */ - V1(2, 2), - UNRECOGNIZED(-1, -1), - ; - - /** - * VERSION_FORMAT_UNSPECIFIED = 0; - * - *
-     * An unspecified version format will default to V2.
-     * 
- */ - public static final int VERSION_FORMAT_UNSPECIFIED_VALUE = 0; - /** - * V2 = 1; - * - *
-     * `LogEntry` version 2 format.
-     * 
- */ - public static final int V2_VALUE = 1; - /** - * V1 = 2; - * - *
-     * `LogEntry` version 1 format.
-     * 
- */ - public static final int V1_VALUE = 2; - - - public final int getNumber() { - if (index == -1) { - throw new java.lang.IllegalArgumentException( - "Can't get the number of an unknown enum value."); - } - return value; - } - - public static VersionFormat valueOf(int value) { - switch (value) { - case 0: return VERSION_FORMAT_UNSPECIFIED; - case 1: return V2; - case 2: return V1; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static final com.google.protobuf.Internal.EnumLiteMap< - VersionFormat> internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public VersionFormat findValueByNumber(int number) { - return VersionFormat.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return com.google.logging.v2.LogSink.getDescriptor().getEnumTypes().get(0); - } - - private static final VersionFormat[] VALUES = values(); - - public static VersionFormat valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - if (desc.getIndex() == -1) { - return UNRECOGNIZED; - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private VersionFormat(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:google.logging.v2.LogSink.VersionFormat) - } - - public static final int NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object name_; - /** - * optional string name = 
1; - * - *
-   * Required. The client-assigned sink identifier. Example:
-   * `"my-severe-errors-to-pubsub"`.
-   * Sink identifiers are limited to 1000 characters
-   * and can include only the following characters: `A-Z`, `a-z`,
-   * `0-9`, and the special characters `_-.`.
-   * 
- */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } - } - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned sink identifier. Example:
-   * `"my-severe-errors-to-pubsub"`.
-   * Sink identifiers are limited to 1000 characters
-   * and can include only the following characters: `A-Z`, `a-z`,
-   * `0-9`, and the special characters `_-.`.
-   * 
- */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int DESTINATION_FIELD_NUMBER = 3; - private volatile java.lang.Object destination_; - /** - * optional string destination = 3; - * - *
-   * The export destination. See
-   * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-   * Examples: `"storage.googleapis.com/a-bucket"`,
-   * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-   * 
- */ - public java.lang.String getDestination() { - java.lang.Object ref = destination_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - destination_ = s; - return s; - } - } - /** - * optional string destination = 3; - * - *
-   * The export destination. See
-   * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-   * Examples: `"storage.googleapis.com/a-bucket"`,
-   * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-   * 
- */ - public com.google.protobuf.ByteString - getDestinationBytes() { - java.lang.Object ref = destination_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int FILTER_FIELD_NUMBER = 5; - private volatile java.lang.Object filter_; - /** - * optional string filter = 5; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters)
-   * that defines the log entries to be exported.  The filter must be
-   * consistent with the log entry format designed by the
-   * `outputVersionFormat` parameter, regardless of the format of the
-   * log entry that was originally written to Cloud Logging.
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } - } - /** - * optional string filter = 5; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters)
-   * that defines the log entries to be exported.  The filter must be
-   * consistent with the log entry format designed by the
-   * `outputVersionFormat` parameter, regardless of the format of the
-   * log entry that was originally written to Cloud Logging.
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int OUTPUT_VERSION_FORMAT_FIELD_NUMBER = 6; - private int outputVersionFormat_; - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-   * The log entry version used when exporting log entries from this
-   * sink.  This version does not have to correspond to the version of
-   * the log entry when it was written to Cloud Logging.
-   * 
- */ - public int getOutputVersionFormatValue() { - return outputVersionFormat_; - } - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-   * The log entry version used when exporting log entries from this
-   * sink.  This version does not have to correspond to the version of
-   * the log entry when it was written to Cloud Logging.
-   * 
- */ - public com.google.logging.v2.LogSink.VersionFormat getOutputVersionFormat() { - com.google.logging.v2.LogSink.VersionFormat result = com.google.logging.v2.LogSink.VersionFormat.valueOf(outputVersionFormat_); - return result == null ? com.google.logging.v2.LogSink.VersionFormat.UNRECOGNIZED : result; - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, name_); - } - if (!getDestinationBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 3, destination_); - } - if (!getFilterBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 5, filter_); - } - if (outputVersionFormat_ != com.google.logging.v2.LogSink.VersionFormat.VERSION_FORMAT_UNSPECIFIED.getNumber()) { - output.writeEnum(6, outputVersionFormat_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, name_); - } - if (!getDestinationBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(3, destination_); - } - if (!getFilterBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(5, filter_); - } - if (outputVersionFormat_ != com.google.logging.v2.LogSink.VersionFormat.VERSION_FORMAT_UNSPECIFIED.getNumber()) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(6, outputVersionFormat_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static 
com.google.logging.v2.LogSink parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogSink parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogSink parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.LogSink parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.LogSink parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogSink parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogSink parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.LogSink parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.LogSink parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.LogSink parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.LogSink prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.LogSink} - * - *
-   * Describes a sink used to export log entries outside Cloud Logging.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.LogSink) - com.google.logging.v2.LogSinkOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_LogSink_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_LogSink_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.LogSink.class, com.google.logging.v2.LogSink.Builder.class); - } - - // Construct using com.google.logging.v2.LogSink.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - name_ = ""; - - destination_ = ""; - - filter_ = ""; - - outputVersionFormat_ = 0; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_LogSink_descriptor; - } - - public com.google.logging.v2.LogSink getDefaultInstanceForType() { - return com.google.logging.v2.LogSink.getDefaultInstance(); - } - - public com.google.logging.v2.LogSink build() { - com.google.logging.v2.LogSink result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.LogSink buildPartial() { - com.google.logging.v2.LogSink result = new com.google.logging.v2.LogSink(this); - result.name_ = name_; 
- result.destination_ = destination_; - result.filter_ = filter_; - result.outputVersionFormat_ = outputVersionFormat_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.LogSink) { - return mergeFrom((com.google.logging.v2.LogSink)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.LogSink other) { - if (other == com.google.logging.v2.LogSink.getDefaultInstance()) return this; - if (!other.getName().isEmpty()) { - name_ = other.name_; - onChanged(); - } - if (!other.getDestination().isEmpty()) { - destination_ = other.destination_; - onChanged(); - } - if (!other.getFilter().isEmpty()) { - filter_ = other.filter_; - onChanged(); - } - if (other.outputVersionFormat_ != 0) { - setOutputVersionFormatValue(other.getOutputVersionFormatValue()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.LogSink parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.LogSink) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object name_ = ""; - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned sink identifier. Example:
-     * `"my-severe-errors-to-pubsub"`.
-     * Sink identifiers are limited to 1000 characters
-     * and can include only the following characters: `A-Z`, `a-z`,
-     * `0-9`, and the special characters `_-.`.
-     * 
- */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned sink identifier. Example:
-     * `"my-severe-errors-to-pubsub"`.
-     * Sink identifiers are limited to 1000 characters
-     * and can include only the following characters: `A-Z`, `a-z`,
-     * `0-9`, and the special characters `_-.`.
-     * 
- */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned sink identifier. Example:
-     * `"my-severe-errors-to-pubsub"`.
-     * Sink identifiers are limited to 1000 characters
-     * and can include only the following characters: `A-Z`, `a-z`,
-     * `0-9`, and the special characters `_-.`.
-     * 
- */ - public Builder setName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - name_ = value; - onChanged(); - return this; - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned sink identifier. Example:
-     * `"my-severe-errors-to-pubsub"`.
-     * Sink identifiers are limited to 1000 characters
-     * and can include only the following characters: `A-Z`, `a-z`,
-     * `0-9`, and the special characters `_-.`.
-     * 
- */ - public Builder clearName() { - - name_ = getDefaultInstance().getName(); - onChanged(); - return this; - } - /** - * optional string name = 1; - * - *
-     * Required. The client-assigned sink identifier. Example:
-     * `"my-severe-errors-to-pubsub"`.
-     * Sink identifiers are limited to 1000 characters
-     * and can include only the following characters: `A-Z`, `a-z`,
-     * `0-9`, and the special characters `_-.`.
-     * 
- */ - public Builder setNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - name_ = value; - onChanged(); - return this; - } - - private java.lang.Object destination_ = ""; - /** - * optional string destination = 3; - * - *
-     * The export destination. See
-     * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-     * Examples: `"storage.googleapis.com/a-bucket"`,
-     * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-     * 
- */ - public java.lang.String getDestination() { - java.lang.Object ref = destination_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - destination_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string destination = 3; - * - *
-     * The export destination. See
-     * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-     * Examples: `"storage.googleapis.com/a-bucket"`,
-     * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-     * 
- */ - public com.google.protobuf.ByteString - getDestinationBytes() { - java.lang.Object ref = destination_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string destination = 3; - * - *
-     * The export destination. See
-     * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-     * Examples: `"storage.googleapis.com/a-bucket"`,
-     * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-     * 
- */ - public Builder setDestination( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - destination_ = value; - onChanged(); - return this; - } - /** - * optional string destination = 3; - * - *
-     * The export destination. See
-     * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-     * Examples: `"storage.googleapis.com/a-bucket"`,
-     * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-     * 
- */ - public Builder clearDestination() { - - destination_ = getDefaultInstance().getDestination(); - onChanged(); - return this; - } - /** - * optional string destination = 3; - * - *
-     * The export destination. See
-     * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-     * Examples: `"storage.googleapis.com/a-bucket"`,
-     * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-     * 
- */ - public Builder setDestinationBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - destination_ = value; - onChanged(); - return this; - } - - private java.lang.Object filter_ = ""; - /** - * optional string filter = 5; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters)
-     * that defines the log entries to be exported.  The filter must be
-     * consistent with the log entry format designed by the
-     * `outputVersionFormat` parameter, regardless of the format of the
-     * log entry that was originally written to Cloud Logging.
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string filter = 5; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters)
-     * that defines the log entries to be exported.  The filter must be
-     * consistent with the log entry format designed by the
-     * `outputVersionFormat` parameter, regardless of the format of the
-     * log entry that was originally written to Cloud Logging.
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string filter = 5; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters)
-     * that defines the log entries to be exported.  The filter must be
-     * consistent with the log entry format designed by the
-     * `outputVersionFormat` parameter, regardless of the format of the
-     * log entry that was originally written to Cloud Logging.
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder setFilter( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - filter_ = value; - onChanged(); - return this; - } - /** - * optional string filter = 5; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters)
-     * that defines the log entries to be exported.  The filter must be
-     * consistent with the log entry format designed by the
-     * `outputVersionFormat` parameter, regardless of the format of the
-     * log entry that was originally written to Cloud Logging.
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder clearFilter() { - - filter_ = getDefaultInstance().getFilter(); - onChanged(); - return this; - } - /** - * optional string filter = 5; - * - *
-     * An [advanced logs filter](/logging/docs/view/advanced_filters)
-     * that defines the log entries to be exported.  The filter must be
-     * consistent with the log entry format designed by the
-     * `outputVersionFormat` parameter, regardless of the format of the
-     * log entry that was originally written to Cloud Logging.
-     * Example: `"logName:syslog AND severity>=ERROR"`.
-     * 
- */ - public Builder setFilterBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - filter_ = value; - onChanged(); - return this; - } - - private int outputVersionFormat_ = 0; - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-     * The log entry version used when exporting log entries from this
-     * sink.  This version does not have to correspond to the version of
-     * the log entry when it was written to Cloud Logging.
-     * 
- */ - public int getOutputVersionFormatValue() { - return outputVersionFormat_; - } - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-     * The log entry version used when exporting log entries from this
-     * sink.  This version does not have to correspond to the version of
-     * the log entry when it was written to Cloud Logging.
-     * 
- */ - public Builder setOutputVersionFormatValue(int value) { - outputVersionFormat_ = value; - onChanged(); - return this; - } - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-     * The log entry version used when exporting log entries from this
-     * sink.  This version does not have to correspond to the version of
-     * the log entry when it was written to Cloud Logging.
-     * 
- */ - public com.google.logging.v2.LogSink.VersionFormat getOutputVersionFormat() { - com.google.logging.v2.LogSink.VersionFormat result = com.google.logging.v2.LogSink.VersionFormat.valueOf(outputVersionFormat_); - return result == null ? com.google.logging.v2.LogSink.VersionFormat.UNRECOGNIZED : result; - } - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-     * The log entry version used when exporting log entries from this
-     * sink.  This version does not have to correspond to the version of
-     * the log entry when it was written to Cloud Logging.
-     * 
- */ - public Builder setOutputVersionFormat(com.google.logging.v2.LogSink.VersionFormat value) { - if (value == null) { - throw new NullPointerException(); - } - - outputVersionFormat_ = value.getNumber(); - onChanged(); - return this; - } - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-     * The log entry version used when exporting log entries from this
-     * sink.  This version does not have to correspond to the version of
-     * the log entry when it was written to Cloud Logging.
-     * 
- */ - public Builder clearOutputVersionFormat() { - - outputVersionFormat_ = 0; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.LogSink) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - private static final com.google.logging.v2.LogSink DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.LogSink(); - } - - public static com.google.logging.v2.LogSink getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public LogSink parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new LogSink(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.LogSink getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSinkOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSinkOrBuilder.java deleted file mode 100644 index 4d3a9c6ed0ec..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LogSinkOrBuilder.java +++ /dev/null @@ 
-1,108 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface LogSinkOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.LogSink) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned sink identifier. Example:
-   * `"my-severe-errors-to-pubsub"`.
-   * Sink identifiers are limited to 1000 characters
-   * and can include only the following characters: `A-Z`, `a-z`,
-   * `0-9`, and the special characters `_-.`.
-   * 
- */ - java.lang.String getName(); - /** - * optional string name = 1; - * - *
-   * Required. The client-assigned sink identifier. Example:
-   * `"my-severe-errors-to-pubsub"`.
-   * Sink identifiers are limited to 1000 characters
-   * and can include only the following characters: `A-Z`, `a-z`,
-   * `0-9`, and the special characters `_-.`.
-   * 
- */ - com.google.protobuf.ByteString - getNameBytes(); - - /** - * optional string destination = 3; - * - *
-   * The export destination. See
-   * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-   * Examples: `"storage.googleapis.com/a-bucket"`,
-   * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-   * 
- */ - java.lang.String getDestination(); - /** - * optional string destination = 3; - * - *
-   * The export destination. See
-   * [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs).
-   * Examples: `"storage.googleapis.com/a-bucket"`,
-   * `"bigquery.googleapis.com/projects/a-project-id/datasets/a-dataset"`.
-   * 
- */ - com.google.protobuf.ByteString - getDestinationBytes(); - - /** - * optional string filter = 5; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters)
-   * that defines the log entries to be exported.  The filter must be
-   * consistent with the log entry format designed by the
-   * `outputVersionFormat` parameter, regardless of the format of the
-   * log entry that was originally written to Cloud Logging.
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - java.lang.String getFilter(); - /** - * optional string filter = 5; - * - *
-   * An [advanced logs filter](/logging/docs/view/advanced_filters)
-   * that defines the log entries to be exported.  The filter must be
-   * consistent with the log entry format designed by the
-   * `outputVersionFormat` parameter, regardless of the format of the
-   * log entry that was originally written to Cloud Logging.
-   * Example: `"logName:syslog AND severity>=ERROR"`.
-   * 
- */ - com.google.protobuf.ByteString - getFilterBytes(); - - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-   * The log entry version used when exporting log entries from this
-   * sink.  This version does not have to correspond to the version of
-   * the log entry when it was written to Cloud Logging.
-   * 
- */ - int getOutputVersionFormatValue(); - /** - * optional .google.logging.v2.LogSink.VersionFormat output_version_format = 6; - * - *
-   * The log entry version used when exporting log entries from this
-   * sink.  This version does not have to correspond to the version of
-   * the log entry when it was written to Cloud Logging.
-   * 
- */ - com.google.logging.v2.LogSink.VersionFormat getOutputVersionFormat(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingConfig.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingConfig.java deleted file mode 100644 index 853e6313d2e1..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingConfig.java +++ /dev/null @@ -1,162 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public final class LoggingConfig { - private LoggingConfig() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_LogSink_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_LogSink_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListSinksRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListSinksResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListSinksResponse_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_GetSinkRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_GetSinkRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_CreateSinkRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_google_logging_v2_CreateSinkRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_UpdateSinkRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_DeleteSinkRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_DeleteSinkRequest_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n&google/logging/v2/logging_config.proto" + - "\022\021google.logging.v2\032\034google/api/annotati" + - "ons.proto\032\033google/protobuf/empty.proto\032\037" + - "google/protobuf/timestamp.proto\"\306\001\n\007LogS" + - "ink\022\014\n\004name\030\001 \001(\t\022\023\n\013destination\030\003 \001(\t\022\016" + - "\n\006filter\030\005 \001(\t\022G\n\025output_version_format\030" + - "\006 \001(\0162(.google.logging.v2.LogSink.Versio" + - "nFormat\"?\n\rVersionFormat\022\036\n\032VERSION_FORM" + - "AT_UNSPECIFIED\020\000\022\006\n\002V2\020\001\022\006\n\002V1\020\002\"O\n\020List" + - "SinksRequest\022\024\n\014project_name\030\001 \001(\t\022\022\n\npa", - "ge_token\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\"W\n\021Lis" + - "tSinksResponse\022)\n\005sinks\030\001 \003(\0132\032.google.l" + - "ogging.v2.LogSink\022\027\n\017next_page_token\030\002 \001" + - "(\t\"#\n\016GetSinkRequest\022\021\n\tsink_name\030\001 \001(\t\"" + - "S\n\021CreateSinkRequest\022\024\n\014project_name\030\001 \001" + - "(\t\022(\n\004sink\030\002 \001(\0132\032.google.logging.v2.Log" + - "Sink\"P\n\021UpdateSinkRequest\022\021\n\tsink_name\030\001" + - " 
\001(\t\022(\n\004sink\030\002 \001(\0132\032.google.logging.v2.L" + - "ogSink\"&\n\021DeleteSinkRequest\022\021\n\tsink_name" + - "\030\001 \001(\t2\245\005\n\017ConfigServiceV2\022\210\001\n\tListSinks", - "\022#.google.logging.v2.ListSinksRequest\032$." + - "google.logging.v2.ListSinksResponse\"0\202\323\344" + - "\223\002*\022(/v2beta1/{project_name=projects/*}/" + - "sinks\022y\n\007GetSink\022!.google.logging.v2.Get" + - "SinkRequest\032\032.google.logging.v2.LogSink\"" + - "/\202\323\344\223\002)\022\'/v2beta1/{sink_name=projects/*/" + - "sinks/*}\022\206\001\n\nCreateSink\022$.google.logging" + - ".v2.CreateSinkRequest\032\032.google.logging.v" + - "2.LogSink\"6\202\323\344\223\0020\"(/v2beta1/{project_nam" + - "e=projects/*}/sinks:\004sink\022\205\001\n\nUpdateSink", - "\022$.google.logging.v2.UpdateSinkRequest\032\032" + - ".google.logging.v2.LogSink\"5\202\323\344\223\002/\032\'/v2b" + - "eta1/{sink_name=projects/*/sinks/*}:\004sin" + - "k\022{\n\nDeleteSink\022$.google.logging.v2.Dele" + - "teSinkRequest\032\026.google.protobuf.Empty\"/\202" + - "\323\344\223\002)*\'/v2beta1/{sink_name=projects/*/si" + - "nks/*}B(\n\025com.google.logging.v2B\rLogging" + - "ConfigP\001b\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.api.AnnotationsProto.getDescriptor(), - com.google.protobuf.EmptyProto.getDescriptor(), - com.google.protobuf.TimestampProto.getDescriptor(), - }, assigner); - internal_static_google_logging_v2_LogSink_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_logging_v2_LogSink_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_LogSink_descriptor, - new java.lang.String[] { "Name", "Destination", "Filter", "OutputVersionFormat", }); - internal_static_google_logging_v2_ListSinksRequest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_google_logging_v2_ListSinksRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListSinksRequest_descriptor, - new java.lang.String[] { "ProjectName", "PageToken", "PageSize", }); - internal_static_google_logging_v2_ListSinksResponse_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_google_logging_v2_ListSinksResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListSinksResponse_descriptor, - new java.lang.String[] { "Sinks", "NextPageToken", }); - internal_static_google_logging_v2_GetSinkRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_google_logging_v2_GetSinkRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_GetSinkRequest_descriptor, - new java.lang.String[] { "SinkName", 
}); - internal_static_google_logging_v2_CreateSinkRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_google_logging_v2_CreateSinkRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_CreateSinkRequest_descriptor, - new java.lang.String[] { "ProjectName", "Sink", }); - internal_static_google_logging_v2_UpdateSinkRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_UpdateSinkRequest_descriptor, - new java.lang.String[] { "SinkName", "Sink", }); - internal_static_google_logging_v2_DeleteSinkRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_google_logging_v2_DeleteSinkRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_DeleteSinkRequest_descriptor, - new java.lang.String[] { "SinkName", }); - com.google.protobuf.ExtensionRegistry registry = - com.google.protobuf.ExtensionRegistry.newInstance(); - registry.add(com.google.api.AnnotationsProto.http); - com.google.protobuf.Descriptors.FileDescriptor - .internalUpdateFileDescriptor(descriptor, registry); - com.google.api.AnnotationsProto.getDescriptor(); - com.google.protobuf.EmptyProto.getDescriptor(); - com.google.protobuf.TimestampProto.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingMetrics.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingMetrics.java deleted file mode 100644 index b4c55f0e8ecc..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingMetrics.java +++ /dev/null @@ -1,159 +0,0 @@ -// Generated by the protocol buffer compiler. 
DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public final class LoggingMetrics { - private LoggingMetrics() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_LogMetric_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_LogMetric_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListLogMetricsRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListLogMetricsRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListLogMetricsResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_GetLogMetricRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_GetLogMetricRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_CreateLogMetricRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_CreateLogMetricRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_UpdateLogMetricRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor; - static - 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_DeleteLogMetricRequest_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\'google/logging/v2/logging_metrics.prot" + - "o\022\021google.logging.v2\032\034google/api/annotat" + - "ions.proto\032\033google/protobuf/empty.proto\"" + - "\\\n\tLogMetric\022\014\n\004name\030\001 \001(\t\022\023\n\013descriptio" + - "n\030\002 \001(\t\022\016\n\006filter\030\003 \001(\t\"\034\n\nApiVersion\022\006\n" + - "\002V2\020\000\022\006\n\002V1\020\001\"T\n\025ListLogMetricsRequest\022\024" + - "\n\014project_name\030\001 \001(\t\022\022\n\npage_token\030\002 \001(\t" + - "\022\021\n\tpage_size\030\003 \001(\005\"`\n\026ListLogMetricsRes" + - "ponse\022-\n\007metrics\030\001 \003(\0132\034.google.logging." + - "v2.LogMetric\022\027\n\017next_page_token\030\002 \001(\t\"*\n", - "\023GetLogMetricRequest\022\023\n\013metric_name\030\001 \001(" + - "\t\"\\\n\026CreateLogMetricRequest\022\024\n\014project_n" + - "ame\030\001 \001(\t\022,\n\006metric\030\002 \001(\0132\034.google.loggi" + - "ng.v2.LogMetric\"[\n\026UpdateLogMetricReques" + - "t\022\023\n\013metric_name\030\001 \001(\t\022,\n\006metric\030\002 \001(\0132\034" + - ".google.logging.v2.LogMetric\"-\n\026DeleteLo" + - "gMetricRequest\022\023\n\013metric_name\030\001 \001(\t2\371\005\n\020" + - "MetricsServiceV2\022\231\001\n\016ListLogMetrics\022(.go" + - "ogle.logging.v2.ListLogMetricsRequest\032)." 
+ - "google.logging.v2.ListLogMetricsResponse", - "\"2\202\323\344\223\002,\022*/v2beta1/{project_name=project" + - "s/*}/metrics\022\211\001\n\014GetLogMetric\022&.google.l" + - "ogging.v2.GetLogMetricRequest\032\034.google.l" + - "ogging.v2.LogMetric\"3\202\323\344\223\002-\022+/v2beta1/{m" + - "etric_name=projects/*/metrics/*}\022\226\001\n\017Cre" + - "ateLogMetric\022).google.logging.v2.CreateL" + - "ogMetricRequest\032\034.google.logging.v2.LogM" + - "etric\":\202\323\344\223\0024\"*/v2beta1/{project_name=pr" + - "ojects/*}/metrics:\006metric\022\227\001\n\017UpdateLogM" + - "etric\022).google.logging.v2.UpdateLogMetri", - "cRequest\032\034.google.logging.v2.LogMetric\";" + - "\202\323\344\223\0025\032+/v2beta1/{metric_name=projects/*" + - "/metrics/*}:\006metric\022\211\001\n\017DeleteLogMetric\022" + - ").google.logging.v2.DeleteLogMetricReque" + - "st\032\026.google.protobuf.Empty\"3\202\323\344\223\002-*+/v2b" + - "eta1/{metric_name=projects/*/metrics/*}B" + - "\031\n\025com.google.logging.v2P\001b\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.api.AnnotationsProto.getDescriptor(), - com.google.protobuf.EmptyProto.getDescriptor(), - }, assigner); - internal_static_google_logging_v2_LogMetric_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_logging_v2_LogMetric_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_LogMetric_descriptor, - new java.lang.String[] { "Name", "Description", "Filter", }); - internal_static_google_logging_v2_ListLogMetricsRequest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_google_logging_v2_ListLogMetricsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListLogMetricsRequest_descriptor, - new java.lang.String[] { "ProjectName", "PageToken", "PageSize", }); - internal_static_google_logging_v2_ListLogMetricsResponse_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_google_logging_v2_ListLogMetricsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListLogMetricsResponse_descriptor, - new java.lang.String[] { "Metrics", "NextPageToken", }); - internal_static_google_logging_v2_GetLogMetricRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_google_logging_v2_GetLogMetricRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_GetLogMetricRequest_descriptor, - new java.lang.String[] { "MetricName", }); - 
internal_static_google_logging_v2_CreateLogMetricRequest_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_google_logging_v2_CreateLogMetricRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_CreateLogMetricRequest_descriptor, - new java.lang.String[] { "ProjectName", "Metric", }); - internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_google_logging_v2_UpdateLogMetricRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor, - new java.lang.String[] { "MetricName", "Metric", }); - internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_google_logging_v2_DeleteLogMetricRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_DeleteLogMetricRequest_descriptor, - new java.lang.String[] { "MetricName", }); - com.google.protobuf.ExtensionRegistry registry = - com.google.protobuf.ExtensionRegistry.newInstance(); - registry.add(com.google.api.AnnotationsProto.http); - com.google.protobuf.Descriptors.FileDescriptor - .internalUpdateFileDescriptor(descriptor, registry); - com.google.api.AnnotationsProto.getDescriptor(); - com.google.protobuf.EmptyProto.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingProto.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingProto.java deleted file mode 100644 index d112735796ce..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingProto.java +++ /dev/null @@ -1,208 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public final class LoggingProto { - private LoggingProto() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_DeleteLogRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_DeleteLogRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_WriteLogEntriesRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_WriteLogEntriesResponse_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListLogEntriesRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListLogEntriesRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListLogEntriesResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListLogEntriesResponse_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor; - static 
- com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ReadLogEntriesRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ReadLogEntriesResponse_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_fieldAccessorTable; - static com.google.protobuf.Descriptors.Descriptor - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor; - static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\037google/logging/v2/logging.proto\022\021googl" + - "e.logging.v2\032\034google/api/annotations.pro" + - "to\032#google/api/monitored_resource.proto\032" + - "!google/logging/v2/log_entry.proto\032\033goog" + - "le/protobuf/empty.proto\"$\n\020DeleteLogRequ" + - "est\022\020\n\010log_name\030\001 \001(\t\"\377\001\n\026WriteLogEntrie" + - "sRequest\022\020\n\010log_name\030\001 \001(\t\022/\n\010resource\030\002" + - " \001(\0132\035.google.api.MonitoredResource\022E\n\006l" + - "abels\030\003 \003(\01325.google.logging.v2.WriteLog" + - "EntriesRequest.LabelsEntry\022,\n\007entries\030\004 ", - "\003(\0132\033.google.logging.v2.LogEntry\032-\n\013Labe" + - "lsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 
\001(\t:\0028\001\"" + - "\031\n\027WriteLogEntriesResponse\"u\n\025ListLogEnt" + - "riesRequest\022\023\n\013project_ids\030\001 \003(\t\022\016\n\006filt" + - "er\030\002 \001(\t\022\020\n\010order_by\030\003 \001(\t\022\021\n\tpage_size\030" + - "\004 \001(\005\022\022\n\npage_token\030\005 \001(\t\"_\n\026ListLogEntr" + - "iesResponse\022,\n\007entries\030\001 \003(\0132\033.google.lo" + - "gging.v2.LogEntry\022\027\n\017next_page_token\030\002 \001" + - "(\t\"d\n\025ReadLogEntriesRequest\022\023\n\013project_i" + - "ds\030\001 \003(\t\022\016\n\006filter\030\002 \001(\t\022\020\n\010order_by\030\003 \001", - "(\t\022\024\n\014resume_token\030\004 \001(\t\"\\\n\026ReadLogEntri" + - "esResponse\022,\n\007entries\030\001 \003(\0132\033.google.log" + - "ging.v2.LogEntry\022\024\n\014resume_token\030\002 \001(\t\"P" + - "\n\'ListMonitoredResourceDescriptorsReques" + - "t\022\021\n\tpage_size\030\001 \001(\005\022\022\n\npage_token\030\002 \001(\t" + - "\"\212\001\n(ListMonitoredResourceDescriptorsRes" + - "ponse\022E\n\024resource_descriptors\030\001 \003(\0132\'.go" + - "ogle.api.MonitoredResourceDescriptor\022\027\n\017" + - "next_page_token\030\002 \001(\t2\374\005\n\020LoggingService" + - "V2\022w\n\tDeleteLog\022#.google.logging.v2.Dele", - "teLogRequest\032\026.google.protobuf.Empty\"-\202\323" + - "\344\223\002\'*%/v2beta1/{log_name=projects/*/logs" + - "/*}\022\213\001\n\017WriteLogEntries\022).google.logging" + - ".v2.WriteLogEntriesRequest\032*.google.logg" + - "ing.v2.WriteLogEntriesResponse\"!\202\323\344\223\002\033\"\026" + - "/v2beta1/entries:write:\001*\022\207\001\n\016ListLogEnt" + - "ries\022(.google.logging.v2.ListLogEntriesR" + - "equest\032).google.logging.v2.ListLogEntrie" + - "sResponse\" \202\323\344\223\002\032\"\025/v2beta1/entries:list" + - ":\001*\022\211\001\n\016ReadLogEntries\022(.google.logging.", - "v2.ReadLogEntriesRequest\032).google.loggin" + - "g.v2.ReadLogEntriesResponse\" \202\323\344\223\002\032\"\025/v2" + - 
"beta1/entries:read:\001*0\001\022\312\001\n ListMonitore" + - "dResourceDescriptors\022:.google.logging.v2" + - ".ListMonitoredResourceDescriptorsRequest" + - "\032;.google.logging.v2.ListMonitoredResour" + - "ceDescriptorsResponse\"-\202\323\344\223\002\'\022%/v2beta1/" + - "monitoredResourceDescriptorsB*\n\025com.goog" + - "le.logging.v2B\014LoggingProtoP\001\370\001\001b\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.api.AnnotationsProto.getDescriptor(), - com.google.api.MonitoredResourceProto.getDescriptor(), - com.google.logging.v2.LogEntryProto.getDescriptor(), - com.google.protobuf.EmptyProto.getDescriptor(), - }, assigner); - internal_static_google_logging_v2_DeleteLogRequest_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_google_logging_v2_DeleteLogRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_DeleteLogRequest_descriptor, - new java.lang.String[] { "LogName", }); - internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_google_logging_v2_WriteLogEntriesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor, - new java.lang.String[] { "LogName", "Resource", "Labels", "Entries", }); - internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_descriptor = - 
internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor.getNestedTypes().get(0); - internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_google_logging_v2_WriteLogEntriesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor, - new java.lang.String[] { }); - internal_static_google_logging_v2_ListLogEntriesRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_google_logging_v2_ListLogEntriesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListLogEntriesRequest_descriptor, - new java.lang.String[] { "ProjectIds", "Filter", "OrderBy", "PageSize", "PageToken", }); - internal_static_google_logging_v2_ListLogEntriesResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_google_logging_v2_ListLogEntriesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListLogEntriesResponse_descriptor, - new java.lang.String[] { "Entries", "NextPageToken", }); - internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_google_logging_v2_ReadLogEntriesRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor, - new java.lang.String[] { "ProjectIds", "Filter", "OrderBy", "ResumeToken", }); - 
internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_google_logging_v2_ReadLogEntriesResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor, - new java.lang.String[] { "Entries", "ResumeToken", }); - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsRequest_descriptor, - new java.lang.String[] { "PageSize", "PageToken", }); - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_google_logging_v2_ListMonitoredResourceDescriptorsResponse_descriptor, - new java.lang.String[] { "ResourceDescriptors", "NextPageToken", }); - com.google.protobuf.ExtensionRegistry registry = - com.google.protobuf.ExtensionRegistry.newInstance(); - registry.add(com.google.api.AnnotationsProto.http); - com.google.protobuf.Descriptors.FileDescriptor - .internalUpdateFileDescriptor(descriptor, registry); - com.google.api.AnnotationsProto.getDescriptor(); - com.google.api.MonitoredResourceProto.getDescriptor(); - com.google.logging.v2.LogEntryProto.getDescriptor(); - com.google.protobuf.EmptyProto.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingServiceV2Grpc.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingServiceV2Grpc.java deleted file mode 100644 index 41c301059a75..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/LoggingServiceV2Grpc.java +++ /dev/null @@ -1,348 +0,0 @@ -package com.google.logging.v2; - -import static io.grpc.stub.ClientCalls.asyncUnaryCall; -import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; -import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; -import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; -import static io.grpc.stub.ClientCalls.blockingUnaryCall; -import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; -import static io.grpc.stub.ClientCalls.futureUnaryCall; -import static io.grpc.MethodDescriptor.generateFullMethodName; -import static io.grpc.stub.ServerCalls.asyncUnaryCall; -import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; -import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; -import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; - -@javax.annotation.Generated("by gRPC proto compiler") -public class LoggingServiceV2Grpc { - - private LoggingServiceV2Grpc() {} - - public static final String SERVICE_NAME = "google.logging.v2.LoggingServiceV2"; - - // Static method descriptors that strictly reflect the proto. 
- @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_DELETE_LOG = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.LoggingServiceV2", "DeleteLog"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.DeleteLogRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_WRITE_LOG_ENTRIES = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.LoggingServiceV2", "WriteLogEntries"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.WriteLogEntriesRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.WriteLogEntriesResponse.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_LIST_LOG_ENTRIES = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.LoggingServiceV2", "ListLogEntries"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListLogEntriesRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListLogEntriesResponse.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_READ_LOG_ENTRIES = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.SERVER_STREAMING, - generateFullMethodName( - "google.logging.v2.LoggingServiceV2", "ReadLogEntries"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ReadLogEntriesRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ReadLogEntriesResponse.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor 
METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.LoggingServiceV2", "ListMonitoredResourceDescriptors"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListMonitoredResourceDescriptorsResponse.getDefaultInstance())); - - public static LoggingServiceV2Stub newStub(io.grpc.Channel channel) { - return new LoggingServiceV2Stub(channel); - } - - public static LoggingServiceV2BlockingStub newBlockingStub( - io.grpc.Channel channel) { - return new LoggingServiceV2BlockingStub(channel); - } - - public static LoggingServiceV2FutureStub newFutureStub( - io.grpc.Channel channel) { - return new LoggingServiceV2FutureStub(channel); - } - - public static interface LoggingServiceV2 { - - public void deleteLog(com.google.logging.v2.DeleteLogRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void writeLogEntries(com.google.logging.v2.WriteLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void listLogEntries(com.google.logging.v2.ListLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void readLogEntries(com.google.logging.v2.ReadLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void listMonitoredResourceDescriptors(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request, - io.grpc.stub.StreamObserver responseObserver); - } - - public static interface LoggingServiceV2BlockingClient { - - public com.google.protobuf.Empty deleteLog(com.google.logging.v2.DeleteLogRequest request); - - public com.google.logging.v2.WriteLogEntriesResponse writeLogEntries(com.google.logging.v2.WriteLogEntriesRequest request); - - public com.google.logging.v2.ListLogEntriesResponse 
listLogEntries(com.google.logging.v2.ListLogEntriesRequest request); - - public java.util.Iterator readLogEntries( - com.google.logging.v2.ReadLogEntriesRequest request); - - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse listMonitoredResourceDescriptors(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request); - } - - public static interface LoggingServiceV2FutureClient { - - public com.google.common.util.concurrent.ListenableFuture deleteLog( - com.google.logging.v2.DeleteLogRequest request); - - public com.google.common.util.concurrent.ListenableFuture writeLogEntries( - com.google.logging.v2.WriteLogEntriesRequest request); - - public com.google.common.util.concurrent.ListenableFuture listLogEntries( - com.google.logging.v2.ListLogEntriesRequest request); - - public com.google.common.util.concurrent.ListenableFuture listMonitoredResourceDescriptors( - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request); - } - - public static class LoggingServiceV2Stub extends io.grpc.stub.AbstractStub - implements LoggingServiceV2 { - private LoggingServiceV2Stub(io.grpc.Channel channel) { - super(channel); - } - - private LoggingServiceV2Stub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LoggingServiceV2Stub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LoggingServiceV2Stub(channel, callOptions); - } - - @java.lang.Override - public void deleteLog(com.google.logging.v2.DeleteLogRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void writeLogEntries(com.google.logging.v2.WriteLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_WRITE_LOG_ENTRIES, getCallOptions()), request, 
responseObserver); - } - - @java.lang.Override - public void listLogEntries(com.google.logging.v2.ListLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_ENTRIES, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void readLogEntries(com.google.logging.v2.ReadLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncServerStreamingCall( - getChannel().newCall(METHOD_READ_LOG_ENTRIES, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void listMonitoredResourceDescriptors(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, getCallOptions()), request, responseObserver); - } - } - - public static class LoggingServiceV2BlockingStub extends io.grpc.stub.AbstractStub - implements LoggingServiceV2BlockingClient { - private LoggingServiceV2BlockingStub(io.grpc.Channel channel) { - super(channel); - } - - private LoggingServiceV2BlockingStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LoggingServiceV2BlockingStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LoggingServiceV2BlockingStub(channel, callOptions); - } - - @java.lang.Override - public com.google.protobuf.Empty deleteLog(com.google.logging.v2.DeleteLogRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.WriteLogEntriesResponse writeLogEntries(com.google.logging.v2.WriteLogEntriesRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_WRITE_LOG_ENTRIES, getCallOptions()), request); - } - - @java.lang.Override - public 
com.google.logging.v2.ListLogEntriesResponse listLogEntries(com.google.logging.v2.ListLogEntriesRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_ENTRIES, getCallOptions()), request); - } - - @java.lang.Override - public java.util.Iterator readLogEntries( - com.google.logging.v2.ReadLogEntriesRequest request) { - return blockingServerStreamingCall( - getChannel().newCall(METHOD_READ_LOG_ENTRIES, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.ListMonitoredResourceDescriptorsResponse listMonitoredResourceDescriptors(com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, getCallOptions()), request); - } - } - - public static class LoggingServiceV2FutureStub extends io.grpc.stub.AbstractStub - implements LoggingServiceV2FutureClient { - private LoggingServiceV2FutureStub(io.grpc.Channel channel) { - super(channel); - } - - private LoggingServiceV2FutureStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected LoggingServiceV2FutureStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new LoggingServiceV2FutureStub(channel, callOptions); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture deleteLog( - com.google.logging.v2.DeleteLogRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture writeLogEntries( - com.google.logging.v2.WriteLogEntriesRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_WRITE_LOG_ENTRIES, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture listLogEntries( - 
com.google.logging.v2.ListLogEntriesRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_ENTRIES, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture listMonitoredResourceDescriptors( - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, getCallOptions()), request); - } - } - - public static io.grpc.ServerServiceDefinition bindService( - final LoggingServiceV2 serviceImpl) { - return io.grpc.ServerServiceDefinition.builder(SERVICE_NAME) - .addMethod( - METHOD_DELETE_LOG, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.DeleteLogRequest, - com.google.protobuf.Empty>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.DeleteLogRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.deleteLog(request, responseObserver); - } - })) - .addMethod( - METHOD_WRITE_LOG_ENTRIES, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.WriteLogEntriesRequest, - com.google.logging.v2.WriteLogEntriesResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.WriteLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.writeLogEntries(request, responseObserver); - } - })) - .addMethod( - METHOD_LIST_LOG_ENTRIES, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.ListLogEntriesRequest, - com.google.logging.v2.ListLogEntriesResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.ListLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.listLogEntries(request, responseObserver); - } - })) - .addMethod( - METHOD_READ_LOG_ENTRIES, - asyncServerStreamingCall( - new io.grpc.stub.ServerCalls.ServerStreamingMethod< - 
com.google.logging.v2.ReadLogEntriesRequest, - com.google.logging.v2.ReadLogEntriesResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.ReadLogEntriesRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.readLogEntries(request, responseObserver); - } - })) - .addMethod( - METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest, - com.google.logging.v2.ListMonitoredResourceDescriptorsResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.ListMonitoredResourceDescriptorsRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.listMonitoredResourceDescriptors(request, responseObserver); - } - })).build(); - } -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/MetricsServiceV2Grpc.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/MetricsServiceV2Grpc.java deleted file mode 100644 index ae6687409bc8..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/MetricsServiceV2Grpc.java +++ /dev/null @@ -1,356 +0,0 @@ -package com.google.logging.v2; - -import static io.grpc.stub.ClientCalls.asyncUnaryCall; -import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; -import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; -import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; -import static io.grpc.stub.ClientCalls.blockingUnaryCall; -import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; -import static io.grpc.stub.ClientCalls.futureUnaryCall; -import static io.grpc.MethodDescriptor.generateFullMethodName; -import static io.grpc.stub.ServerCalls.asyncUnaryCall; -import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; -import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; -import static 
io.grpc.stub.ServerCalls.asyncBidiStreamingCall; - -@javax.annotation.Generated("by gRPC proto compiler") -public class MetricsServiceV2Grpc { - - private MetricsServiceV2Grpc() {} - - public static final String SERVICE_NAME = "google.logging.v2.MetricsServiceV2"; - - // Static method descriptors that strictly reflect the proto. - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_LIST_LOG_METRICS = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.MetricsServiceV2", "ListLogMetrics"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListLogMetricsRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.ListLogMetricsResponse.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_GET_LOG_METRIC = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.MetricsServiceV2", "GetLogMetric"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.GetLogMetricRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogMetric.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_CREATE_LOG_METRIC = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.MetricsServiceV2", "CreateLogMetric"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.CreateLogMetricRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogMetric.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_UPDATE_LOG_METRIC = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.MetricsServiceV2", 
"UpdateLogMetric"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.UpdateLogMetricRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.LogMetric.getDefaultInstance())); - @io.grpc.ExperimentalApi - public static final io.grpc.MethodDescriptor METHOD_DELETE_LOG_METRIC = - io.grpc.MethodDescriptor.create( - io.grpc.MethodDescriptor.MethodType.UNARY, - generateFullMethodName( - "google.logging.v2.MetricsServiceV2", "DeleteLogMetric"), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.logging.v2.DeleteLogMetricRequest.getDefaultInstance()), - io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.getDefaultInstance())); - - public static MetricsServiceV2Stub newStub(io.grpc.Channel channel) { - return new MetricsServiceV2Stub(channel); - } - - public static MetricsServiceV2BlockingStub newBlockingStub( - io.grpc.Channel channel) { - return new MetricsServiceV2BlockingStub(channel); - } - - public static MetricsServiceV2FutureStub newFutureStub( - io.grpc.Channel channel) { - return new MetricsServiceV2FutureStub(channel); - } - - public static interface MetricsServiceV2 { - - public void listLogMetrics(com.google.logging.v2.ListLogMetricsRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void getLogMetric(com.google.logging.v2.GetLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void createLogMetric(com.google.logging.v2.CreateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void updateLogMetric(com.google.logging.v2.UpdateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver); - - public void deleteLogMetric(com.google.logging.v2.DeleteLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver); - } - - public static interface MetricsServiceV2BlockingClient { - - public com.google.logging.v2.ListLogMetricsResponse 
listLogMetrics(com.google.logging.v2.ListLogMetricsRequest request); - - public com.google.logging.v2.LogMetric getLogMetric(com.google.logging.v2.GetLogMetricRequest request); - - public com.google.logging.v2.LogMetric createLogMetric(com.google.logging.v2.CreateLogMetricRequest request); - - public com.google.logging.v2.LogMetric updateLogMetric(com.google.logging.v2.UpdateLogMetricRequest request); - - public com.google.protobuf.Empty deleteLogMetric(com.google.logging.v2.DeleteLogMetricRequest request); - } - - public static interface MetricsServiceV2FutureClient { - - public com.google.common.util.concurrent.ListenableFuture listLogMetrics( - com.google.logging.v2.ListLogMetricsRequest request); - - public com.google.common.util.concurrent.ListenableFuture getLogMetric( - com.google.logging.v2.GetLogMetricRequest request); - - public com.google.common.util.concurrent.ListenableFuture createLogMetric( - com.google.logging.v2.CreateLogMetricRequest request); - - public com.google.common.util.concurrent.ListenableFuture updateLogMetric( - com.google.logging.v2.UpdateLogMetricRequest request); - - public com.google.common.util.concurrent.ListenableFuture deleteLogMetric( - com.google.logging.v2.DeleteLogMetricRequest request); - } - - public static class MetricsServiceV2Stub extends io.grpc.stub.AbstractStub - implements MetricsServiceV2 { - private MetricsServiceV2Stub(io.grpc.Channel channel) { - super(channel); - } - - private MetricsServiceV2Stub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected MetricsServiceV2Stub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new MetricsServiceV2Stub(channel, callOptions); - } - - @java.lang.Override - public void listLogMetrics(com.google.logging.v2.ListLogMetricsRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_METRICS, getCallOptions()), 
request, responseObserver); - } - - @java.lang.Override - public void getLogMetric(com.google.logging.v2.GetLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_GET_LOG_METRIC, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void createLogMetric(com.google.logging.v2.CreateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_CREATE_LOG_METRIC, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void updateLogMetric(com.google.logging.v2.UpdateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_UPDATE_LOG_METRIC, getCallOptions()), request, responseObserver); - } - - @java.lang.Override - public void deleteLogMetric(com.google.logging.v2.DeleteLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - asyncUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG_METRIC, getCallOptions()), request, responseObserver); - } - } - - public static class MetricsServiceV2BlockingStub extends io.grpc.stub.AbstractStub - implements MetricsServiceV2BlockingClient { - private MetricsServiceV2BlockingStub(io.grpc.Channel channel) { - super(channel); - } - - private MetricsServiceV2BlockingStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected MetricsServiceV2BlockingStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new MetricsServiceV2BlockingStub(channel, callOptions); - } - - @java.lang.Override - public com.google.logging.v2.ListLogMetricsResponse listLogMetrics(com.google.logging.v2.ListLogMetricsRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_METRICS, getCallOptions()), request); - } - - @java.lang.Override - public 
com.google.logging.v2.LogMetric getLogMetric(com.google.logging.v2.GetLogMetricRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_GET_LOG_METRIC, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.LogMetric createLogMetric(com.google.logging.v2.CreateLogMetricRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_CREATE_LOG_METRIC, getCallOptions()), request); - } - - @java.lang.Override - public com.google.logging.v2.LogMetric updateLogMetric(com.google.logging.v2.UpdateLogMetricRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_UPDATE_LOG_METRIC, getCallOptions()), request); - } - - @java.lang.Override - public com.google.protobuf.Empty deleteLogMetric(com.google.logging.v2.DeleteLogMetricRequest request) { - return blockingUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG_METRIC, getCallOptions()), request); - } - } - - public static class MetricsServiceV2FutureStub extends io.grpc.stub.AbstractStub - implements MetricsServiceV2FutureClient { - private MetricsServiceV2FutureStub(io.grpc.Channel channel) { - super(channel); - } - - private MetricsServiceV2FutureStub(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - super(channel, callOptions); - } - - @java.lang.Override - protected MetricsServiceV2FutureStub build(io.grpc.Channel channel, - io.grpc.CallOptions callOptions) { - return new MetricsServiceV2FutureStub(channel, callOptions); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture listLogMetrics( - com.google.logging.v2.ListLogMetricsRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_LIST_LOG_METRICS, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture getLogMetric( - com.google.logging.v2.GetLogMetricRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_GET_LOG_METRIC, 
getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture createLogMetric( - com.google.logging.v2.CreateLogMetricRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_CREATE_LOG_METRIC, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture updateLogMetric( - com.google.logging.v2.UpdateLogMetricRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_UPDATE_LOG_METRIC, getCallOptions()), request); - } - - @java.lang.Override - public com.google.common.util.concurrent.ListenableFuture deleteLogMetric( - com.google.logging.v2.DeleteLogMetricRequest request) { - return futureUnaryCall( - getChannel().newCall(METHOD_DELETE_LOG_METRIC, getCallOptions()), request); - } - } - - public static io.grpc.ServerServiceDefinition bindService( - final MetricsServiceV2 serviceImpl) { - return io.grpc.ServerServiceDefinition.builder(SERVICE_NAME) - .addMethod( - METHOD_LIST_LOG_METRICS, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.ListLogMetricsRequest, - com.google.logging.v2.ListLogMetricsResponse>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.ListLogMetricsRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.listLogMetrics(request, responseObserver); - } - })) - .addMethod( - METHOD_GET_LOG_METRIC, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.GetLogMetricRequest, - com.google.logging.v2.LogMetric>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.GetLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.getLogMetric(request, responseObserver); - } - })) - .addMethod( - METHOD_CREATE_LOG_METRIC, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.CreateLogMetricRequest, - 
com.google.logging.v2.LogMetric>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.CreateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.createLogMetric(request, responseObserver); - } - })) - .addMethod( - METHOD_UPDATE_LOG_METRIC, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.UpdateLogMetricRequest, - com.google.logging.v2.LogMetric>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.UpdateLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.updateLogMetric(request, responseObserver); - } - })) - .addMethod( - METHOD_DELETE_LOG_METRIC, - asyncUnaryCall( - new io.grpc.stub.ServerCalls.UnaryMethod< - com.google.logging.v2.DeleteLogMetricRequest, - com.google.protobuf.Empty>() { - @java.lang.Override - public void invoke( - com.google.logging.v2.DeleteLogMetricRequest request, - io.grpc.stub.StreamObserver responseObserver) { - serviceImpl.deleteLogMetric(request, responseObserver); - } - })).build(); - } -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequest.java deleted file mode 100644 index b021f77a06e9..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequest.java +++ /dev/null @@ -1,1094 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ReadLogEntriesRequest} - * - *
- * The parameters to `ReadLogEntries`.
- * There are two different use cases for streaming:
- * 1.  To return a very large result set. The request eventually
- *     completes when all entries have been returned.
- * 2.  To "tail" a log stream, returning new entries as they arrive.
- *     In this case, the request never completes.
- * Only the first use case is supported.
- * 
- */ -public final class ReadLogEntriesRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ReadLogEntriesRequest) - ReadLogEntriesRequestOrBuilder { - // Use ReadLogEntriesRequest.newBuilder() to construct. - private ReadLogEntriesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ReadLogEntriesRequest() { - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - filter_ = ""; - orderBy_ = ""; - resumeToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ReadLogEntriesRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000001; - } - projectIds_.add(s); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - filter_ = s; - break; - } - case 26: { - String s = input.readStringRequireUtf8(); - - orderBy_ = s; - break; - } - case 34: { - String s = input.readStringRequireUtf8(); - - resumeToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 
0x00000001)) { - projectIds_ = projectIds_.getUnmodifiableView(); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ReadLogEntriesRequest.class, com.google.logging.v2.ReadLogEntriesRequest.Builder.class); - } - - private int bitField0_; - public static final int PROJECT_IDS_FIELD_NUMBER = 1; - private com.google.protobuf.LazyStringList projectIds_; - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - public com.google.protobuf.ProtocolStringList - getProjectIdsList() { - return projectIds_; - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - public int getProjectIdsCount() { - return projectIds_.size(); - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - public java.lang.String getProjectIds(int index) { - return projectIds_.get(index); - } - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - public com.google.protobuf.ByteString - getProjectIdsBytes(int index) { - return projectIds_.getByteString(index); - } - - public static final int FILTER_FIELD_NUMBER = 2; - private volatile java.lang.Object filter_; - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The response includes only entries that match the filter.
-   * If `filter` is empty, then all entries in all logs are retrieved.
-   * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } - } - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The response includes only entries that match the filter.
-   * If `filter` is empty, then all entries in all logs are retrieved.
-   * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int ORDER_BY_FIELD_NUMBER = 3; - private volatile java.lang.Object orderBy_; - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps will be returned in order of `LogEntry.insertId`.
-   * 
- */ - public java.lang.String getOrderBy() { - java.lang.Object ref = orderBy_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - orderBy_ = s; - return s; - } - } - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps will be returned in order of `LogEntry.insertId`.
-   * 
- */ - public com.google.protobuf.ByteString - getOrderByBytes() { - java.lang.Object ref = orderBy_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - orderBy_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int RESUME_TOKEN_FIELD_NUMBER = 4; - private volatile java.lang.Object resumeToken_; - /** - * optional string resume_token = 4; - * - *
-   * Optional. If the `resumeToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `resumeToken` request
-   * parameter must be set with the value of the `resumeToken` result parameter
-   * from the previous request.
-   * 
- */ - public java.lang.String getResumeToken() { - java.lang.Object ref = resumeToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - resumeToken_ = s; - return s; - } - } - /** - * optional string resume_token = 4; - * - *
-   * Optional. If the `resumeToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `resumeToken` request
-   * parameter must be set with the value of the `resumeToken` result parameter
-   * from the previous request.
-   * 
- */ - public com.google.protobuf.ByteString - getResumeTokenBytes() { - java.lang.Object ref = resumeToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - resumeToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < projectIds_.size(); i++) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, projectIds_.getRaw(i)); - } - if (!getFilterBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, filter_); - } - if (!getOrderByBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 3, orderBy_); - } - if (!getResumeTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 4, resumeToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < projectIds_.size(); i++) { - dataSize += computeStringSizeNoTag(projectIds_.getRaw(i)); - } - size += dataSize; - size += 1 * getProjectIdsList().size(); - } - if (!getFilterBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, filter_); - } - if (!getOrderByBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(3, orderBy_); - } - if (!getResumeTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(4, resumeToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - 
public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); 
- } - public static com.google.logging.v2.ReadLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ReadLogEntriesRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ReadLogEntriesRequest} - * - *
-   * The parameters to `ReadLogEntries`.
-   * There are two different use cases for streaming:
-   * 1.  To return a very large result set. The request eventually
-   *     completes when all entries have been returned.
-   * 2.  To "tail" a log stream, returning new entries as they arrive.
-   *     In this case, the request never completes.
-   * Only the first use case is supported.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ReadLogEntriesRequest) - com.google.logging.v2.ReadLogEntriesRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ReadLogEntriesRequest.class, com.google.logging.v2.ReadLogEntriesRequest.Builder.class); - } - - // Construct using com.google.logging.v2.ReadLogEntriesRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - filter_ = ""; - - orderBy_ = ""; - - resumeToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesRequest_descriptor; - } - - public com.google.logging.v2.ReadLogEntriesRequest getDefaultInstanceForType() { - return com.google.logging.v2.ReadLogEntriesRequest.getDefaultInstance(); - } - - public com.google.logging.v2.ReadLogEntriesRequest build() { - com.google.logging.v2.ReadLogEntriesRequest result = buildPartial(); - if 
(!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ReadLogEntriesRequest buildPartial() { - com.google.logging.v2.ReadLogEntriesRequest result = new com.google.logging.v2.ReadLogEntriesRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = projectIds_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.projectIds_ = projectIds_; - result.filter_ = filter_; - result.orderBy_ = orderBy_; - result.resumeToken_ = resumeToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ReadLogEntriesRequest) { - return mergeFrom((com.google.logging.v2.ReadLogEntriesRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ReadLogEntriesRequest other) { - if (other == com.google.logging.v2.ReadLogEntriesRequest.getDefaultInstance()) return this; - if (!other.projectIds_.isEmpty()) { - if (projectIds_.isEmpty()) { - projectIds_ = other.projectIds_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureProjectIdsIsMutable(); - projectIds_.addAll(other.projectIds_); - } - onChanged(); - } - if (!other.getFilter().isEmpty()) { - filter_ = other.filter_; - onChanged(); - } - if (!other.getOrderBy().isEmpty()) { - orderBy_ = other.orderBy_; - onChanged(); - } - if (!other.getResumeToken().isEmpty()) { - resumeToken_ = other.resumeToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ReadLogEntriesRequest parsedMessage = null; 
- try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ReadLogEntriesRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private com.google.protobuf.LazyStringList projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureProjectIdsIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - projectIds_ = new com.google.protobuf.LazyStringArrayList(projectIds_); - bitField0_ |= 0x00000001; - } - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public com.google.protobuf.ProtocolStringList - getProjectIdsList() { - return projectIds_.getUnmodifiableView(); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public int getProjectIdsCount() { - return projectIds_.size(); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public java.lang.String getProjectIds(int index) { - return projectIds_.get(index); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public com.google.protobuf.ByteString - getProjectIdsBytes(int index) { - return projectIds_.getByteString(index); - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public Builder setProjectIds( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureProjectIdsIsMutable(); - projectIds_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public Builder addProjectIds( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureProjectIdsIsMutable(); - projectIds_.add(value); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public Builder addAllProjectIds( - java.lang.Iterable values) { - ensureProjectIdsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, projectIds_); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public Builder clearProjectIds() { - projectIds_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - /** - * repeated string project_ids = 1; - * - *
-     * Required. A list of project ids from which to retrieve log entries.
-     * Example: `"my-project-id"`.
-     * 
- */ - public Builder addProjectIdsBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - ensureProjectIdsIsMutable(); - projectIds_.add(value); - onChanged(); - return this; - } - - private java.lang.Object filter_ = ""; - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The response includes only entries that match the filter.
-     * If `filter` is empty, then all entries in all logs are retrieved.
-     * 
- */ - public java.lang.String getFilter() { - java.lang.Object ref = filter_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - filter_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The response includes only entries that match the filter.
-     * If `filter` is empty, then all entries in all logs are retrieved.
-     * 
- */ - public com.google.protobuf.ByteString - getFilterBytes() { - java.lang.Object ref = filter_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - filter_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The response includes only entries that match the filter.
-     * If `filter` is empty, then all entries in all logs are retrieved.
-     * 
- */ - public Builder setFilter( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - filter_ = value; - onChanged(); - return this; - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The response includes only entries that match the filter.
-     * If `filter` is empty, then all entries in all logs are retrieved.
-     * 
- */ - public Builder clearFilter() { - - filter_ = getDefaultInstance().getFilter(); - onChanged(); - return this; - } - /** - * optional string filter = 2; - * - *
-     * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-     * The response includes only entries that match the filter.
-     * If `filter` is empty, then all entries in all logs are retrieved.
-     * 
- */ - public Builder setFilterBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - filter_ = value; - onChanged(); - return this; - } - - private java.lang.Object orderBy_ = ""; - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps will be returned in order of `LogEntry.insertId`.
-     * 
- */ - public java.lang.String getOrderBy() { - java.lang.Object ref = orderBy_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - orderBy_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps will be returned in order of `LogEntry.insertId`.
-     * 
- */ - public com.google.protobuf.ByteString - getOrderByBytes() { - java.lang.Object ref = orderBy_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - orderBy_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps will be returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder setOrderBy( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - orderBy_ = value; - onChanged(); - return this; - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps will be returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder clearOrderBy() { - - orderBy_ = getDefaultInstance().getOrderBy(); - onChanged(); - return this; - } - /** - * optional string order_by = 3; - * - *
-     * Optional. How the results should be sorted.  Presently, the only permitted
-     * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-     * option returns entries in order of increasing values of
-     * `LogEntry.timestamp` (oldest first), and the second option returns entries
-     * in order of decreasing timestamps (newest first).  Entries with equal
-     * timestamps will be returned in order of `LogEntry.insertId`.
-     * 
- */ - public Builder setOrderByBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - orderBy_ = value; - onChanged(); - return this; - } - - private java.lang.Object resumeToken_ = ""; - /** - * optional string resume_token = 4; - * - *
-     * Optional. If the `resumeToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `resumeToken` request
-     * parameter must be set with the value of the `resumeToken` result parameter
-     * from the previous request.
-     * 
- */ - public java.lang.String getResumeToken() { - java.lang.Object ref = resumeToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - resumeToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string resume_token = 4; - * - *
-     * Optional. If the `resumeToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `resumeToken` request
-     * parameter must be set with the value of the `resumeToken` result parameter
-     * from the previous request.
-     * 
- */ - public com.google.protobuf.ByteString - getResumeTokenBytes() { - java.lang.Object ref = resumeToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - resumeToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string resume_token = 4; - * - *
-     * Optional. If the `resumeToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `resumeToken` request
-     * parameter must be set with the value of the `resumeToken` result parameter
-     * from the previous request.
-     * 
- */ - public Builder setResumeToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - resumeToken_ = value; - onChanged(); - return this; - } - /** - * optional string resume_token = 4; - * - *
-     * Optional. If the `resumeToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `resumeToken` request
-     * parameter must be set with the value of the `resumeToken` result parameter
-     * from the previous request.
-     * 
- */ - public Builder clearResumeToken() { - - resumeToken_ = getDefaultInstance().getResumeToken(); - onChanged(); - return this; - } - /** - * optional string resume_token = 4; - * - *
-     * Optional. If the `resumeToken` request parameter is supplied, then the next
-     * page of results in the set are retrieved.  The `resumeToken` request
-     * parameter must be set with the value of the `resumeToken` result parameter
-     * from the previous request.
-     * 
- */ - public Builder setResumeTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - resumeToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ReadLogEntriesRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ReadLogEntriesRequest) - private static final com.google.logging.v2.ReadLogEntriesRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ReadLogEntriesRequest(); - } - - public static com.google.logging.v2.ReadLogEntriesRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ReadLogEntriesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ReadLogEntriesRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ReadLogEntriesRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequestOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequestOrBuilder.java deleted file mode 100644 index ef241cdf596b..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesRequestOrBuilder.java +++ /dev/null @@ -1,122 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ReadLogEntriesRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ReadLogEntriesRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - com.google.protobuf.ProtocolStringList - getProjectIdsList(); - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - int getProjectIdsCount(); - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - java.lang.String getProjectIds(int index); - /** - * repeated string project_ids = 1; - * - *
-   * Required. A list of project ids from which to retrieve log entries.
-   * Example: `"my-project-id"`.
-   * 
- */ - com.google.protobuf.ByteString - getProjectIdsBytes(int index); - - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The response includes only entries that match the filter.
-   * If `filter` is empty, then all entries in all logs are retrieved.
-   * 
- */ - java.lang.String getFilter(); - /** - * optional string filter = 2; - * - *
-   * Optional. An [advanced logs filter](/logging/docs/view/advanced_filters).
-   * The response includes only entries that match the filter.
-   * If `filter` is empty, then all entries in all logs are retrieved.
-   * 
- */ - com.google.protobuf.ByteString - getFilterBytes(); - - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps will be returned in order of `LogEntry.insertId`.
-   * 
- */ - java.lang.String getOrderBy(); - /** - * optional string order_by = 3; - * - *
-   * Optional. How the results should be sorted.  Presently, the only permitted
-   * values are `"timestamp"` (default) and `"timestamp desc"`.  The first
-   * option returns entries in order of increasing values of
-   * `LogEntry.timestamp` (oldest first), and the second option returns entries
-   * in order of decreasing timestamps (newest first).  Entries with equal
-   * timestamps will be returned in order of `LogEntry.insertId`.
-   * 
- */ - com.google.protobuf.ByteString - getOrderByBytes(); - - /** - * optional string resume_token = 4; - * - *
-   * Optional. If the `resumeToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `resumeToken` request
-   * parameter must be set with the value of the `resumeToken` result parameter
-   * from the previous request.
-   * 
- */ - java.lang.String getResumeToken(); - /** - * optional string resume_token = 4; - * - *
-   * Optional. If the `resumeToken` request parameter is supplied, then the next
-   * page of results in the set are retrieved.  The `resumeToken` request
-   * parameter must be set with the value of the `resumeToken` result parameter
-   * from the previous request.
-   * 
- */ - com.google.protobuf.ByteString - getResumeTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponse.java deleted file mode 100644 index fefa35362db2..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponse.java +++ /dev/null @@ -1,946 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.ReadLogEntriesResponse} - * - *
- * Result returned from `ReadLogEntries`.
- * 
- */ -public final class ReadLogEntriesResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.ReadLogEntriesResponse) - ReadLogEntriesResponseOrBuilder { - // Use ReadLogEntriesResponse.newBuilder() to construct. - private ReadLogEntriesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ReadLogEntriesResponse() { - entries_ = java.util.Collections.emptyList(); - resumeToken_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private ReadLogEntriesResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - entries_.add(input.readMessage(com.google.logging.v2.LogEntry.parser(), extensionRegistry)); - break; - } - case 18: { - String s = input.readStringRequireUtf8(); - - resumeToken_ = s; - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - 
return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ReadLogEntriesResponse.class, com.google.logging.v2.ReadLogEntriesResponse.Builder.class); - } - - private int bitField0_; - public static final int ENTRIES_FIELD_NUMBER = 1; - private java.util.List entries_; - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - public java.util.List getEntriesList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - public java.util.List - getEntriesOrBuilderList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - public int getEntriesCount() { - return entries_.size(); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - return entries_.get(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - return entries_.get(index); - } - - public static final int RESUME_TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object resumeToken_; - /** - * optional string resume_token = 2; - * - *
-   * A token to use to resume from this position of the stream.
-   * Note that even if there are no entries, it might still be possible
-   * to continue from this point at some later time.
-   * 
- */ - public java.lang.String getResumeToken() { - java.lang.Object ref = resumeToken_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - resumeToken_ = s; - return s; - } - } - /** - * optional string resume_token = 2; - * - *
-   * A token to use to resume from this position of the stream.
-   * Note that even if there are no entries, it might still be possible
-   * to continue from this point at some later time.
-   * 
- */ - public com.google.protobuf.ByteString - getResumeTokenBytes() { - java.lang.Object ref = resumeToken_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - resumeToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < entries_.size(); i++) { - output.writeMessage(1, entries_.get(i)); - } - if (!getResumeTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, resumeToken_); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < entries_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, entries_.get(i)); - } - if (!getResumeTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, resumeToken_); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom(byte[] 
data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.ReadLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.ReadLogEntriesResponse prototype) { - return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.ReadLogEntriesResponse} - * - *
-   * Result returned from `ReadLogEntries`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.ReadLogEntriesResponse) - com.google.logging.v2.ReadLogEntriesResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.ReadLogEntriesResponse.class, com.google.logging.v2.ReadLogEntriesResponse.Builder.class); - } - - // Construct using com.google.logging.v2.ReadLogEntriesResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getEntriesFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - entriesBuilder_.clear(); - } - resumeToken_ = ""; - - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_ReadLogEntriesResponse_descriptor; - } - - public com.google.logging.v2.ReadLogEntriesResponse getDefaultInstanceForType() { - return com.google.logging.v2.ReadLogEntriesResponse.getDefaultInstance(); - } - - public com.google.logging.v2.ReadLogEntriesResponse build() { - 
com.google.logging.v2.ReadLogEntriesResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.ReadLogEntriesResponse buildPartial() { - com.google.logging.v2.ReadLogEntriesResponse result = new com.google.logging.v2.ReadLogEntriesResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (entriesBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.entries_ = entries_; - } else { - result.entries_ = entriesBuilder_.build(); - } - result.resumeToken_ = resumeToken_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.ReadLogEntriesResponse) { - return mergeFrom((com.google.logging.v2.ReadLogEntriesResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.ReadLogEntriesResponse other) { - if (other == com.google.logging.v2.ReadLogEntriesResponse.getDefaultInstance()) return this; - if (entriesBuilder_ == null) { - if (!other.entries_.isEmpty()) { - if (entries_.isEmpty()) { - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureEntriesIsMutable(); - entries_.addAll(other.entries_); - } - onChanged(); - } - } else { - if (!other.entries_.isEmpty()) { - if (entriesBuilder_.isEmpty()) { - entriesBuilder_.dispose(); - entriesBuilder_ = null; - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000001); - entriesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getEntriesFieldBuilder() : null; - } else { - entriesBuilder_.addAllMessages(other.entries_); - } - } - } - if (!other.getResumeToken().isEmpty()) { - resumeToken_ = other.resumeToken_; - onChanged(); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.ReadLogEntriesResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.ReadLogEntriesResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List entries_ = - java.util.Collections.emptyList(); - private void ensureEntriesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - entries_ = new java.util.ArrayList(entries_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> entriesBuilder_; - - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public java.util.List getEntriesList() { - if (entriesBuilder_ == null) { - return java.util.Collections.unmodifiableList(entries_); - } else { - return entriesBuilder_.getMessageList(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public int getEntriesCount() { - if (entriesBuilder_ == null) { - return entries_.size(); - } else { - return entriesBuilder_.getCount(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); - } else { - return entriesBuilder_.getMessage(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.set(index, value); - onChanged(); - } else { - entriesBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.set(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder addEntries(com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(value); - onChanged(); - } else { - entriesBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(index, value); - onChanged(); - } else { - entriesBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder addEntries( - com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder addAllEntries( - java.lang.Iterable values) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, entries_); - onChanged(); - } else { - entriesBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder clearEntries() { - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - entriesBuilder_.clear(); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public Builder removeEntries(int index) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.remove(index); - onChanged(); - } else { - entriesBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder getEntriesBuilder( - int index) { - return getEntriesFieldBuilder().getBuilder(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); } else { - return entriesBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public java.util.List - getEntriesOrBuilderList() { - if (entriesBuilder_ != null) { - return entriesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(entries_); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder() { - return getEntriesFieldBuilder().addBuilder( - com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder( - int index) { - return getEntriesFieldBuilder().addBuilder( - index, com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-     * A list of log entries. If the list is empty, there are no more entries in
-     * the stream.
-     * 
- */ - public java.util.List - getEntriesBuilderList() { - return getEntriesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> - getEntriesFieldBuilder() { - if (entriesBuilder_ == null) { - entriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder>( - entries_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - entries_ = null; - } - return entriesBuilder_; - } - - private java.lang.Object resumeToken_ = ""; - /** - * optional string resume_token = 2; - * - *
-     * A token to use to resume from this position of the stream.
-     * Note that even if there are no entries, it might still be possible
-     * to continue from this point at some later time.
-     * 
- */ - public java.lang.String getResumeToken() { - java.lang.Object ref = resumeToken_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - resumeToken_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string resume_token = 2; - * - *
-     * A token to use to resume from this position of the stream.
-     * Note that even if there are no entries, it might still be possible
-     * to continue from this point at some later time.
-     * 
- */ - public com.google.protobuf.ByteString - getResumeTokenBytes() { - java.lang.Object ref = resumeToken_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - resumeToken_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string resume_token = 2; - * - *
-     * A token to use to resume from this position of the stream.
-     * Note that even if there are no entries, it might still be possible
-     * to continue from this point at some later time.
-     * 
- */ - public Builder setResumeToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - resumeToken_ = value; - onChanged(); - return this; - } - /** - * optional string resume_token = 2; - * - *
-     * A token to use to resume from this position of the stream.
-     * Note that even if there are no entries, it might still be possible
-     * to continue from this point at some later time.
-     * 
- */ - public Builder clearResumeToken() { - - resumeToken_ = getDefaultInstance().getResumeToken(); - onChanged(); - return this; - } - /** - * optional string resume_token = 2; - * - *
-     * A token to use to resume from this position of the stream.
-     * Note that even if there are no entries, it might still be possible
-     * to continue from this point at some later time.
-     * 
- */ - public Builder setResumeTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - resumeToken_ = value; - onChanged(); - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.ReadLogEntriesResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.ReadLogEntriesResponse) - private static final com.google.logging.v2.ReadLogEntriesResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.ReadLogEntriesResponse(); - } - - public static com.google.logging.v2.ReadLogEntriesResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public ReadLogEntriesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new ReadLogEntriesResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.ReadLogEntriesResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponseOrBuilder.java 
b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponseOrBuilder.java deleted file mode 100644 index 970a9e6cfa57..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/ReadLogEntriesResponseOrBuilder.java +++ /dev/null @@ -1,80 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface ReadLogEntriesResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.ReadLogEntriesResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - java.util.List - getEntriesList(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - com.google.logging.v2.LogEntry getEntries(int index); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - int getEntriesCount(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - java.util.List - getEntriesOrBuilderList(); - /** - * repeated .google.logging.v2.LogEntry entries = 1; - * - *
-   * A list of log entries. If the list is empty, there are no more entries in
-   * the stream.
-   * 
- */ - com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index); - - /** - * optional string resume_token = 2; - * - *
-   * A token to use to resume from this position of the stream.
-   * Note that even if there are no entries, it might still be possible
-   * to continue from this point at some later time.
-   * 
- */ - java.lang.String getResumeToken(); - /** - * optional string resume_token = 2; - * - *
-   * A token to use to resume from this position of the stream.
-   * Note that even if there are no entries, it might still be possible
-   * to continue from this point at some later time.
-   * 
- */ - com.google.protobuf.ByteString - getResumeTokenBytes(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequest.java deleted file mode 100644 index da942f15c143..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequest.java +++ /dev/null @@ -1,748 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.UpdateLogMetricRequest} - * - *
- * The parameters to UpdateLogMetric.
- * 
- */ -public final class UpdateLogMetricRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.UpdateLogMetricRequest) - UpdateLogMetricRequestOrBuilder { - // Use UpdateLogMetricRequest.newBuilder() to construct. - private UpdateLogMetricRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private UpdateLogMetricRequest() { - metricName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private UpdateLogMetricRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - metricName_ = s; - break; - } - case 18: { - com.google.logging.v2.LogMetric.Builder subBuilder = null; - if (metric_ != null) { - subBuilder = metric_.toBuilder(); - } - metric_ = input.readMessage(com.google.logging.v2.LogMetric.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(metric_); - metric_ = subBuilder.buildPartial(); - } - - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_UpdateLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.UpdateLogMetricRequest.class, com.google.logging.v2.UpdateLogMetricRequest.Builder.class); - } - - public static final int METRIC_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object metricName_; - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to update.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * The updated metric must be provided in the request and have the
-   * same identifier that is specified in `metricName`.
-   * If the metric does not exist, it is created.
-   * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } - } - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to update.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * The updated metric must be provided in the request and have the
-   * same identifier that is specified in `metricName`.
-   * If the metric does not exist, it is created.
-   * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int METRIC_FIELD_NUMBER = 2; - private com.google.logging.v2.LogMetric metric_; - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - public boolean hasMetric() { - return metric_ != null; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - public com.google.logging.v2.LogMetric getMetric() { - return metric_ == null ? com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder() { - return getMetric(); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getMetricNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, metricName_); - } - if (metric_ != null) { - output.writeMessage(2, getMetric()); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getMetricNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, metricName_); - } - if (metric_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getMetric()); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.UpdateLogMetricRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.UpdateLogMetricRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.UpdateLogMetricRequest} - * - *
-   * The parameters to UpdateLogMetric.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.UpdateLogMetricRequest) - com.google.logging.v2.UpdateLogMetricRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_UpdateLogMetricRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.UpdateLogMetricRequest.class, com.google.logging.v2.UpdateLogMetricRequest.Builder.class); - } - - // Construct using com.google.logging.v2.UpdateLogMetricRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - metricName_ = ""; - - if (metricBuilder_ == null) { - metric_ = null; - } else { - metric_ = null; - metricBuilder_ = null; - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingMetrics.internal_static_google_logging_v2_UpdateLogMetricRequest_descriptor; - } - - public com.google.logging.v2.UpdateLogMetricRequest getDefaultInstanceForType() { - return com.google.logging.v2.UpdateLogMetricRequest.getDefaultInstance(); - } - - public com.google.logging.v2.UpdateLogMetricRequest build() { - com.google.logging.v2.UpdateLogMetricRequest result = buildPartial(); - if (!result.isInitialized()) { - throw 
newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.UpdateLogMetricRequest buildPartial() { - com.google.logging.v2.UpdateLogMetricRequest result = new com.google.logging.v2.UpdateLogMetricRequest(this); - result.metricName_ = metricName_; - if (metricBuilder_ == null) { - result.metric_ = metric_; - } else { - result.metric_ = metricBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.UpdateLogMetricRequest) { - return mergeFrom((com.google.logging.v2.UpdateLogMetricRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.UpdateLogMetricRequest other) { - if (other == com.google.logging.v2.UpdateLogMetricRequest.getDefaultInstance()) return this; - if (!other.getMetricName().isEmpty()) { - metricName_ = other.metricName_; - onChanged(); - } - if (other.hasMetric()) { - mergeMetric(other.getMetric()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.UpdateLogMetricRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.UpdateLogMetricRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object metricName_ = ""; - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to update.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * The updated metric must be provided in the request and have the
-     * same identifier that is specified in `metricName`.
-     * If the metric does not exist, it is created.
-     * 
- */ - public java.lang.String getMetricName() { - java.lang.Object ref = metricName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metricName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to update.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * The updated metric must be provided in the request and have the
-     * same identifier that is specified in `metricName`.
-     * If the metric does not exist, it is created.
-     * 
- */ - public com.google.protobuf.ByteString - getMetricNameBytes() { - java.lang.Object ref = metricName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metricName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to update.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * The updated metric must be provided in the request and have the
-     * same identifier that is specified in `metricName`.
-     * If the metric does not exist, it is created.
-     * 
- */ - public Builder setMetricName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - metricName_ = value; - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to update.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * The updated metric must be provided in the request and have the
-     * same identifier that is specified in `metricName`.
-     * If the metric does not exist, it is created.
-     * 
- */ - public Builder clearMetricName() { - - metricName_ = getDefaultInstance().getMetricName(); - onChanged(); - return this; - } - /** - * optional string metric_name = 1; - * - *
-     * The resource name of the metric to update.
-     * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-     * The updated metric must be provided in the request and have the
-     * same identifier that is specified in `metricName`.
-     * If the metric does not exist, it is created.
-     * 
- */ - public Builder setMetricNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - metricName_ = value; - onChanged(); - return this; - } - - private com.google.logging.v2.LogMetric metric_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> metricBuilder_; - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public boolean hasMetric() { - return metricBuilder_ != null || metric_ != null; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public com.google.logging.v2.LogMetric getMetric() { - if (metricBuilder_ == null) { - return metric_ == null ? com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } else { - return metricBuilder_.getMessage(); - } - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public Builder setMetric(com.google.logging.v2.LogMetric value) { - if (metricBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - metric_ = value; - onChanged(); - } else { - metricBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public Builder setMetric( - com.google.logging.v2.LogMetric.Builder builderForValue) { - if (metricBuilder_ == null) { - metric_ = builderForValue.build(); - onChanged(); - } else { - metricBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public Builder mergeMetric(com.google.logging.v2.LogMetric value) { - if (metricBuilder_ == null) { - if (metric_ != null) { - metric_ = - com.google.logging.v2.LogMetric.newBuilder(metric_).mergeFrom(value).buildPartial(); - } else { - metric_ = value; - } - onChanged(); - } else { - metricBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public Builder clearMetric() { - if (metricBuilder_ == null) { - metric_ = null; - onChanged(); - } else { - metric_ = null; - metricBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public com.google.logging.v2.LogMetric.Builder getMetricBuilder() { - - onChanged(); - return getMetricFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - public com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder() { - if (metricBuilder_ != null) { - return metricBuilder_.getMessageOrBuilder(); - } else { - return metric_ == null ? - com.google.logging.v2.LogMetric.getDefaultInstance() : metric_; - } - } - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-     * The updated metric, whose name must be the same as the
-     * metric identifier in `metricName`. If `metricName` does not
-     * exist, then a new metric is created.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder> - getMetricFieldBuilder() { - if (metricBuilder_ == null) { - metricBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogMetric, com.google.logging.v2.LogMetric.Builder, com.google.logging.v2.LogMetricOrBuilder>( - getMetric(), - getParentForChildren(), - isClean()); - metric_ = null; - } - return metricBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.UpdateLogMetricRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - private static final com.google.logging.v2.UpdateLogMetricRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.UpdateLogMetricRequest(); - } - - public static com.google.logging.v2.UpdateLogMetricRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public UpdateLogMetricRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new UpdateLogMetricRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser 
getParserForType() { - return PARSER; - } - - public com.google.logging.v2.UpdateLogMetricRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequestOrBuilder.java deleted file mode 100644 index 4c203e0f314e..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateLogMetricRequestOrBuilder.java +++ /dev/null @@ -1,66 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_metrics.proto - -package com.google.logging.v2; - -public interface UpdateLogMetricRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.UpdateLogMetricRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to update.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * The updated metric must be provided in the request and have the
-   * same identifier that is specified in `metricName`.
-   * If the metric does not exist, it is created.
-   * 
- */ - java.lang.String getMetricName(); - /** - * optional string metric_name = 1; - * - *
-   * The resource name of the metric to update.
-   * Example: `"projects/my-project-id/metrics/my-metric-id"`.
-   * The updated metric must be provided in the request and have the
-   * same identifier that is specified in `metricName`.
-   * If the metric does not exist, it is created.
-   * 
- */ - com.google.protobuf.ByteString - getMetricNameBytes(); - - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - boolean hasMetric(); - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - com.google.logging.v2.LogMetric getMetric(); - /** - * optional .google.logging.v2.LogMetric metric = 2; - * - *
-   * The updated metric, whose name must be the same as the
-   * metric identifier in `metricName`. If `metricName` does not
-   * exist, then a new metric is created.
-   * 
- */ - com.google.logging.v2.LogMetricOrBuilder getMetricOrBuilder(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequest.java deleted file mode 100644 index 422b598fb9e6..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequest.java +++ /dev/null @@ -1,748 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.UpdateSinkRequest} - * - *
- * The parameters to `UpdateSink`.
- * 
- */ -public final class UpdateSinkRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.UpdateSinkRequest) - UpdateSinkRequestOrBuilder { - // Use UpdateSinkRequest.newBuilder() to construct. - private UpdateSinkRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private UpdateSinkRequest() { - sinkName_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private UpdateSinkRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - sinkName_ = s; - break; - } - case 18: { - com.google.logging.v2.LogSink.Builder subBuilder = null; - if (sink_ != null) { - subBuilder = sink_.toBuilder(); - } - sink_ = input.readMessage(com.google.logging.v2.LogSink.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(sink_); - sink_ = subBuilder.buildPartial(); - } - - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_UpdateSinkRequest_descriptor; - } - - protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.UpdateSinkRequest.class, com.google.logging.v2.UpdateSinkRequest.Builder.class); - } - - public static final int SINK_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object sinkName_; - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to update.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * The updated sink must be provided in the request and have the
-   * same name that is specified in `sinkName`.  If the sink does not
-   * exist, it is created.
-   * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } - } - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to update.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * The updated sink must be provided in the request and have the
-   * same name that is specified in `sinkName`.  If the sink does not
-   * exist, it is created.
-   * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int SINK_FIELD_NUMBER = 2; - private com.google.logging.v2.LogSink sink_; - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - public boolean hasSink() { - return sink_ != null; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - public com.google.logging.v2.LogSink getSink() { - return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() { - return getSink(); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getSinkNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, sinkName_); - } - if (sink_ != null) { - output.writeMessage(2, getSink()); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getSinkNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, sinkName_); - } - if (sink_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getSink()); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, 
extensionRegistry); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.UpdateSinkRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.UpdateSinkRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.UpdateSinkRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.UpdateSinkRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.UpdateSinkRequest} - * - *
-   * The parameters to `UpdateSink`.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.UpdateSinkRequest) - com.google.logging.v2.UpdateSinkRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_UpdateSinkRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_UpdateSinkRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.UpdateSinkRequest.class, com.google.logging.v2.UpdateSinkRequest.Builder.class); - } - - // Construct using com.google.logging.v2.UpdateSinkRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - sinkName_ = ""; - - if (sinkBuilder_ == null) { - sink_ = null; - } else { - sink_ = null; - sinkBuilder_ = null; - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingConfig.internal_static_google_logging_v2_UpdateSinkRequest_descriptor; - } - - public com.google.logging.v2.UpdateSinkRequest getDefaultInstanceForType() { - return com.google.logging.v2.UpdateSinkRequest.getDefaultInstance(); - } - - public com.google.logging.v2.UpdateSinkRequest build() { - com.google.logging.v2.UpdateSinkRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public 
com.google.logging.v2.UpdateSinkRequest buildPartial() { - com.google.logging.v2.UpdateSinkRequest result = new com.google.logging.v2.UpdateSinkRequest(this); - result.sinkName_ = sinkName_; - if (sinkBuilder_ == null) { - result.sink_ = sink_; - } else { - result.sink_ = sinkBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.UpdateSinkRequest) { - return mergeFrom((com.google.logging.v2.UpdateSinkRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.UpdateSinkRequest other) { - if (other == com.google.logging.v2.UpdateSinkRequest.getDefaultInstance()) return this; - if (!other.getSinkName().isEmpty()) { - sinkName_ = other.sinkName_; - onChanged(); - } - if (other.hasSink()) { - mergeSink(other.getSink()); - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.UpdateSinkRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.UpdateSinkRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private java.lang.Object sinkName_ = ""; - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to update.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * The updated sink must be provided in the request and have the
-     * same name that is specified in `sinkName`.  If the sink does not
-     * exist, it is created.
-     * 
- */ - public java.lang.String getSinkName() { - java.lang.Object ref = sinkName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - sinkName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to update.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * The updated sink must be provided in the request and have the
-     * same name that is specified in `sinkName`.  If the sink does not
-     * exist, it is created.
-     * 
- */ - public com.google.protobuf.ByteString - getSinkNameBytes() { - java.lang.Object ref = sinkName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - sinkName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to update.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * The updated sink must be provided in the request and have the
-     * same name that is specified in `sinkName`.  If the sink does not
-     * exist, it is created.
-     * 
- */ - public Builder setSinkName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - sinkName_ = value; - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to update.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * The updated sink must be provided in the request and have the
-     * same name that is specified in `sinkName`.  If the sink does not
-     * exist, it is created.
-     * 
- */ - public Builder clearSinkName() { - - sinkName_ = getDefaultInstance().getSinkName(); - onChanged(); - return this; - } - /** - * optional string sink_name = 1; - * - *
-     * The resource name of the sink to update.
-     * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-     * The updated sink must be provided in the request and have the
-     * same name that is specified in `sinkName`.  If the sink does not
-     * exist, it is created.
-     * 
- */ - public Builder setSinkNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - sinkName_ = value; - onChanged(); - return this; - } - - private com.google.logging.v2.LogSink sink_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> sinkBuilder_; - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public boolean hasSink() { - return sinkBuilder_ != null || sink_ != null; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public com.google.logging.v2.LogSink getSink() { - if (sinkBuilder_ == null) { - return sink_ == null ? com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } else { - return sinkBuilder_.getMessage(); - } - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public Builder setSink(com.google.logging.v2.LogSink value) { - if (sinkBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - sink_ = value; - onChanged(); - } else { - sinkBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public Builder setSink( - com.google.logging.v2.LogSink.Builder builderForValue) { - if (sinkBuilder_ == null) { - sink_ = builderForValue.build(); - onChanged(); - } else { - sinkBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public Builder mergeSink(com.google.logging.v2.LogSink value) { - if (sinkBuilder_ == null) { - if (sink_ != null) { - sink_ = - com.google.logging.v2.LogSink.newBuilder(sink_).mergeFrom(value).buildPartial(); - } else { - sink_ = value; - } - onChanged(); - } else { - sinkBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public Builder clearSink() { - if (sinkBuilder_ == null) { - sink_ = null; - onChanged(); - } else { - sink_ = null; - sinkBuilder_ = null; - } - - return this; - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public com.google.logging.v2.LogSink.Builder getSinkBuilder() { - - onChanged(); - return getSinkFieldBuilder().getBuilder(); - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - public com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder() { - if (sinkBuilder_ != null) { - return sinkBuilder_.getMessageOrBuilder(); - } else { - return sink_ == null ? - com.google.logging.v2.LogSink.getDefaultInstance() : sink_; - } - } - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-     * The updated sink, whose name must be the same as the sink
-     * identifier in `sinkName`.  If `sinkName` does not exist, then
-     * this method creates a new sink.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder> - getSinkFieldBuilder() { - if (sinkBuilder_ == null) { - sinkBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.logging.v2.LogSink, com.google.logging.v2.LogSink.Builder, com.google.logging.v2.LogSinkOrBuilder>( - getSink(), - getParentForChildren(), - isClean()); - sink_ = null; - } - return sinkBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.UpdateSinkRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - private static final com.google.logging.v2.UpdateSinkRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.UpdateSinkRequest(); - } - - public static com.google.logging.v2.UpdateSinkRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public UpdateSinkRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new UpdateSinkRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public 
com.google.logging.v2.UpdateSinkRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequestOrBuilder.java deleted file mode 100644 index 665ec44341f9..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/UpdateSinkRequestOrBuilder.java +++ /dev/null @@ -1,66 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging_config.proto - -package com.google.logging.v2; - -public interface UpdateSinkRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.UpdateSinkRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to update.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * The updated sink must be provided in the request and have the
-   * same name that is specified in `sinkName`.  If the sink does not
-   * exist, it is created.
-   * 
- */ - java.lang.String getSinkName(); - /** - * optional string sink_name = 1; - * - *
-   * The resource name of the sink to update.
-   * Example: `"projects/my-project-id/sinks/my-sink-id"`.
-   * The updated sink must be provided in the request and have the
-   * same name that is specified in `sinkName`.  If the sink does not
-   * exist, it is created.
-   * 
- */ - com.google.protobuf.ByteString - getSinkNameBytes(); - - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - boolean hasSink(); - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - com.google.logging.v2.LogSink getSink(); - /** - * optional .google.logging.v2.LogSink sink = 2; - * - *
-   * The updated sink, whose name must be the same as the sink
-   * identifier in `sinkName`.  If `sinkName` does not exist, then
-   * this method creates a new sink.
-   * 
- */ - com.google.logging.v2.LogSinkOrBuilder getSinkOrBuilder(); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequest.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequest.java deleted file mode 100644 index 15abf64119ce..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequest.java +++ /dev/null @@ -1,1356 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.WriteLogEntriesRequest} - * - *
- * The parameters to WriteLogEntries.
- * 
- */ -public final class WriteLogEntriesRequest extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.WriteLogEntriesRequest) - WriteLogEntriesRequestOrBuilder { - // Use WriteLogEntriesRequest.newBuilder() to construct. - private WriteLogEntriesRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private WriteLogEntriesRequest() { - logName_ = ""; - entries_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private WriteLogEntriesRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - int mutable_bitField0_ = 0; - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - case 10: { - String s = input.readStringRequireUtf8(); - - logName_ = s; - break; - } - case 18: { - com.google.api.MonitoredResource.Builder subBuilder = null; - if (resource_ != null) { - subBuilder = resource_.toBuilder(); - } - resource_ = input.readMessage(com.google.api.MonitoredResource.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(resource_); - resource_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { - if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { - labels_ = com.google.protobuf.MapField.newMapField( - LabelsDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000004; - } - com.google.protobuf.MapEntry - labels = input.readMessage( - LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); - labels_.getMutableMap().put(labels.getKey(), labels.getValue()); - break; - } - case 34: { - if (!((mutable_bitField0_ & 
0x00000008) == 0x00000008)) { - entries_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000008; - } - entries_.add(input.readMessage(com.google.logging.v2.LogEntry.parser(), extensionRegistry)); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - } - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 3: - return internalGetLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.WriteLogEntriesRequest.class, com.google.logging.v2.WriteLogEntriesRequest.Builder.class); - } - - private int bitField0_; - public static final int LOG_NAME_FIELD_NUMBER = 1; - private volatile java.lang.Object logName_; - /** - * optional string log_name = 1; - * - *
-   * Optional. A default log resource name for those log entries in `entries`
-   * that do not specify their own `logName`.  Example:
-   * `"projects/my-project/logs/syslog"`.  See
-   * [LogEntry][google.logging.v2.LogEntry].
-   * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } - } - /** - * optional string log_name = 1; - * - *
-   * Optional. A default log resource name for those log entries in `entries`
-   * that do not specify their own `logName`.  Example:
-   * `"projects/my-project/logs/syslog"`.  See
-   * [LogEntry][google.logging.v2.LogEntry].
-   * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int RESOURCE_FIELD_NUMBER = 2; - private com.google.api.MonitoredResource resource_; - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - public boolean hasResource() { - return resource_ != null; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - public com.google.api.MonitoredResource getResource() { - return resource_ == null ? com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - public com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder() { - return getResource(); - } - - public static final int LABELS_FIELD_NUMBER = 3; - private static final class LabelsDefaultEntryHolder { - static final com.google.protobuf.MapEntry< - java.lang.String, java.lang.String> defaultEntry = - com.google.protobuf.MapEntry - .newDefaultInstance( - com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_LabelsEntry_descriptor, - com.google.protobuf.WireFormat.FieldType.STRING, - "", - com.google.protobuf.WireFormat.FieldType.STRING, - ""); - } - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> labels_; - private com.google.protobuf.MapField - internalGetLabels() { - if (labels_ == null) { - return com.google.protobuf.MapField.emptyMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - return labels_; - } - /** - * map<string, string> labels = 3; - * - *
-   * Optional. User-defined `key:value` items that are added to
-   * the `labels` field of each log entry in `entries`, except when a log
-   * entry specifies its own 'key:value' item with the same key.
-   * Example: `{ "size": "large", "color":"red" }`
-   * 
- */ - - public java.util.Map getLabels() { - return internalGetLabels().getMap(); - } - - public static final int ENTRIES_FIELD_NUMBER = 4; - private java.util.List entries_; - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - public java.util.List getEntriesList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - public java.util.List - getEntriesOrBuilderList() { - return entries_; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - public int getEntriesCount() { - return entries_.size(); - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - return entries_.get(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - return entries_.get(index); - } - - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (!getLogNameBytes().isEmpty()) { - com.google.protobuf.GeneratedMessage.writeString(output, 1, logName_); - } - if (resource_ != null) { - output.writeMessage(2, getResource()); - } - for (java.util.Map.Entry entry - : internalGetLabels().getMap().entrySet()) { - com.google.protobuf.MapEntry - labels = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() - .setKey(entry.getKey()) - .setValue(entry.getValue()) - .build(); - output.writeMessage(3, labels); - } - for (int i = 0; i < entries_.size(); i++) { - output.writeMessage(4, entries_.get(i)); - } - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (!getLogNameBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(1, logName_); - } - if (resource_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getResource()); - } - for (java.util.Map.Entry entry - : internalGetLabels().getMap().entrySet()) { - com.google.protobuf.MapEntry - labels = LabelsDefaultEntryHolder.defaultEntry.newBuilderForType() - .setKey(entry.getKey()) - .setValue(entry.getValue()) - .build(); - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, labels); - } - for (int i = 0; i < entries_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, entries_.get(i)); - } - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static 
com.google.logging.v2.WriteLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - 
public static com.google.logging.v2.WriteLogEntriesRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.WriteLogEntriesRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.WriteLogEntriesRequest} - * - *
-   * The parameters to WriteLogEntries.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.WriteLogEntriesRequest) - com.google.logging.v2.WriteLogEntriesRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 3: - return internalGetLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField( - int number) { - switch (number) { - case 3: - return internalGetMutableLabels(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.WriteLogEntriesRequest.class, com.google.logging.v2.WriteLogEntriesRequest.Builder.class); - } - - // Construct using com.google.logging.v2.WriteLogEntriesRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getEntriesFieldBuilder(); - } - } - public Builder clear() { - super.clear(); - logName_ = ""; - - if (resourceBuilder_ == null) { - resource_ = null; - } else { - resource_ = null; - resourceBuilder_ = null; - } 
- internalGetMutableLabels().clear(); - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - } else { - entriesBuilder_.clear(); - } - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesRequest_descriptor; - } - - public com.google.logging.v2.WriteLogEntriesRequest getDefaultInstanceForType() { - return com.google.logging.v2.WriteLogEntriesRequest.getDefaultInstance(); - } - - public com.google.logging.v2.WriteLogEntriesRequest build() { - com.google.logging.v2.WriteLogEntriesRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.WriteLogEntriesRequest buildPartial() { - com.google.logging.v2.WriteLogEntriesRequest result = new com.google.logging.v2.WriteLogEntriesRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - result.logName_ = logName_; - if (resourceBuilder_ == null) { - result.resource_ = resource_; - } else { - result.resource_ = resourceBuilder_.build(); - } - result.labels_ = internalGetLabels(); - result.labels_.makeImmutable(); - if (entriesBuilder_ == null) { - if (((bitField0_ & 0x00000008) == 0x00000008)) { - entries_ = java.util.Collections.unmodifiableList(entries_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.entries_ = entries_; - } else { - result.entries_ = entriesBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.WriteLogEntriesRequest) { - return mergeFrom((com.google.logging.v2.WriteLogEntriesRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(com.google.logging.v2.WriteLogEntriesRequest other) { - if (other == com.google.logging.v2.WriteLogEntriesRequest.getDefaultInstance()) return this; - if (!other.getLogName().isEmpty()) { - logName_ = other.logName_; - onChanged(); - } - if (other.hasResource()) { - mergeResource(other.getResource()); - } - internalGetMutableLabels().mergeFrom( - other.internalGetLabels()); - if (entriesBuilder_ == null) { - if (!other.entries_.isEmpty()) { - if (entries_.isEmpty()) { - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensureEntriesIsMutable(); - entries_.addAll(other.entries_); - } - onChanged(); - } - } else { - if (!other.entries_.isEmpty()) { - if (entriesBuilder_.isEmpty()) { - entriesBuilder_.dispose(); - entriesBuilder_ = null; - entries_ = other.entries_; - bitField0_ = (bitField0_ & ~0x00000008); - entriesBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getEntriesFieldBuilder() : null; - } else { - entriesBuilder_.addAllMessages(other.entries_); - } - } - } - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.WriteLogEntriesRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.WriteLogEntriesRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.lang.Object logName_ = ""; - /** - * optional string log_name = 1; - * - *
-     * Optional. A default log resource name for those log entries in `entries`
-     * that do not specify their own `logName`.  Example:
-     * `"projects/my-project/logs/syslog"`.  See
-     * [LogEntry][google.logging.v2.LogEntry].
-     * 
- */ - public java.lang.String getLogName() { - java.lang.Object ref = logName_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - logName_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string log_name = 1; - * - *
-     * Optional. A default log resource name for those log entries in `entries`
-     * that do not specify their own `logName`.  Example:
-     * `"projects/my-project/logs/syslog"`.  See
-     * [LogEntry][google.logging.v2.LogEntry].
-     * 
- */ - public com.google.protobuf.ByteString - getLogNameBytes() { - java.lang.Object ref = logName_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - logName_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string log_name = 1; - * - *
-     * Optional. A default log resource name for those log entries in `entries`
-     * that do not specify their own `logName`.  Example:
-     * `"projects/my-project/logs/syslog"`.  See
-     * [LogEntry][google.logging.v2.LogEntry].
-     * 
- */ - public Builder setLogName( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - logName_ = value; - onChanged(); - return this; - } - /** - * optional string log_name = 1; - * - *
-     * Optional. A default log resource name for those log entries in `entries`
-     * that do not specify their own `logName`.  Example:
-     * `"projects/my-project/logs/syslog"`.  See
-     * [LogEntry][google.logging.v2.LogEntry].
-     * 
- */ - public Builder clearLogName() { - - logName_ = getDefaultInstance().getLogName(); - onChanged(); - return this; - } - /** - * optional string log_name = 1; - * - *
-     * Optional. A default log resource name for those log entries in `entries`
-     * that do not specify their own `logName`.  Example:
-     * `"projects/my-project/logs/syslog"`.  See
-     * [LogEntry][google.logging.v2.LogEntry].
-     * 
- */ - public Builder setLogNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - logName_ = value; - onChanged(); - return this; - } - - private com.google.api.MonitoredResource resource_ = null; - private com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> resourceBuilder_; - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public boolean hasResource() { - return resourceBuilder_ != null || resource_ != null; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public com.google.api.MonitoredResource getResource() { - if (resourceBuilder_ == null) { - return resource_ == null ? com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } else { - return resourceBuilder_.getMessage(); - } - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public Builder setResource(com.google.api.MonitoredResource value) { - if (resourceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - resource_ = value; - onChanged(); - } else { - resourceBuilder_.setMessage(value); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public Builder setResource( - com.google.api.MonitoredResource.Builder builderForValue) { - if (resourceBuilder_ == null) { - resource_ = builderForValue.build(); - onChanged(); - } else { - resourceBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public Builder mergeResource(com.google.api.MonitoredResource value) { - if (resourceBuilder_ == null) { - if (resource_ != null) { - resource_ = - com.google.api.MonitoredResource.newBuilder(resource_).mergeFrom(value).buildPartial(); - } else { - resource_ = value; - } - onChanged(); - } else { - resourceBuilder_.mergeFrom(value); - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public Builder clearResource() { - if (resourceBuilder_ == null) { - resource_ = null; - onChanged(); - } else { - resource_ = null; - resourceBuilder_ = null; - } - - return this; - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public com.google.api.MonitoredResource.Builder getResourceBuilder() { - - onChanged(); - return getResourceFieldBuilder().getBuilder(); - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - public com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder() { - if (resourceBuilder_ != null) { - return resourceBuilder_.getMessageOrBuilder(); - } else { - return resource_ == null ? - com.google.api.MonitoredResource.getDefaultInstance() : resource_; - } - } - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-     * Optional. A default monitored resource for those log entries in `entries`
-     * that do not specify their own `resource`.
-     * 
- */ - private com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder> - getResourceFieldBuilder() { - if (resourceBuilder_ == null) { - resourceBuilder_ = new com.google.protobuf.SingleFieldBuilder< - com.google.api.MonitoredResource, com.google.api.MonitoredResource.Builder, com.google.api.MonitoredResourceOrBuilder>( - getResource(), - getParentForChildren(), - isClean()); - resource_ = null; - } - return resourceBuilder_; - } - - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> labels_; - private com.google.protobuf.MapField - internalGetLabels() { - if (labels_ == null) { - return com.google.protobuf.MapField.emptyMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - return labels_; - } - private com.google.protobuf.MapField - internalGetMutableLabels() { - onChanged();; - if (labels_ == null) { - labels_ = com.google.protobuf.MapField.newMapField( - LabelsDefaultEntryHolder.defaultEntry); - } - if (!labels_.isMutable()) { - labels_ = labels_.copy(); - } - return labels_; - } - /** - * map<string, string> labels = 3; - * - *
-     * Optional. User-defined `key:value` items that are added to
-     * the `labels` field of each log entry in `entries`, except when a log
-     * entry specifies its own 'key:value' item with the same key.
-     * Example: `{ "size": "large", "color":"red" }`
-     * 
- */ - public java.util.Map getLabels() { - return internalGetLabels().getMap(); - } - /** - * map<string, string> labels = 3; - * - *
-     * Optional. User-defined `key:value` items that are added to
-     * the `labels` field of each log entry in `entries`, except when a log
-     * entry specifies its own 'key:value' item with the same key.
-     * Example: `{ "size": "large", "color":"red" }`
-     * 
- */ - public java.util.Map - getMutableLabels() { - return internalGetMutableLabels().getMutableMap(); - } - /** - * map<string, string> labels = 3; - * - *
-     * Optional. User-defined `key:value` items that are added to
-     * the `labels` field of each log entry in `entries`, except when a log
-     * entry specifies its own 'key:value' item with the same key.
-     * Example: `{ "size": "large", "color":"red" }`
-     * 
- */ - public Builder putAllLabels( - java.util.Map values) { - getMutableLabels().putAll(values); - return this; - } - - private java.util.List entries_ = - java.util.Collections.emptyList(); - private void ensureEntriesIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - entries_ = new java.util.ArrayList(entries_); - bitField0_ |= 0x00000008; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> entriesBuilder_; - - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public java.util.List getEntriesList() { - if (entriesBuilder_ == null) { - return java.util.Collections.unmodifiableList(entries_); - } else { - return entriesBuilder_.getMessageList(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public int getEntriesCount() { - if (entriesBuilder_ == null) { - return entries_.size(); - } else { - return entriesBuilder_.getCount(); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public com.google.logging.v2.LogEntry getEntries(int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); - } else { - return entriesBuilder_.getMessage(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.set(index, value); - onChanged(); - } else { - entriesBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder setEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.set(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder addEntries(com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(value); - onChanged(); - } else { - entriesBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry value) { - if (entriesBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureEntriesIsMutable(); - entries_.add(index, value); - onChanged(); - } else { - entriesBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder addEntries( - com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder addEntries( - int index, com.google.logging.v2.LogEntry.Builder builderForValue) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.add(index, builderForValue.build()); - onChanged(); - } else { - entriesBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder addAllEntries( - java.lang.Iterable values) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, entries_); - onChanged(); - } else { - entriesBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder clearEntries() { - if (entriesBuilder_ == null) { - entries_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - } else { - entriesBuilder_.clear(); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public Builder removeEntries(int index) { - if (entriesBuilder_ == null) { - ensureEntriesIsMutable(); - entries_.remove(index); - onChanged(); - } else { - entriesBuilder_.remove(index); - } - return this; - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder getEntriesBuilder( - int index) { - return getEntriesFieldBuilder().getBuilder(index); - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index) { - if (entriesBuilder_ == null) { - return entries_.get(index); } else { - return entriesBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public java.util.List - getEntriesOrBuilderList() { - if (entriesBuilder_ != null) { - return entriesBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(entries_); - } - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder() { - return getEntriesFieldBuilder().addBuilder( - com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public com.google.logging.v2.LogEntry.Builder addEntriesBuilder( - int index) { - return getEntriesFieldBuilder().addBuilder( - index, com.google.logging.v2.LogEntry.getDefaultInstance()); - } - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-     * Required. The log entries to write. The log entries must have values for
-     * all required fields.
-     * 
- */ - public java.util.List - getEntriesBuilderList() { - return getEntriesFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder> - getEntriesFieldBuilder() { - if (entriesBuilder_ == null) { - entriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - com.google.logging.v2.LogEntry, com.google.logging.v2.LogEntry.Builder, com.google.logging.v2.LogEntryOrBuilder>( - entries_, - ((bitField0_ & 0x00000008) == 0x00000008), - getParentForChildren(), - isClean()); - entries_ = null; - } - return entriesBuilder_; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.WriteLogEntriesRequest) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - private static final com.google.logging.v2.WriteLogEntriesRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.WriteLogEntriesRequest(); - } - - public static com.google.logging.v2.WriteLogEntriesRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public WriteLogEntriesRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new WriteLogEntriesRequest(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; 
- } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.WriteLogEntriesRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequestOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequestOrBuilder.java deleted file mode 100644 index 015655a8c602..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesRequestOrBuilder.java +++ /dev/null @@ -1,123 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface WriteLogEntriesRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.WriteLogEntriesRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * optional string log_name = 1; - * - *
-   * Optional. A default log resource name for those log entries in `entries`
-   * that do not specify their own `logName`.  Example:
-   * `"projects/my-project/logs/syslog"`.  See
-   * [LogEntry][google.logging.v2.LogEntry].
-   * 
- */ - java.lang.String getLogName(); - /** - * optional string log_name = 1; - * - *
-   * Optional. A default log resource name for those log entries in `entries`
-   * that do not specify their own `logName`.  Example:
-   * `"projects/my-project/logs/syslog"`.  See
-   * [LogEntry][google.logging.v2.LogEntry].
-   * 
- */ - com.google.protobuf.ByteString - getLogNameBytes(); - - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - boolean hasResource(); - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - com.google.api.MonitoredResource getResource(); - /** - * optional .google.api.MonitoredResource resource = 2; - * - *
-   * Optional. A default monitored resource for those log entries in `entries`
-   * that do not specify their own `resource`.
-   * 
- */ - com.google.api.MonitoredResourceOrBuilder getResourceOrBuilder(); - - /** - * map<string, string> labels = 3; - * - *
-   * Optional. User-defined `key:value` items that are added to
-   * the `labels` field of each log entry in `entries`, except when a log
-   * entry specifies its own 'key:value' item with the same key.
-   * Example: `{ "size": "large", "color":"red" }`
-   * 
- */ - java.util.Map - getLabels(); - - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - java.util.List - getEntriesList(); - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - com.google.logging.v2.LogEntry getEntries(int index); - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - int getEntriesCount(); - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - java.util.List - getEntriesOrBuilderList(); - /** - * repeated .google.logging.v2.LogEntry entries = 4; - * - *
-   * Required. The log entries to write. The log entries must have values for
-   * all required fields.
-   * 
- */ - com.google.logging.v2.LogEntryOrBuilder getEntriesOrBuilder( - int index); -} diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponse.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponse.java deleted file mode 100644 index 7e587c2f085c..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponse.java +++ /dev/null @@ -1,324 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -/** - * Protobuf type {@code google.logging.v2.WriteLogEntriesResponse} - * - *
- * Result returned from WriteLogEntries.
- * 
- */ -public final class WriteLogEntriesResponse extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:google.logging.v2.WriteLogEntriesResponse) - WriteLogEntriesResponseOrBuilder { - // Use WriteLogEntriesResponse.newBuilder() to construct. - private WriteLogEntriesResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private WriteLogEntriesResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); - } - private WriteLogEntriesResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) { - this(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!input.skipField(tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw new RuntimeException(e.setUnfinishedMessage(this)); - } catch (java.io.IOException e) { - throw new RuntimeException( - new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this)); - } finally { - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.WriteLogEntriesResponse.class, com.google.logging.v2.WriteLogEntriesResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - public final boolean 
isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - } - - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - memoizedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseDelimitedFrom(java.io.InputStream input) - throws 
java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static com.google.logging.v2.WriteLogEntriesResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(com.google.logging.v2.WriteLogEntriesResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code google.logging.v2.WriteLogEntriesResponse} - * - *
-   * Result returned from WriteLogEntries.
-   * 
- */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:google.logging.v2.WriteLogEntriesResponse) - com.google.logging.v2.WriteLogEntriesResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - com.google.logging.v2.WriteLogEntriesResponse.class, com.google.logging.v2.WriteLogEntriesResponse.Builder.class); - } - - // Construct using com.google.logging.v2.WriteLogEntriesResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - public Builder clear() { - super.clear(); - return this; - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return com.google.logging.v2.LoggingProto.internal_static_google_logging_v2_WriteLogEntriesResponse_descriptor; - } - - public com.google.logging.v2.WriteLogEntriesResponse getDefaultInstanceForType() { - return com.google.logging.v2.WriteLogEntriesResponse.getDefaultInstance(); - } - - public com.google.logging.v2.WriteLogEntriesResponse build() { - com.google.logging.v2.WriteLogEntriesResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public com.google.logging.v2.WriteLogEntriesResponse 
buildPartial() { - com.google.logging.v2.WriteLogEntriesResponse result = new com.google.logging.v2.WriteLogEntriesResponse(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof com.google.logging.v2.WriteLogEntriesResponse) { - return mergeFrom((com.google.logging.v2.WriteLogEntriesResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(com.google.logging.v2.WriteLogEntriesResponse other) { - if (other == com.google.logging.v2.WriteLogEntriesResponse.getDefaultInstance()) return this; - onChanged(); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.logging.v2.WriteLogEntriesResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (com.google.logging.v2.WriteLogEntriesResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return this; - } - - - // @@protoc_insertion_point(builder_scope:google.logging.v2.WriteLogEntriesResponse) - } - - // @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - private static final com.google.logging.v2.WriteLogEntriesResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new com.google.logging.v2.WriteLogEntriesResponse(); - } - - public static com.google.logging.v2.WriteLogEntriesResponse getDefaultInstance() { - return 
DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - public WriteLogEntriesResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - try { - return new WriteLogEntriesResponse(input, extensionRegistry); - } catch (RuntimeException e) { - if (e.getCause() instanceof - com.google.protobuf.InvalidProtocolBufferException) { - throw (com.google.protobuf.InvalidProtocolBufferException) - e.getCause(); - } - throw e; - } - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - public com.google.logging.v2.WriteLogEntriesResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponseOrBuilder.java b/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponseOrBuilder.java deleted file mode 100644 index f034c806d8a3..000000000000 --- a/gcloud-java-logging/generated/src/main/java/com/google/logging/v2/WriteLogEntriesResponseOrBuilder.java +++ /dev/null @@ -1,9 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: google/logging/v2/logging.proto - -package com.google.logging.v2; - -public interface WriteLogEntriesResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:google.logging.v2.WriteLogEntriesResponse) - com.google.protobuf.MessageOrBuilder { -} diff --git a/gcloud-java-logging/pom.xml b/gcloud-java-logging/pom.xml index f8ddd17e8083..745a2a2f394e 100644 --- a/gcloud-java-logging/pom.xml +++ b/gcloud-java-logging/pom.xml @@ -11,16 +11,31 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.4 gcloud-java-logging - ${project.groupId} - gcloud-java-gax - ${project.version} + com.google.api + gax + 0.0.4 + + + com.google.api.grpc + grpc-core-proto + 0.0.2 + + + com.google.api.grpc + grpc-logging-type + 0.0.1 + + + com.google.api.grpc + grpc-logging-v2 + 0.0.1 io.grpc diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java index 51bbbc34f2a6..5cd5202961ab 100644 --- a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Api.java @@ -30,9 +30,12 @@ * * Happy editing! 
*/ + package com.google.gcloud.logging.spi.v2; -import com.google.logging.v2.ConfigServiceV2Grpc; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; import com.google.logging.v2.CreateSinkRequest; import com.google.logging.v2.DeleteSinkRequest; import com.google.logging.v2.GetSinkRequest; @@ -41,115 +44,124 @@ import com.google.logging.v2.LogSink; import com.google.logging.v2.UpdateSinkRequest; import com.google.protobuf.Empty; -import io.gapi.gax.grpc.ApiCallable; -import io.gapi.gax.grpc.PageDescriptor; -import io.gapi.gax.grpc.ServiceApiSettings; -import io.gapi.gax.internal.ApiUtils; -import io.gapi.gax.protobuf.PathTemplate; import io.grpc.ManagedChannel; +import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; // Manually-added imports: add custom (non-generated) imports after this point. - - // AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. /** - * See //google/logging/v2/logging.proto for documentation + * Service Description: See src/api/google/logging/v2/logging.proto for documentation * * * */ -@javax.annotation.Generated("by API code generation") +@javax.annotation.Generated("by GAPIC") public class ConfigServiceV2Api implements AutoCloseable { - // ========= - // Constants - // ========= - - /** - * The default address of the service. - * - * - * - */ - public static final String SERVICE_ADDRESS = "logging.googleapis.com"; - - /** - * The default port of the service. 
- * - * - * - */ - public static final int DEFAULT_SERVICE_PORT = 443; - - - private static final ApiCallable - LIST_SINKS = ApiCallable.create(ConfigServiceV2Grpc.METHOD_LIST_SINKS); - private static final ApiCallable - GET_SINK = ApiCallable.create(ConfigServiceV2Grpc.METHOD_GET_SINK); - private static final ApiCallable - CREATE_SINK = ApiCallable.create(ConfigServiceV2Grpc.METHOD_CREATE_SINK); - private static final ApiCallable - UPDATE_SINK = ApiCallable.create(ConfigServiceV2Grpc.METHOD_UPDATE_SINK); - private static final ApiCallable - DELETE_SINK = ApiCallable.create(ConfigServiceV2Grpc.METHOD_DELETE_SINK); - - private static PageDescriptor LIST_SINKS_PAGE_DESC = - new PageDescriptor() { - @Override - public Object emptyToken() { - return ""; - } - @Override - public ListSinksRequest injectToken( - ListSinksRequest payload, Object token) { - return ListSinksRequest - .newBuilder(payload) - .setPageToken((String) token) - .build(); - } - @Override - public Object extractNextToken(ListSinksResponse payload) { - return payload.getNextPageToken(); - } - @Override - public Iterable extractResources(ListSinksResponse payload) { - return payload.getSinksList(); - } - }; - - private static String ALL_SCOPES[] = { - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/logging.admin" - }; - - /** - * A PathTemplate representing the fully-qualified path to represent - * a project_name resource. - * - * - * - */ - private static final PathTemplate PROJECT_NAME_PATH_TEMPLATE = - PathTemplate.create("/projects/{project}"); - /** - * A PathTemplate representing the fully-qualified path to represent - * a sink_name resource. 
- * - * - * - */ - private static final PathTemplate SINK_NAME_PATH_TEMPLATE = - PathTemplate.create("/projects/{project}/sinks/{sink}"); + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a project resource. + * + * + * + */ + private static final PathTemplate PROJECT_PATH_TEMPLATE = + PathTemplate.create("projects/{project}"); + + /** + * A PathTemplate representing the fully-qualified path to represent + * a sink resource. + * + * + * + */ + private static final PathTemplate SINK_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/sinks/{sink}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a project resource. + * + * + * + */ + public static final String formatProjectPath(String project) { + return PROJECT_PATH_TEMPLATE.instantiate("project", project); + } + + /** + * Formats a string containing the fully-qualified path to represent + * a sink resource. + * + * + * + */ + public static final String formatSinkPath(String project, String sink) { + return SINK_PATH_TEMPLATE.instantiate("project", project, "sink", sink); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a project resource. + * + * + * + */ + public static final String parseProjectFromProjectPath(String projectPath) { + return PROJECT_PATH_TEMPLATE.parse(projectPath).get("project"); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a sink resource. 
+ * + * + * + */ + public static final String parseProjectFromSinkPath(String sinkPath) { + return SINK_PATH_TEMPLATE.parse(sinkPath).get("project"); + } + + /** + * Parses the sink from the given fully-qualified path which + * represents a sink resource. + * + * + * + */ + public static final String parseSinkFromSinkPath(String sinkPath) { + return SINK_PATH_TEMPLATE.parse(sinkPath).get("sink"); + } + } // ======== // Members // ======== private final ManagedChannel channel; - private final ServiceApiSettings settings; + private final List closeables = new ArrayList<>(); + + private final ApiCallable listSinksCallable; + private final ApiCallable> listSinksIterableCallable; + private final ApiCallable getSinkCallable; + private final ApiCallable createSinkCallable; + private final ApiCallable updateSinkCallable; + private final ApiCallable deleteSinkCallable; // =============== // Factory Methods @@ -162,7 +174,7 @@ public Iterable extractResources(ListSinksResponse payload) { * */ public static ConfigServiceV2Api create() throws IOException { - return create(new ServiceApiSettings()); + return create(ConfigServiceV2Settings.create()); } /** @@ -172,7 +184,7 @@ public static ConfigServiceV2Api create() throws IOException { * * */ - public static ConfigServiceV2Api create(ServiceApiSettings settings) throws IOException { + public static ConfigServiceV2Api create(ConfigServiceV2Settings settings) throws IOException { return new ConfigServiceV2Api(settings); } @@ -183,76 +195,25 @@ public static ConfigServiceV2Api create(ServiceApiSettings settings) throws IOEx * * */ - protected ConfigServiceV2Api(ServiceApiSettings settings) throws IOException { - ServiceApiSettings internalSettings = ApiUtils.populateSettings(settings, - SERVICE_ADDRESS, DEFAULT_SERVICE_PORT, ALL_SCOPES); - this.settings = internalSettings; - this.channel = internalSettings.getChannel(); - } - - // ============================== - // Resource Name Helper Functions - // 
============================== - - /** - * Creates a string containing the fully-qualified path to represent - * a project_name resource. - * - * - * - */ - public static final String createProjectNamePath(String project) { - return PROJECT_NAME_PATH_TEMPLATE.instantiate( - "project", project); - } - - /** - * Creates a string containing the fully-qualified path to represent - * a sink_name resource. - * - * - * - */ - public static final String createSinkNamePath(String project, String sink) { - return SINK_NAME_PATH_TEMPLATE.instantiate( - "project", project,"sink", sink); - } - - - /** - * Extracts the project from the given fully-qualified path which - * represents a projectName resource. - * - * - * - */ - public static final String extractProjectFromProjectNamePath(String projectNamePath) { - return PROJECT_NAME_PATH_TEMPLATE.parse(projectNamePath).get("project"); + protected ConfigServiceV2Api(ConfigServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.listSinksCallable = settings.listSinksMethod().build(settings); + this.listSinksIterableCallable = settings.listSinksMethod().buildPageStreaming(settings); + this.getSinkCallable = settings.getSinkMethod().build(settings); + this.createSinkCallable = settings.createSinkMethod().build(settings); + this.updateSinkCallable = settings.updateSinkMethod().build(settings); + this.deleteSinkCallable = settings.deleteSinkMethod().build(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); } - /** - * Extracts the project from the given fully-qualified path which - * represents a sinkName resource. - * - * - * - */ - public static final String extractProjectFromSinkNamePath(String sinkNamePath) { - return SINK_NAME_PATH_TEMPLATE.parse(sinkNamePath).get("project"); - } - - /** - * Extracts the sink from the given fully-qualified path which - * represents a sinkName resource. 
- * - * - * - */ - public static final String extractSinkFromSinkNamePath(String sinkNamePath) { - return SINK_NAME_PATH_TEMPLATE.parse(sinkNamePath).get("sink"); - } - - // ============= // Service Calls // ============= @@ -267,10 +228,7 @@ public static final String extractSinkFromSinkNamePath(String sinkNamePath) { * */ public Iterable listSinks(String projectName) { - ListSinksRequest request = - ListSinksRequest.newBuilder() - .setProjectName(projectName) - .build(); + ListSinksRequest request = ListSinksRequest.newBuilder().setProjectName(projectName).build(); return listSinks(request); } @@ -284,8 +242,7 @@ public Iterable listSinks(String projectName) { * @param request The request object containing all of the parameters for the API call. */ public Iterable listSinks(ListSinksRequest request) { - return listSinksStreamingCallable() - .iterableResponseStreamCall(request); + return listSinksIterableCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -295,8 +252,8 @@ public Iterable listSinks(ListSinksRequest request) { * * */ - public ApiCallable listSinksStreamingCallable() { - return listSinksCallable().pageStreaming(LIST_SINKS_PAGE_DESC); + public ApiCallable> listSinksIterableCallable() { + return listSinksIterableCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -307,7 +264,7 @@ public ApiCallable listSinksStreamingCallable() { * */ public ApiCallable listSinksCallable() { - return ApiUtils.prepareIdempotentCallable(LIST_SINKS, settings).bind(channel); + return listSinksCallable; } // ----- getSink ----- @@ -323,10 +280,7 @@ public ApiCallable listSinksCallable() { * Example: `"projects/my-project-id/sinks/my-sink-id"`. 
*/ public LogSink getSink(String sinkName) { - GetSinkRequest request = - GetSinkRequest.newBuilder() - .setSinkName(sinkName) - .build(); + GetSinkRequest request = GetSinkRequest.newBuilder().setSinkName(sinkName).build(); return getSink(request); } @@ -340,7 +294,7 @@ public LogSink getSink(String sinkName) { * * @param request The request object containing all of the parameters for the API call. */ - public LogSink getSink(GetSinkRequest request) { + private LogSink getSink(GetSinkRequest request) { return getSinkCallable().call(request); } @@ -352,7 +306,7 @@ public LogSink getSink(GetSinkRequest request) { * */ public ApiCallable getSinkCallable() { - return ApiUtils.prepareIdempotentCallable(GET_SINK, settings).bind(channel); + return getSinkCallable; } // ----- createSink ----- @@ -373,10 +327,7 @@ public ApiCallable getSinkCallable() { */ public LogSink createSink(String projectName, LogSink sink) { CreateSinkRequest request = - CreateSinkRequest.newBuilder() - .setProjectName(projectName) - .setSink(sink) - .build(); + CreateSinkRequest.newBuilder().setProjectName(projectName).setSink(sink).build(); return createSink(request); } @@ -402,7 +353,7 @@ public LogSink createSink(CreateSinkRequest request) { * */ public ApiCallable createSinkCallable() { - return CREATE_SINK.bind(channel); + return createSinkCallable; } // ----- updateSink ----- @@ -426,10 +377,7 @@ public ApiCallable createSinkCallable() { */ public LogSink updateSink(String sinkName, LogSink sink) { UpdateSinkRequest request = - UpdateSinkRequest.newBuilder() - .setSinkName(sinkName) - .setSink(sink) - .build(); + UpdateSinkRequest.newBuilder().setSinkName(sinkName).setSink(sink).build(); return updateSink(request); } @@ -455,7 +403,7 @@ public LogSink updateSink(UpdateSinkRequest request) { * */ public ApiCallable updateSinkCallable() { - return ApiUtils.prepareIdempotentCallable(UPDATE_SINK, settings).bind(channel); + return updateSinkCallable; } // ----- deleteSink ----- @@ -471,10 +419,7 
@@ public ApiCallable updateSinkCallable() { * Example: `"projects/my-project-id/sinks/my-sink-id"`. */ public void deleteSink(String sinkName) { - DeleteSinkRequest request = - DeleteSinkRequest.newBuilder() - .setSinkName(sinkName) - .build(); + DeleteSinkRequest request = DeleteSinkRequest.newBuilder().setSinkName(sinkName).build(); deleteSink(request); } @@ -488,7 +433,7 @@ public void deleteSink(String sinkName) { * * @param request The request object containing all of the parameters for the API call. */ - public void deleteSink(DeleteSinkRequest request) { + private void deleteSink(DeleteSinkRequest request) { deleteSinkCallable().call(request); } @@ -500,10 +445,9 @@ public void deleteSink(DeleteSinkRequest request) { * */ public ApiCallable deleteSinkCallable() { - return ApiUtils.prepareIdempotentCallable(DELETE_SINK, settings).bind(channel); + return deleteSinkCallable; } - // ======== // Cleanup // ======== @@ -516,16 +460,12 @@ public ApiCallable deleteSinkCallable() { * */ @Override - public void close() { - // Manually-added shutdown code - - // Auto-generated shutdown code - channel.shutdown(); - - // Manually-added shutdown code + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } } - // ======== // Manually-added methods: add custom (non-generated) methods after this point. // ======== diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java new file mode 100644 index 000000000000..e4571fcd200b --- /dev/null +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/ConfigServiceV2Settings.java @@ -0,0 +1,289 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_config.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! 
+ */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.ConfigServiceV2Grpc; +import com.google.logging.v2.CreateSinkRequest; +import com.google.logging.v2.DeleteSinkRequest; +import com.google.logging.v2.GetSinkRequest; +import com.google.logging.v2.ListSinksRequest; +import com.google.logging.v2.ListSinksResponse; +import com.google.logging.v2.LogSink; +import com.google.logging.v2.UpdateSinkRequest; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class ConfigServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. + * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. + * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. 
+ */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final PageStreamingApiCallableBuilder + listSinksMethod; + private final ApiCallableBuilder getSinkMethod; + private final ApiCallableBuilder createSinkMethod; + private final ApiCallableBuilder updateSinkMethod; + private final ApiCallableBuilder deleteSinkMethod; + private final ImmutableList allMethods; + + public MethodBuilders() { + listSinksMethod = + new PageStreamingApiCallableBuilder<>( + ConfigServiceV2Grpc.METHOD_LIST_SINKS, LIST_SINKS_PAGE_STR_DESC); + 
listSinksMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listSinksMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + getSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_GET_SINK); + getSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + getSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + createSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_CREATE_SINK); + createSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + createSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + updateSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_UPDATE_SINK); + updateSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + updateSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + deleteSinkMethod = new ApiCallableBuilder<>(ConfigServiceV2Grpc.METHOD_DELETE_SINK); + deleteSinkMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteSinkMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + ImmutableList.builder() + .add( + listSinksMethod, + getSinkMethod, + createSinkMethod, + updateSinkMethod, + deleteSinkMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of ConfigServiceV2Settings with default settings. + * + * + * + */ + public static ConfigServiceV2Settings create() { + ConfigServiceV2Settings settings = new ConfigServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of ConfigServiceV2Settings with default settings. 
This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected ConfigServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listSinks. + * + * + * + */ + public PageStreamingApiCallableBuilder + listSinksMethod() { + return methods.listSinksMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method getSink. + * + * + * + */ + public ApiCallableBuilder getSinkMethod() { + return methods.getSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method createSink. + * + * + * + */ + public ApiCallableBuilder createSinkMethod() { + return methods.createSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method updateSink. + * + * + * + */ + public ApiCallableBuilder updateSinkMethod() { + return methods.updateSinkMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteSink. 
+ * + * + * + */ + public ApiCallableBuilder deleteSinkMethod() { + return methods.deleteSinkMethod; + } + + private static PageDescriptor + LIST_SINKS_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListSinksRequest injectToken(ListSinksRequest payload, Object token) { + return ListSinksRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListSinksResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListSinksResponse payload) { + return payload.getSinksList(); + } + }; +} diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java index 86285b90bfdc..1c705866d051 100644 --- a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Api.java @@ -30,172 +30,121 @@ * * Happy editing! 
*/ + package com.google.gcloud.logging.spi.v2; import com.google.api.MonitoredResource; import com.google.api.MonitoredResourceDescriptor; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; import com.google.logging.v2.DeleteLogRequest; import com.google.logging.v2.ListLogEntriesRequest; import com.google.logging.v2.ListLogEntriesResponse; import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest; import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse; import com.google.logging.v2.LogEntry; -import com.google.logging.v2.LoggingServiceV2Grpc; import com.google.logging.v2.ReadLogEntriesRequest; import com.google.logging.v2.ReadLogEntriesResponse; import com.google.logging.v2.WriteLogEntriesRequest; import com.google.logging.v2.WriteLogEntriesResponse; import com.google.protobuf.Empty; -import io.gapi.gax.grpc.ApiCallable; -import io.gapi.gax.grpc.PageDescriptor; -import io.gapi.gax.grpc.ServiceApiSettings; -import io.gapi.gax.internal.ApiUtils; -import io.gapi.gax.protobuf.PathTemplate; import io.grpc.ManagedChannel; +import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; // Manually-added imports: add custom (non-generated) imports after this point. - - // AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. /** - * Service for ingesting and querying logs. + * Service Description: Service for ingesting and querying logs. * * * */ -@javax.annotation.Generated("by API code generation") +@javax.annotation.Generated("by GAPIC") public class LoggingServiceV2Api implements AutoCloseable { - // ========= - // Constants - // ========= - - /** - * The default address of the service. - * - * - * - */ - public static final String SERVICE_ADDRESS = "logging.googleapis.com"; - - /** - * The default port of the service. 
- * - * - * - */ - public static final int DEFAULT_SERVICE_PORT = 443; - - - private static final ApiCallable - DELETE_LOG = ApiCallable.create(LoggingServiceV2Grpc.METHOD_DELETE_LOG); - private static final ApiCallable - WRITE_LOG_ENTRIES = ApiCallable.create(LoggingServiceV2Grpc.METHOD_WRITE_LOG_ENTRIES); - private static final ApiCallable - LIST_LOG_ENTRIES = ApiCallable.create(LoggingServiceV2Grpc.METHOD_LIST_LOG_ENTRIES); - private static final ApiCallable - READ_LOG_ENTRIES = ApiCallable.create(LoggingServiceV2Grpc.METHOD_READ_LOG_ENTRIES); - private static final ApiCallable - LIST_MONITORED_RESOURCE_DESCRIPTORS = ApiCallable.create(LoggingServiceV2Grpc.METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS); - - private static PageDescriptor LIST_LOG_ENTRIES_PAGE_DESC = - new PageDescriptor() { - @Override - public Object emptyToken() { - return ""; - } - @Override - public ListLogEntriesRequest injectToken( - ListLogEntriesRequest payload, Object token) { - return ListLogEntriesRequest - .newBuilder(payload) - .setPageToken((String) token) - .build(); - } - @Override - public Object extractNextToken(ListLogEntriesResponse payload) { - return payload.getNextPageToken(); - } - @Override - public Iterable extractResources(ListLogEntriesResponse payload) { - return payload.getEntriesList(); - } - }; - - private static PageDescriptor READ_LOG_ENTRIES_PAGE_DESC = - new PageDescriptor() { - @Override - public Object emptyToken() { - return ""; - } - @Override - public ReadLogEntriesRequest injectToken( - ReadLogEntriesRequest payload, Object token) { - return ReadLogEntriesRequest - .newBuilder(payload) - .setResumeToken((String) token) - .build(); - } - @Override - public Object extractNextToken(ReadLogEntriesResponse payload) { - return payload.getResumeToken(); - } - @Override - public Iterable extractResources(ReadLogEntriesResponse payload) { - return payload.getEntriesList(); - } - }; - - private static PageDescriptor LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_DESC = - 
new PageDescriptor() { - @Override - public Object emptyToken() { - return ""; - } - @Override - public ListMonitoredResourceDescriptorsRequest injectToken( - ListMonitoredResourceDescriptorsRequest payload, Object token) { - return ListMonitoredResourceDescriptorsRequest - .newBuilder(payload) - .setPageToken((String) token) - .build(); - } - @Override - public Object extractNextToken(ListMonitoredResourceDescriptorsResponse payload) { - return payload.getNextPageToken(); - } - @Override - public Iterable extractResources(ListMonitoredResourceDescriptorsResponse payload) { - return payload.getResourceDescriptorsList(); - } - }; - - private static String ALL_SCOPES[] = { - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/logging.admin" - }; - - /** - * A PathTemplate representing the fully-qualified path to represent - * a log_name resource. - * - * - * - */ - private static final PathTemplate LOG_NAME_PATH_TEMPLATE = - PathTemplate.create("/projects/{project}/logs/{log}"); + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a log resource. + * + * + * + */ + private static final PathTemplate LOG_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/logs/{log}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a log resource. + * + * + * + */ + public static final String formatLogPath(String project, String log) { + return LOG_PATH_TEMPLATE.instantiate("project", project, "log", log); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a log resource. 
+ * + * + * + */ + public static final String parseProjectFromLogPath(String logPath) { + return LOG_PATH_TEMPLATE.parse(logPath).get("project"); + } + + /** + * Parses the log from the given fully-qualified path which + * represents a log resource. + * + * + * + */ + public static final String parseLogFromLogPath(String logPath) { + return LOG_PATH_TEMPLATE.parse(logPath).get("log"); + } + } // ======== // Members // ======== private final ManagedChannel channel; - private final ServiceApiSettings settings; + private final List closeables = new ArrayList<>(); + + private final ApiCallable deleteLogCallable; + private final ApiCallable + writeLogEntriesCallable; + private final ApiCallable listLogEntriesCallable; + private final ApiCallable> + listLogEntriesIterableCallable; + private final ApiCallable readLogEntriesCallable; + private final ApiCallable> + readLogEntriesIterableCallable; + private final ApiCallable< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse> + listMonitoredResourceDescriptorsCallable; + private final ApiCallable< + ListMonitoredResourceDescriptorsRequest, Iterable> + listMonitoredResourceDescriptorsIterableCallable; // =============== // Factory Methods @@ -208,7 +157,7 @@ public Iterable extractResources(ListMonitoredResou * */ public static LoggingServiceV2Api create() throws IOException { - return create(new ServiceApiSettings()); + return create(LoggingServiceV2Settings.create()); } /** @@ -218,7 +167,7 @@ public static LoggingServiceV2Api create() throws IOException { * * */ - public static LoggingServiceV2Api create(ServiceApiSettings settings) throws IOException { + public static LoggingServiceV2Api create(LoggingServiceV2Settings settings) throws IOException { return new LoggingServiceV2Api(settings); } @@ -229,53 +178,31 @@ public static LoggingServiceV2Api create(ServiceApiSettings settings) throws IOE * * */ - protected LoggingServiceV2Api(ServiceApiSettings settings) throws IOException { - 
ServiceApiSettings internalSettings = ApiUtils.populateSettings(settings, - SERVICE_ADDRESS, DEFAULT_SERVICE_PORT, ALL_SCOPES); - this.settings = internalSettings; - this.channel = internalSettings.getChannel(); - } - - // ============================== - // Resource Name Helper Functions - // ============================== - - /** - * Creates a string containing the fully-qualified path to represent - * a log_name resource. - * - * - * - */ - public static final String createLogNamePath(String project, String log) { - return LOG_NAME_PATH_TEMPLATE.instantiate( - "project", project,"log", log); - } - - - /** - * Extracts the project from the given fully-qualified path which - * represents a logName resource. - * - * - * - */ - public static final String extractProjectFromLogNamePath(String logNamePath) { - return LOG_NAME_PATH_TEMPLATE.parse(logNamePath).get("project"); - } - - /** - * Extracts the log from the given fully-qualified path which - * represents a logName resource. - * - * - * - */ - public static final String extractLogFromLogNamePath(String logNamePath) { - return LOG_NAME_PATH_TEMPLATE.parse(logNamePath).get("log"); + protected LoggingServiceV2Api(LoggingServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.deleteLogCallable = settings.deleteLogMethod().build(settings); + this.writeLogEntriesCallable = settings.writeLogEntriesMethod().build(settings); + this.listLogEntriesCallable = settings.listLogEntriesMethod().build(settings); + this.listLogEntriesIterableCallable = + settings.listLogEntriesMethod().buildPageStreaming(settings); + this.readLogEntriesCallable = settings.readLogEntriesMethod().build(settings); + this.readLogEntriesIterableCallable = + settings.readLogEntriesMethod().buildPageStreaming(settings); + this.listMonitoredResourceDescriptorsCallable = + settings.listMonitoredResourceDescriptorsMethod().build(settings); + this.listMonitoredResourceDescriptorsIterableCallable = + 
settings.listMonitoredResourceDescriptorsMethod().buildPageStreaming(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); } - // ============= // Service Calls // ============= @@ -294,10 +221,7 @@ public static final String extractLogFromLogNamePath(String logNamePath) { * `"projects/my-project/logs/syslog"`. */ public void deleteLog(String logName) { - DeleteLogRequest request = - DeleteLogRequest.newBuilder() - .setLogName(logName) - .build(); + DeleteLogRequest request = DeleteLogRequest.newBuilder().setLogName(logName).build(); deleteLog(request); } @@ -312,7 +236,7 @@ public void deleteLog(String logName) { * * @param request The request object containing all of the parameters for the API call. */ - public void deleteLog(DeleteLogRequest request) { + private void deleteLog(DeleteLogRequest request) { deleteLogCallable().call(request); } @@ -325,7 +249,7 @@ public void deleteLog(DeleteLogRequest request) { * */ public ApiCallable deleteLogCallable() { - return ApiUtils.prepareIdempotentCallable(DELETE_LOG, settings).bind(channel); + return deleteLogCallable; } // ----- writeLogEntries ----- @@ -351,14 +275,18 @@ public ApiCallable deleteLogCallable() { * @param entries Required. The log entries to write. The log entries must have values for * all required fields. 
*/ - public WriteLogEntriesResponse writeLogEntries(String logName, MonitoredResource resource, Map labels, List entries) { + public WriteLogEntriesResponse writeLogEntries( + String logName, + MonitoredResource resource, + Map labels, + List entries) { WriteLogEntriesRequest request = WriteLogEntriesRequest.newBuilder() - .setLogName(logName) - .setResource(resource) - .putAllLabels(labels) - .addAllEntries(entries) - .build(); + .setLogName(logName) + .setResource(resource) + .putAllLabels(labels) + .addAllEntries(entries) + .build(); return writeLogEntries(request); } @@ -386,7 +314,7 @@ public WriteLogEntriesResponse writeLogEntries(WriteLogEntriesRequest request) { * */ public ApiCallable writeLogEntriesCallable() { - return WRITE_LOG_ENTRIES.bind(channel); + return writeLogEntriesCallable; } // ----- listLogEntries ----- @@ -404,10 +332,10 @@ public ApiCallable writeLogEntr public Iterable listLogEntries(List projectIds, String filter, String orderBy) { ListLogEntriesRequest request = ListLogEntriesRequest.newBuilder() - .addAllProjectIds(projectIds) - .setFilter(filter) - .setOrderBy(orderBy) - .build(); + .addAllProjectIds(projectIds) + .setFilter(filter) + .setOrderBy(orderBy) + .build(); return listLogEntries(request); } @@ -424,8 +352,7 @@ public Iterable listLogEntries(List projectIds, String filter, * @param request The request object containing all of the parameters for the API call. */ public Iterable listLogEntries(ListLogEntriesRequest request) { - return listLogEntriesStreamingCallable() - .iterableResponseStreamCall(request); + return listLogEntriesIterableCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. 
@@ -438,8 +365,8 @@ public Iterable listLogEntries(ListLogEntriesRequest request) { * * */ - public ApiCallable listLogEntriesStreamingCallable() { - return listLogEntriesCallable().pageStreaming(LIST_LOG_ENTRIES_PAGE_DESC); + public ApiCallable> listLogEntriesIterableCallable() { + return listLogEntriesIterableCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -453,7 +380,7 @@ public ApiCallable listLogEntriesStreamingCalla * */ public ApiCallable listLogEntriesCallable() { - return LIST_LOG_ENTRIES.bind(channel); + return listLogEntriesCallable; } // ----- readLogEntries ----- @@ -469,10 +396,10 @@ public ApiCallable listLogEntries public Iterable readLogEntries(List projectIds, String filter, String orderBy) { ReadLogEntriesRequest request = ReadLogEntriesRequest.newBuilder() - .addAllProjectIds(projectIds) - .setFilter(filter) - .setOrderBy(orderBy) - .build(); + .addAllProjectIds(projectIds) + .setFilter(filter) + .setOrderBy(orderBy) + .build(); return readLogEntries(request); } @@ -487,8 +414,7 @@ public Iterable readLogEntries(List projectIds, String filter, * @param request The request object containing all of the parameters for the API call. */ public Iterable readLogEntries(ReadLogEntriesRequest request) { - return readLogEntriesStreamingCallable() - .iterableResponseStreamCall(request); + return readLogEntriesIterableCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -499,8 +425,8 @@ public Iterable readLogEntries(ReadLogEntriesRequest request) { * * */ - public ApiCallable readLogEntriesStreamingCallable() { - return readLogEntriesCallable().pageStreaming(READ_LOG_ENTRIES_PAGE_DESC); + public ApiCallable> readLogEntriesIterableCallable() { + return readLogEntriesIterableCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. 
@@ -512,7 +438,7 @@ public ApiCallable readLogEntriesStreamingCalla * */ public ApiCallable readLogEntriesCallable() { - return READ_LOG_ENTRIES.bind(channel); + return readLogEntriesCallable; } // ----- listMonitoredResourceDescriptors ----- @@ -526,9 +452,9 @@ public ApiCallable readLogEntries * * @param request The request object containing all of the parameters for the API call. */ - public Iterable listMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest request) { - return listMonitoredResourceDescriptorsStreamingCallable() - .iterableResponseStreamCall(request); + public Iterable listMonitoredResourceDescriptors( + ListMonitoredResourceDescriptorsRequest request) { + return listMonitoredResourceDescriptorsIterableCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -538,8 +464,9 @@ public Iterable listMonitoredResourceDescriptors(Li * * */ - public ApiCallable listMonitoredResourceDescriptorsStreamingCallable() { - return listMonitoredResourceDescriptorsCallable().pageStreaming(LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_DESC); + public ApiCallable> + listMonitoredResourceDescriptorsIterableCallable() { + return listMonitoredResourceDescriptorsIterableCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. 
@@ -549,11 +476,12 @@ public ApiCallable * */ - public ApiCallable listMonitoredResourceDescriptorsCallable() { - return ApiUtils.prepareIdempotentCallable(LIST_MONITORED_RESOURCE_DESCRIPTORS, settings).bind(channel); + public ApiCallable< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse> + listMonitoredResourceDescriptorsCallable() { + return listMonitoredResourceDescriptorsCallable; } - // ======== // Cleanup // ======== @@ -566,16 +494,12 @@ public ApiCallable */ @Override - public void close() { - // Manually-added shutdown code - - // Auto-generated shutdown code - channel.shutdown(); - - // Manually-added shutdown code + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } } - // ======== // Manually-added methods: add custom (non-generated) methods after this point. // ======== diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java new file mode 100644 index 000000000000..a7e5035bc1e5 --- /dev/null +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2Settings.java @@ -0,0 +1,370 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! + */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.MonitoredResourceDescriptor; +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.DeleteLogRequest; +import com.google.logging.v2.ListLogEntriesRequest; +import com.google.logging.v2.ListLogEntriesResponse; +import com.google.logging.v2.ListMonitoredResourceDescriptorsRequest; +import com.google.logging.v2.ListMonitoredResourceDescriptorsResponse; +import com.google.logging.v2.LogEntry; +import com.google.logging.v2.LoggingServiceV2Grpc; +import 
com.google.logging.v2.ReadLogEntriesRequest; +import com.google.logging.v2.ReadLogEntriesResponse; +import com.google.logging.v2.WriteLogEntriesRequest; +import com.google.logging.v2.WriteLogEntriesResponse; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class LoggingServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. + * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. + * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. + */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + 
.setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final ApiCallableBuilder deleteLogMethod; + private final ApiCallableBuilder + writeLogEntriesMethod; + private final PageStreamingApiCallableBuilder< + ListLogEntriesRequest, ListLogEntriesResponse, LogEntry> + listLogEntriesMethod; + private final PageStreamingApiCallableBuilder< + ReadLogEntriesRequest, ReadLogEntriesResponse, LogEntry> + readLogEntriesMethod; + private final PageStreamingApiCallableBuilder< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + listMonitoredResourceDescriptorsMethod; + private final ImmutableList allMethods; + + public MethodBuilders() { + deleteLogMethod = new ApiCallableBuilder<>(LoggingServiceV2Grpc.METHOD_DELETE_LOG); + deleteLogMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteLogMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + writeLogEntriesMethod = + new ApiCallableBuilder<>(LoggingServiceV2Grpc.METHOD_WRITE_LOG_ENTRIES); + writeLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + writeLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + listLogEntriesMethod = + new PageStreamingApiCallableBuilder<>( + LoggingServiceV2Grpc.METHOD_LIST_LOG_ENTRIES, LIST_LOG_ENTRIES_PAGE_STR_DESC); + listLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + readLogEntriesMethod = + new PageStreamingApiCallableBuilder<>( + 
LoggingServiceV2Grpc.METHOD_READ_LOG_ENTRIES, READ_LOG_ENTRIES_PAGE_STR_DESC); + readLogEntriesMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + readLogEntriesMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + listMonitoredResourceDescriptorsMethod = + new PageStreamingApiCallableBuilder<>( + LoggingServiceV2Grpc.METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS, + LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC); + listMonitoredResourceDescriptorsMethod.setRetryableCodes( + RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listMonitoredResourceDescriptorsMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + ImmutableList.builder() + .add( + deleteLogMethod, + writeLogEntriesMethod, + listLogEntriesMethod, + readLogEntriesMethod, + listMonitoredResourceDescriptorsMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of LoggingServiceV2Settings with default settings. + * + * + * + */ + public static LoggingServiceV2Settings create() { + LoggingServiceV2Settings settings = new LoggingServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of LoggingServiceV2Settings with default settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected LoggingServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteLog. 
+ * + * + * + */ + public ApiCallableBuilder deleteLogMethod() { + return methods.deleteLogMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method writeLogEntries. + * + * + * + */ + public ApiCallableBuilder + writeLogEntriesMethod() { + return methods.writeLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listLogEntries. + * + * + * + */ + public PageStreamingApiCallableBuilder + listLogEntriesMethod() { + return methods.listLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method readLogEntries. + * + * + * + */ + public PageStreamingApiCallableBuilder + readLogEntriesMethod() { + return methods.readLogEntriesMethod; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listMonitoredResourceDescriptors. + * + * + * + */ + public PageStreamingApiCallableBuilder< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + listMonitoredResourceDescriptorsMethod() { + return methods.listMonitoredResourceDescriptorsMethod; + } + + private static PageDescriptor + LIST_LOG_ENTRIES_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListLogEntriesRequest injectToken(ListLogEntriesRequest payload, Object token) { + return ListLogEntriesRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListLogEntriesResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListLogEntriesResponse payload) { + return payload.getEntriesList(); + } + }; + + private static PageDescriptor + READ_LOG_ENTRIES_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ReadLogEntriesRequest injectToken(ReadLogEntriesRequest payload, Object token) { + return 
ReadLogEntriesRequest.newBuilder(payload) + .setResumeToken((String) token) + .build(); + } + + @Override + public Object extractNextToken(ReadLogEntriesResponse payload) { + return payload.getResumeToken(); + } + + @Override + public Iterable extractResources(ReadLogEntriesResponse payload) { + return payload.getEntriesList(); + } + }; + + private static PageDescriptor< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor> + LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC = + new PageDescriptor< + ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, + MonitoredResourceDescriptor>() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListMonitoredResourceDescriptorsRequest injectToken( + ListMonitoredResourceDescriptorsRequest payload, Object token) { + return ListMonitoredResourceDescriptorsRequest.newBuilder(payload) + .setPageToken((String) token) + .build(); + } + + @Override + public Object extractNextToken(ListMonitoredResourceDescriptorsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources( + ListMonitoredResourceDescriptorsResponse payload) { + return payload.getResourceDescriptorsList(); + } + }; +} diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java index 78d408550ffb..fb919e1e017f 100644 --- a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Api.java @@ -30,126 +30,139 @@ * * Happy editing! 
*/ + package com.google.gcloud.logging.spi.v2; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable; +import com.google.api.gax.protobuf.PathTemplate; import com.google.logging.v2.CreateLogMetricRequest; import com.google.logging.v2.DeleteLogMetricRequest; import com.google.logging.v2.GetLogMetricRequest; import com.google.logging.v2.ListLogMetricsRequest; import com.google.logging.v2.ListLogMetricsResponse; import com.google.logging.v2.LogMetric; -import com.google.logging.v2.MetricsServiceV2Grpc; import com.google.logging.v2.UpdateLogMetricRequest; import com.google.protobuf.Empty; -import io.gapi.gax.grpc.ApiCallable; -import io.gapi.gax.grpc.PageDescriptor; -import io.gapi.gax.grpc.ServiceApiSettings; -import io.gapi.gax.internal.ApiUtils; -import io.gapi.gax.protobuf.PathTemplate; import io.grpc.ManagedChannel; +import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; // Manually-added imports: add custom (non-generated) imports after this point. - - // AUTO-GENERATED DOCUMENTATION AND SERVICE - see instructions at the top of the file for editing. /** - * See //google/logging/v1/logging.proto for documentation + * Service Description: See src/api/google/logging/v1/logging.proto for documentation * * * */ -@javax.annotation.Generated("by API code generation") +@javax.annotation.Generated("by GAPIC") public class MetricsServiceV2Api implements AutoCloseable { - // ========= - // Constants - // ========= - - /** - * The default address of the service. - * - * - * - */ - public static final String SERVICE_ADDRESS = "logging.googleapis.com"; - - /** - * The default port of the service. 
- * - * - * - */ - public static final int DEFAULT_SERVICE_PORT = 443; - - - private static final ApiCallable - LIST_LOG_METRICS = ApiCallable.create(MetricsServiceV2Grpc.METHOD_LIST_LOG_METRICS); - private static final ApiCallable - GET_LOG_METRIC = ApiCallable.create(MetricsServiceV2Grpc.METHOD_GET_LOG_METRIC); - private static final ApiCallable - CREATE_LOG_METRIC = ApiCallable.create(MetricsServiceV2Grpc.METHOD_CREATE_LOG_METRIC); - private static final ApiCallable - UPDATE_LOG_METRIC = ApiCallable.create(MetricsServiceV2Grpc.METHOD_UPDATE_LOG_METRIC); - private static final ApiCallable - DELETE_LOG_METRIC = ApiCallable.create(MetricsServiceV2Grpc.METHOD_DELETE_LOG_METRIC); - - private static PageDescriptor LIST_LOG_METRICS_PAGE_DESC = - new PageDescriptor() { - @Override - public Object emptyToken() { - return ""; - } - @Override - public ListLogMetricsRequest injectToken( - ListLogMetricsRequest payload, Object token) { - return ListLogMetricsRequest - .newBuilder(payload) - .setPageToken((String) token) - .build(); - } - @Override - public Object extractNextToken(ListLogMetricsResponse payload) { - return payload.getNextPageToken(); - } - @Override - public Iterable extractResources(ListLogMetricsResponse payload) { - return payload.getMetricsList(); - } - }; - - private static String ALL_SCOPES[] = { - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.write", - "https://www.googleapis.com/auth/logging.admin" - }; - - /** - * A PathTemplate representing the fully-qualified path to represent - * a project_name resource. - * - * - * - */ - private static final PathTemplate PROJECT_NAME_PATH_TEMPLATE = - PathTemplate.create("/projects/{project}"); - /** - * A PathTemplate representing the fully-qualified path to represent - * a metric_name resource. 
- * - * - * - */ - private static final PathTemplate METRIC_NAME_PATH_TEMPLATE = - PathTemplate.create("/projects/{project}/metrics/{metric}"); + public static class ResourceNames { + private ResourceNames() {} + + // ======================= + // ResourceNames Constants + // ======================= + + /** + * A PathTemplate representing the fully-qualified path to represent + * a project resource. + * + * + * + */ + private static final PathTemplate PROJECT_PATH_TEMPLATE = + PathTemplate.create("projects/{project}"); + + /** + * A PathTemplate representing the fully-qualified path to represent + * a metric resource. + * + * + * + */ + private static final PathTemplate METRIC_PATH_TEMPLATE = + PathTemplate.create("projects/{project}/metrics/{metric}"); + + // ============================== + // Resource Name Helper Functions + // ============================== + + /** + * Formats a string containing the fully-qualified path to represent + * a project resource. + * + * + * + */ + public static final String formatProjectPath(String project) { + return PROJECT_PATH_TEMPLATE.instantiate("project", project); + } + + /** + * Formats a string containing the fully-qualified path to represent + * a metric resource. + * + * + * + */ + public static final String formatMetricPath(String project, String metric) { + return METRIC_PATH_TEMPLATE.instantiate("project", project, "metric", metric); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a project resource. + * + * + * + */ + public static final String parseProjectFromProjectPath(String projectPath) { + return PROJECT_PATH_TEMPLATE.parse(projectPath).get("project"); + } + + /** + * Parses the project from the given fully-qualified path which + * represents a metric resource. 
+ * + * + * + */ + public static final String parseProjectFromMetricPath(String metricPath) { + return METRIC_PATH_TEMPLATE.parse(metricPath).get("project"); + } + + /** + * Parses the metric from the given fully-qualified path which + * represents a metric resource. + * + * + * + */ + public static final String parseMetricFromMetricPath(String metricPath) { + return METRIC_PATH_TEMPLATE.parse(metricPath).get("metric"); + } + } // ======== // Members // ======== private final ManagedChannel channel; - private final ServiceApiSettings settings; + private final List closeables = new ArrayList<>(); + + private final ApiCallable listLogMetricsCallable; + private final ApiCallable> + listLogMetricsIterableCallable; + private final ApiCallable getLogMetricCallable; + private final ApiCallable createLogMetricCallable; + private final ApiCallable updateLogMetricCallable; + private final ApiCallable deleteLogMetricCallable; // =============== // Factory Methods @@ -162,7 +175,7 @@ public Iterable extractResources(ListLogMetricsResponse payload) { * */ public static MetricsServiceV2Api create() throws IOException { - return create(new ServiceApiSettings()); + return create(MetricsServiceV2Settings.create()); } /** @@ -172,7 +185,7 @@ public static MetricsServiceV2Api create() throws IOException { * * */ - public static MetricsServiceV2Api create(ServiceApiSettings settings) throws IOException { + public static MetricsServiceV2Api create(MetricsServiceV2Settings settings) throws IOException { return new MetricsServiceV2Api(settings); } @@ -183,76 +196,26 @@ public static MetricsServiceV2Api create(ServiceApiSettings settings) throws IOE * * */ - protected MetricsServiceV2Api(ServiceApiSettings settings) throws IOException { - ServiceApiSettings internalSettings = ApiUtils.populateSettings(settings, - SERVICE_ADDRESS, DEFAULT_SERVICE_PORT, ALL_SCOPES); - this.settings = internalSettings; - this.channel = internalSettings.getChannel(); - } - - // ============================== 
- // Resource Name Helper Functions - // ============================== - - /** - * Creates a string containing the fully-qualified path to represent - * a project_name resource. - * - * - * - */ - public static final String createProjectNamePath(String project) { - return PROJECT_NAME_PATH_TEMPLATE.instantiate( - "project", project); - } - - /** - * Creates a string containing the fully-qualified path to represent - * a metric_name resource. - * - * - * - */ - public static final String createMetricNamePath(String project, String metric) { - return METRIC_NAME_PATH_TEMPLATE.instantiate( - "project", project,"metric", metric); - } - - - /** - * Extracts the project from the given fully-qualified path which - * represents a projectName resource. - * - * - * - */ - public static final String extractProjectFromProjectNamePath(String projectNamePath) { - return PROJECT_NAME_PATH_TEMPLATE.parse(projectNamePath).get("project"); + protected MetricsServiceV2Api(MetricsServiceV2Settings settings) throws IOException { + this.channel = settings.getChannel(); + + this.listLogMetricsCallable = settings.listLogMetricsMethod().build(settings); + this.listLogMetricsIterableCallable = + settings.listLogMetricsMethod().buildPageStreaming(settings); + this.getLogMetricCallable = settings.getLogMetricMethod().build(settings); + this.createLogMetricCallable = settings.createLogMetricMethod().build(settings); + this.updateLogMetricCallable = settings.updateLogMetricMethod().build(settings); + this.deleteLogMetricCallable = settings.deleteLogMetricMethod().build(settings); + + closeables.add( + new Closeable() { + @Override + public void close() throws IOException { + channel.shutdown(); + } + }); } - /** - * Extracts the project from the given fully-qualified path which - * represents a metricName resource. 
- * - * - * - */ - public static final String extractProjectFromMetricNamePath(String metricNamePath) { - return METRIC_NAME_PATH_TEMPLATE.parse(metricNamePath).get("project"); - } - - /** - * Extracts the metric from the given fully-qualified path which - * represents a metricName resource. - * - * - * - */ - public static final String extractMetricFromMetricNamePath(String metricNamePath) { - return METRIC_NAME_PATH_TEMPLATE.parse(metricNamePath).get("metric"); - } - - // ============= // Service Calls // ============= @@ -268,9 +231,7 @@ public static final String extractMetricFromMetricNamePath(String metricNamePath */ public Iterable listLogMetrics(String projectName) { ListLogMetricsRequest request = - ListLogMetricsRequest.newBuilder() - .setProjectName(projectName) - .build(); + ListLogMetricsRequest.newBuilder().setProjectName(projectName).build(); return listLogMetrics(request); } @@ -284,8 +245,7 @@ public Iterable listLogMetrics(String projectName) { * @param request The request object containing all of the parameters for the API call. */ public Iterable listLogMetrics(ListLogMetricsRequest request) { - return listLogMetricsStreamingCallable() - .iterableResponseStreamCall(request); + return listLogMetricsIterableCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. @@ -295,8 +255,8 @@ public Iterable listLogMetrics(ListLogMetricsRequest request) { * * */ - public ApiCallable listLogMetricsStreamingCallable() { - return listLogMetricsCallable().pageStreaming(LIST_LOG_METRICS_PAGE_DESC); + public ApiCallable> listLogMetricsIterableCallable() { + return listLogMetricsIterableCallable; } // AUTO-GENERATED DOCUMENTATION AND METHOD - see instructions at the top of the file for editing. 
@@ -307,7 +267,7 @@ public ApiCallable listLogMetricsStreamingCall * */ public ApiCallable listLogMetricsCallable() { - return ApiUtils.prepareIdempotentCallable(LIST_LOG_METRICS, settings).bind(channel); + return listLogMetricsCallable; } // ----- getLogMetric ----- @@ -324,9 +284,7 @@ public ApiCallable listLogMetrics */ public LogMetric getLogMetric(String metricName) { GetLogMetricRequest request = - GetLogMetricRequest.newBuilder() - .setMetricName(metricName) - .build(); + GetLogMetricRequest.newBuilder().setMetricName(metricName).build(); return getLogMetric(request); } @@ -340,7 +298,7 @@ public LogMetric getLogMetric(String metricName) { * * @param request The request object containing all of the parameters for the API call. */ - public LogMetric getLogMetric(GetLogMetricRequest request) { + private LogMetric getLogMetric(GetLogMetricRequest request) { return getLogMetricCallable().call(request); } @@ -352,7 +310,7 @@ public LogMetric getLogMetric(GetLogMetricRequest request) { * */ public ApiCallable getLogMetricCallable() { - return ApiUtils.prepareIdempotentCallable(GET_LOG_METRIC, settings).bind(channel); + return getLogMetricCallable; } // ----- createLogMetric ----- @@ -373,10 +331,7 @@ public ApiCallable getLogMetricCallable() { */ public LogMetric createLogMetric(String projectName, LogMetric metric) { CreateLogMetricRequest request = - CreateLogMetricRequest.newBuilder() - .setProjectName(projectName) - .setMetric(metric) - .build(); + CreateLogMetricRequest.newBuilder().setProjectName(projectName).setMetric(metric).build(); return createLogMetric(request); } @@ -402,7 +357,7 @@ public LogMetric createLogMetric(CreateLogMetricRequest request) { * */ public ApiCallable createLogMetricCallable() { - return CREATE_LOG_METRIC.bind(channel); + return createLogMetricCallable; } // ----- updateLogMetric ----- @@ -426,10 +381,7 @@ public ApiCallable createLogMetricCallable() */ public LogMetric updateLogMetric(String metricName, LogMetric metric) { 
UpdateLogMetricRequest request = - UpdateLogMetricRequest.newBuilder() - .setMetricName(metricName) - .setMetric(metric) - .build(); + UpdateLogMetricRequest.newBuilder().setMetricName(metricName).setMetric(metric).build(); return updateLogMetric(request); } @@ -455,7 +407,7 @@ public LogMetric updateLogMetric(UpdateLogMetricRequest request) { * */ public ApiCallable updateLogMetricCallable() { - return ApiUtils.prepareIdempotentCallable(UPDATE_LOG_METRIC, settings).bind(channel); + return updateLogMetricCallable; } // ----- deleteLogMetric ----- @@ -472,9 +424,7 @@ public ApiCallable updateLogMetricCallable() */ public void deleteLogMetric(String metricName) { DeleteLogMetricRequest request = - DeleteLogMetricRequest.newBuilder() - .setMetricName(metricName) - .build(); + DeleteLogMetricRequest.newBuilder().setMetricName(metricName).build(); deleteLogMetric(request); } @@ -488,7 +438,7 @@ public void deleteLogMetric(String metricName) { * * @param request The request object containing all of the parameters for the API call. */ - public void deleteLogMetric(DeleteLogMetricRequest request) { + private void deleteLogMetric(DeleteLogMetricRequest request) { deleteLogMetricCallable().call(request); } @@ -500,10 +450,9 @@ public void deleteLogMetric(DeleteLogMetricRequest request) { * */ public ApiCallable deleteLogMetricCallable() { - return ApiUtils.prepareIdempotentCallable(DELETE_LOG_METRIC, settings).bind(channel); + return deleteLogMetricCallable; } - // ======== // Cleanup // ======== @@ -516,16 +465,12 @@ public ApiCallable deleteLogMetricCallable() { * */ @Override - public void close() { - // Manually-added shutdown code - - // Auto-generated shutdown code - channel.shutdown(); - - // Manually-added shutdown code + public void close() throws Exception { + for (AutoCloseable closeable : closeables) { + closeable.close(); + } } - // ======== // Manually-added methods: add custom (non-generated) methods after this point. 
// ======== diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java new file mode 100644 index 000000000000..e2c93e20a3f4 --- /dev/null +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/MetricsServiceV2Settings.java @@ -0,0 +1,293 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file was generated from the file + * https://github.com/google/googleapis/blob/master/google/logging/v2/logging_metrics.proto + * and updates to that file get reflected here through a refresh process. + * For the short term, the refresh process will only be runnable by Google engineers. + * Manual additions are allowed because the refresh process performs + * a 3-way merge in order to preserve those manual additions. In order to not + * break the refresh process, only certain types of modifications are + * allowed. + * + * Allowed modifications - currently these are the only types allowed: + * 1. New methods (these should be added to the end of the class) + * 2. New imports + * 3. Additional documentation between "manual edit" demarcations + * + * Happy editing! 
+ */ + +package com.google.gcloud.logging.spi.v2; + +import com.google.api.gax.core.BackoffParams; +import com.google.api.gax.core.ConnectionSettings; +import com.google.api.gax.core.RetryParams; +import com.google.api.gax.grpc.ApiCallSettings; +import com.google.api.gax.grpc.ApiCallable.ApiCallableBuilder; +import com.google.api.gax.grpc.ApiCallable.PageStreamingApiCallableBuilder; +import com.google.api.gax.grpc.PageDescriptor; +import com.google.api.gax.grpc.ServiceApiSettings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.logging.v2.CreateLogMetricRequest; +import com.google.logging.v2.DeleteLogMetricRequest; +import com.google.logging.v2.GetLogMetricRequest; +import com.google.logging.v2.ListLogMetricsRequest; +import com.google.logging.v2.ListLogMetricsResponse; +import com.google.logging.v2.LogMetric; +import com.google.logging.v2.MetricsServiceV2Grpc; +import com.google.logging.v2.UpdateLogMetricRequest; +import com.google.protobuf.Empty; +import io.grpc.Status; + +// Manually-added imports: add custom (non-generated) imports after this point. + +// AUTO-GENERATED DOCUMENTATION AND CLASS - see instructions at the top of the file for editing. +@javax.annotation.Generated("by GAPIC") +public class MetricsServiceV2Settings extends ServiceApiSettings { + + // ========= + // Constants + // ========= + + /** + * The default address of the service. + * + * + * + */ + public static final String DEFAULT_SERVICE_ADDRESS = "logging.googleapis.com"; + + /** + * The default port of the service. + * + * + * + */ + public static final int DEFAULT_SERVICE_PORT = 443; + + /** + * The default scopes of the service. 
+ */ + public static final ImmutableList DEFAULT_SERVICE_SCOPES = + ImmutableList.builder() + .add("https://www.googleapis.com/auth/logging.write") + .add("https://www.googleapis.com/auth/logging.admin") + .add("https://www.googleapis.com/auth/logging.read") + .add("https://www.googleapis.com/auth/cloud-platform.read-only") + .add("https://www.googleapis.com/auth/cloud-platform") + .build(); + + private static final ImmutableMap> RETRYABLE_CODE_DEFINITIONS; + + static { + ImmutableMap.Builder> definitions = ImmutableMap.builder(); + definitions.put( + "idempotent", + Sets.immutableEnumSet( + Lists.newArrayList( + Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE))); + definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.newArrayList())); + RETRYABLE_CODE_DEFINITIONS = definitions.build(); + } + + private static final ImmutableMap RETRY_PARAM_DEFINITIONS; + + static { + ImmutableMap.Builder definitions = ImmutableMap.builder(); + RetryParams params = null; + params = + RetryParams.newBuilder() + .setRetryBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(100L) + .setDelayMultiplier(1.2) + .setMaxDelayMillis(1000L) + .build()) + .setTimeoutBackoff( + BackoffParams.newBuilder() + .setInitialDelayMillis(300L) + .setDelayMultiplier(1.3) + .setMaxDelayMillis(3000L) + .build()) + .setTotalTimeout(30000L) + .build(); + definitions.put("default", params); + RETRY_PARAM_DEFINITIONS = definitions.build(); + } + + private static class MethodBuilders { + private final PageStreamingApiCallableBuilder< + ListLogMetricsRequest, ListLogMetricsResponse, LogMetric> + listLogMetricsMethod; + private final ApiCallableBuilder getLogMetricMethod; + private final ApiCallableBuilder createLogMetricMethod; + private final ApiCallableBuilder updateLogMetricMethod; + private final ApiCallableBuilder deleteLogMetricMethod; + private final ImmutableList allMethods; + + public MethodBuilders() { + listLogMetricsMethod = + new PageStreamingApiCallableBuilder<>( + 
MetricsServiceV2Grpc.METHOD_LIST_LOG_METRICS, LIST_LOG_METRICS_PAGE_STR_DESC); + listLogMetricsMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + listLogMetricsMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + getLogMetricMethod = new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_GET_LOG_METRIC); + getLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + getLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + createLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_CREATE_LOG_METRIC); + createLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + createLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + updateLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_UPDATE_LOG_METRIC); + updateLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent")); + updateLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + deleteLogMetricMethod = + new ApiCallableBuilder<>(MetricsServiceV2Grpc.METHOD_DELETE_LOG_METRIC); + deleteLogMetricMethod.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent")); + deleteLogMetricMethod.setRetryParams(RETRY_PARAM_DEFINITIONS.get("default")); + + allMethods = + ImmutableList.builder() + .add( + listLogMetricsMethod, + getLogMetricMethod, + createLogMetricMethod, + updateLogMetricMethod, + deleteLogMetricMethod) + .build(); + } + } + + private final MethodBuilders methods; + + // =============== + // Factory Methods + // =============== + + /** + * Constructs an instance of MetricsServiceV2Settings with default settings. 
+ * + * + * + */ + public static MetricsServiceV2Settings create() { + MetricsServiceV2Settings settings = new MetricsServiceV2Settings(new MethodBuilders()); + settings.provideChannelWith( + ConnectionSettings.builder() + .setServiceAddress(DEFAULT_SERVICE_ADDRESS) + .setPort(DEFAULT_SERVICE_PORT) + .provideCredentialsWith(DEFAULT_SERVICE_SCOPES) + .build()); + return settings; + } + + /** + * Constructs an instance of MetricsServiceV2Settings with default settings. This is protected so that it + * easy to make a subclass, but otherwise, the static factory methods should be preferred. + * + * + * + */ + protected MetricsServiceV2Settings(MethodBuilders methods) { + super(methods.allMethods); + this.methods = methods; + } + + /** + * Returns the PageStreamingApiCallableBuilder for the API method listLogMetrics. + * + * + * + */ + public PageStreamingApiCallableBuilder + listLogMetricsMethod() { + return methods.listLogMetricsMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method getLogMetric. + * + * + * + */ + public ApiCallableBuilder getLogMetricMethod() { + return methods.getLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method createLogMetric. + * + * + * + */ + public ApiCallableBuilder createLogMetricMethod() { + return methods.createLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method updateLogMetric. + * + * + * + */ + public ApiCallableBuilder updateLogMetricMethod() { + return methods.updateLogMetricMethod; + } + + /** + * Returns the ApiCallableBuilder for the API method deleteLogMetric. 
+ * + * + * + */ + public ApiCallableBuilder deleteLogMetricMethod() { + return methods.deleteLogMetricMethod; + } + + private static PageDescriptor + LIST_LOG_METRICS_PAGE_STR_DESC = + new PageDescriptor() { + @Override + public Object emptyToken() { + return ""; + } + + @Override + public ListLogMetricsRequest injectToken(ListLogMetricsRequest payload, Object token) { + return ListLogMetricsRequest.newBuilder(payload).setPageToken((String) token).build(); + } + + @Override + public Object extractNextToken(ListLogMetricsResponse payload) { + return payload.getNextPageToken(); + } + + @Override + public Iterable extractResources(ListLogMetricsResponse payload) { + return payload.getMetricsList(); + } + }; +} diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/testing/LocalLoggingHelper.java b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/testing/LocalLoggingHelper.java similarity index 97% rename from gcloud-java-logging/src/main/java/com/google/gcloud/logging/testing/LocalLoggingHelper.java rename to gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/testing/LocalLoggingHelper.java index 4653112fed52..af32010f4a04 100644 --- a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/testing/LocalLoggingHelper.java +++ b/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/testing/LocalLoggingHelper.java @@ -1,4 +1,4 @@ -package com.google.gcloud.logging.testing; +package com.google.gcloud.logging.spi.v2.testing; import com.google.gcloud.pubsub.spi.v2.testing.LocalLoggingImpl; import com.google.logging.v2.LoggingServiceV2Grpc; diff --git a/gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/testing/LocalLoggingImpl.java b/gcloud-java-logging/src/main/java/com/google/gcloud/pubsub/spi/v2/testing/LocalLoggingImpl.java similarity index 100% rename from gcloud-java-logging/src/main/java/com/google/gcloud/logging/spi/v2/testing/LocalLoggingImpl.java rename to 
gcloud-java-logging/src/main/java/com/google/gcloud/pubsub/spi/v2/testing/LocalLoggingImpl.java diff --git a/gcloud-java-logging/src/test/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2ApiTest.java b/gcloud-java-logging/src/test/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2ApiTest.java index fb55edc053b4..e290fe79dadc 100644 --- a/gcloud-java-logging/src/test/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2ApiTest.java +++ b/gcloud-java-logging/src/test/java/com/google/gcloud/logging/spi/v2/LoggingServiceV2ApiTest.java @@ -16,16 +16,9 @@ import com.google.api.MonitoredResource; import com.google.common.collect.Iterables; -import com.google.gcloud.logging.testing.LocalLoggingHelper; +import com.google.gcloud.logging.spi.v2.testing.LocalLoggingHelper; import com.google.logging.v2.LogEntry; -import io.gapi.gax.grpc.ServiceApiSettings; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; @@ -33,6 +26,11 @@ import org.junit.BeforeClass; import org.junit.Test; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + public class LoggingServiceV2ApiTest { private static LocalLoggingHelper loggingHelper; private LoggingServiceV2Api loggingApi; @@ -51,19 +49,19 @@ public static void stopServer() { @Before public void setUp() throws IOException { loggingHelper.reset(); - ServiceApiSettings settings = new ServiceApiSettings(); - settings.setChannel(loggingHelper.createChannel()); + LoggingServiceV2Settings settings = LoggingServiceV2Settings.create(); + settings.provideChannelWith(loggingHelper.createChannel()); loggingApi = LoggingServiceV2Api.create(settings); } @After - public void tearDown() { + public void tearDown() throws Exception { loggingApi.close(); } @Test public void testWriteLog() { - String logName = 
LoggingServiceV2Api.createLogNamePath("my-project", "my-log"); + String logName = LoggingServiceV2Api.ResourceNames.formatLogPath("my-project", "my-log"); MonitoredResource resource = MonitoredResource.newBuilder().build(); List entries = new ArrayList<>(); entries.add(LogEntry.newBuilder().setLogName(logName).setTextPayload("foobar").build()); @@ -72,7 +70,7 @@ public void testWriteLog() { @Test public void testListLog() { - String logName = LoggingServiceV2Api.createLogNamePath("my-project", "my-log"); + String logName = LoggingServiceV2Api.ResourceNames.formatLogPath("my-project", "my-log"); MonitoredResource resource = MonitoredResource.newBuilder().build(); List entries = new ArrayList<>(); entries.add(LogEntry.newBuilder().setLogName(logName).setTextPayload("foobar").build()); @@ -92,7 +90,7 @@ public void testListNoLog() { @Test public void testDeleteLog() { - String logName = LoggingServiceV2Api.createLogNamePath("my-project", "my-log"); + String logName = LoggingServiceV2Api.ResourceNames.formatLogPath("my-project", "my-log"); MonitoredResource resource = MonitoredResource.newBuilder().build(); List entries = new ArrayList<>(); entries.add(LogEntry.newBuilder().setLogName(logName).setTextPayload("foobar").build()); From 74a74ec86df4e829cdd62c012d2ed65bde4eca2f Mon Sep 17 00:00:00 2001 From: Shin Fan Date: Fri, 18 Mar 2016 15:09:45 -0700 Subject: [PATCH 2/2] Merge from branch pubsub-alpha --- .travis.yml | 3 - CONTRIBUTING.md | 18 + README.md | 257 +++- RELEASING.md | 18 +- TESTING.md | 72 +- codacy-conf.json | 1 + gcloud-java-bigquery/README.md | 258 ++++ gcloud-java-bigquery/pom.xml | 55 + .../java/com/google/gcloud/bigquery/Acl.java | 446 +++++++ .../com/google/gcloud/bigquery/BigQuery.java | 668 ++++++++++ .../google/gcloud/bigquery/BigQueryError.java | 125 ++ .../gcloud/bigquery/BigQueryException.java | 87 ++ .../gcloud/bigquery/BigQueryFactory.java | 19 +- .../google/gcloud/bigquery/BigQueryImpl.java | 631 +++++++++ 
.../gcloud/bigquery/BigQueryOptions.java | 114 ++ .../gcloud/bigquery/CopyJobConfiguration.java | 258 ++++ .../google/gcloud/bigquery/CsvOptions.java | 271 ++++ .../com/google/gcloud/bigquery/Dataset.java | 253 ++++ .../com/google/gcloud/bigquery/DatasetId.java | 97 ++ .../google/gcloud/bigquery/DatasetInfo.java | 473 +++++++ .../bigquery/ExternalTableDefinition.java | 408 ++++++ .../bigquery/ExtractJobConfiguration.java | 294 +++++ .../com/google/gcloud/bigquery/Field.java | 375 ++++++ .../google/gcloud/bigquery/FieldValue.java | 266 ++++ .../google/gcloud/bigquery/FormatOptions.java | 95 ++ .../gcloud/bigquery/InsertAllRequest.java | 456 +++++++ .../gcloud/bigquery/InsertAllResponse.java | 121 ++ .../java/com/google/gcloud/bigquery/Job.java | 203 +++ .../gcloud/bigquery/JobConfiguration.java | 145 +++ .../com/google/gcloud/bigquery/JobId.java | 91 ++ .../com/google/gcloud/bigquery/JobInfo.java | 386 ++++++ .../google/gcloud/bigquery/JobStatistics.java | 516 ++++++++ .../com/google/gcloud/bigquery/JobStatus.java | 130 ++ .../gcloud/bigquery/LoadConfiguration.java | 165 +++ .../gcloud/bigquery/LoadJobConfiguration.java | 390 ++++++ .../com/google/gcloud/bigquery/Option.java | 72 ++ .../bigquery/QueryJobConfiguration.java | 537 ++++++++ .../google/gcloud/bigquery/QueryRequest.java | 318 +++++ .../google/gcloud/bigquery/QueryResponse.java | 196 +++ .../google/gcloud/bigquery/QueryResult.java | 176 +++ .../google/gcloud/bigquery/QueryStage.java | 444 +++++++ .../com/google/gcloud/bigquery/Schema.java | 159 +++ .../bigquery/StandardTableDefinition.java | 282 ++++ .../com/google/gcloud/bigquery/Table.java | 344 +++++ .../bigquery/TableDataWriteChannel.java | 95 ++ .../gcloud/bigquery/TableDefinition.java | 182 +++ .../com/google/gcloud/bigquery/TableId.java | 122 ++ .../com/google/gcloud/bigquery/TableInfo.java | 390 ++++++ .../gcloud/bigquery/UserDefinedFunction.java | 151 +++ .../gcloud/bigquery/ViewDefinition.java | 233 ++++ 
.../bigquery/WriteChannelConfiguration.java | 321 +++++ .../google/gcloud/bigquery/package-info.java | 47 + .../gcloud/bigquery/spi/BigQueryRpc.java | 254 ++++ .../bigquery/spi/BigQueryRpcFactory.java | 27 + .../bigquery/spi/DefaultBigQueryRpc.java | 468 +++++++ .../testing/RemoteBigQueryHelper.java | 140 ++ .../gcloud/bigquery/testing/package-info.java | 38 + .../com/google/gcloud/bigquery/AclTest.java | 95 ++ .../gcloud/bigquery/BigQueryErrorTest.java | 45 + .../bigquery/BigQueryExceptionTest.java | 115 ++ .../gcloud/bigquery/BigQueryImplTest.java | 1080 ++++++++++++++++ .../bigquery/CopyJobConfigurationTest.java | 130 ++ .../gcloud/bigquery/CsvOptionsTest.java | 87 ++ .../google/gcloud/bigquery/DatasetIdTest.java | 59 + .../gcloud/bigquery/DatasetInfoTest.java | 136 ++ .../google/gcloud/bigquery/DatasetTest.java | 373 ++++++ .../bigquery/ExternalTableDefinitionTest.java | 109 ++ .../bigquery/ExtractJobConfigurationTest.java | 139 ++ .../com/google/gcloud/bigquery/FieldTest.java | 106 ++ .../gcloud/bigquery/FieldValueTest.java | 111 ++ .../gcloud/bigquery/FormatOptionsTest.java | 52 + .../gcloud/bigquery/InsertAllRequestTest.java | 223 ++++ .../bigquery/InsertAllResponseTest.java | 77 ++ .../com/google/gcloud/bigquery/JobIdTest.java | 56 + .../google/gcloud/bigquery/JobInfoTest.java | 370 ++++++ .../gcloud/bigquery/JobStatisticsTest.java | 204 +++ .../google/gcloud/bigquery/JobStatusTest.java | 67 + .../com/google/gcloud/bigquery/JobTest.java | 258 ++++ .../bigquery/LoadJobConfigurationTest.java | 140 ++ .../google/gcloud/bigquery/OptionTest.java | 38 + .../bigquery/QueryJobConfigurationTest.java | 169 +++ .../gcloud/bigquery/QueryRequestTest.java | 108 ++ .../gcloud/bigquery/QueryResponseTest.java | 107 ++ .../gcloud/bigquery/QueryResultTest.java | 91 ++ .../gcloud/bigquery/QueryStageTest.java | 131 ++ .../bigquery/RemoteBigQueryHelperTest.java | 92 ++ .../google/gcloud/bigquery/SchemaTest.java | 77 ++ .../gcloud/bigquery/SerializationTest.java | 304 +++++ 
.../bigquery/TableDataWriteChannelTest.java | 249 ++++ .../gcloud/bigquery/TableDefinitionTest.java | 103 ++ .../google/gcloud/bigquery/TableIdTest.java | 61 + .../google/gcloud/bigquery/TableInfoTest.java | 207 +++ .../com/google/gcloud/bigquery/TableTest.java | 407 ++++++ .../bigquery/UserDefinedFunctionTest.java | 56 + .../gcloud/bigquery/ViewDefinitionTest.java | 75 ++ .../WriteChannelConfigurationTest.java | 125 ++ .../gcloud/bigquery/it/ITBigQueryTest.java | 955 ++++++++++++++ gcloud-java-contrib/README.md | 64 + gcloud-java-contrib/pom.xml | 36 + gcloud-java-core/README.md | 24 +- gcloud-java-core/pom.xml | 29 +- .../com/google/gcloud/AuthCredentials.java | 245 ++-- .../java/com/google/gcloud/BaseService.java | 30 + .../google/gcloud/BaseServiceException.java | 230 ++++ .../com/google/gcloud/BaseWriteChannel.java | 294 +++++ .../com/google/gcloud/ExceptionHandler.java | 10 +- .../java/com/google/gcloud/IamPolicy.java | 256 ++++ .../main/java/com/google/gcloud/Identity.java | 225 ++++ .../src/main/java/com/google/gcloud/Page.java | 69 + .../main/java/com/google/gcloud/PageImpl.java | 144 +++ .../java/com/google/gcloud/ReadChannel.java | 57 + .../java/com/google/gcloud/Restorable.java | 6 +- .../com/google/gcloud/RestorableState.java | 2 + .../java/com/google/gcloud/RetryHelper.java | 16 +- .../java/com/google/gcloud/RetryParams.java | 30 +- .../main/java/com/google/gcloud/Service.java | 5 + .../com/google/gcloud/ServiceFactory.java | 6 +- .../com/google/gcloud/ServiceOptions.java | 193 +-- .../java/com/google/gcloud/WriteChannel.java | 48 + .../java/com/google/gcloud/package-info.java | 20 + .../google/gcloud/spi/ServiceRpcFactory.java | 1 + .../gcloud/BaseServiceExceptionTest.java | 155 +++ .../google/gcloud/BaseWriteChannelTest.java | 144 +++ .../google/gcloud/ExceptionHandlerTest.java | 2 +- .../java/com/google/gcloud/IamPolicyTest.java | 180 +++ .../java/com/google/gcloud/IdentityTest.java | 105 ++ .../java/com/google/gcloud/PageImplTest.java | 61 + 
.../com/google/gcloud/RetryHelperTest.java | 4 +- .../com/google/gcloud/RetryParamsTest.java | 34 +- .../com/google/gcloud/ServiceOptionsTest.java | 241 ++++ gcloud-java-datastore/README.md | 123 +- gcloud-java-datastore/pom.xml | 13 +- .../datastore/BaseDatastoreBatchWriter.java | 2 +- .../google/gcloud/datastore/BaseEntity.java | 280 +++- .../com/google/gcloud/datastore/BaseKey.java | 13 +- .../com/google/gcloud/datastore/Batch.java | 16 +- .../com/google/gcloud/datastore/Blob.java | 6 +- .../com/google/gcloud/datastore/Cursor.java | 27 +- .../gcloud/datastore/DatastoreException.java | 149 +-- .../gcloud/datastore/DatastoreImpl.java | 46 +- .../gcloud/datastore/DatastoreOptions.java | 39 +- .../gcloud/datastore/DatastoreReader.java | 6 +- .../com/google/gcloud/datastore/DateTime.java | 4 +- .../google/gcloud/datastore/EntityQuery.java | 67 + .../google/gcloud/datastore/FullEntity.java | 3 +- .../com/google/gcloud/datastore/GqlQuery.java | 46 +- .../gcloud/datastore/IncompleteKey.java | 25 +- .../java/com/google/gcloud/datastore/Key.java | 2 +- .../google/gcloud/datastore/KeyFactory.java | 2 +- .../com/google/gcloud/datastore/KeyQuery.java | 68 + .../google/gcloud/datastore/ListValue.java | 7 +- .../google/gcloud/datastore/PathElement.java | 4 +- .../datastore/ProjectionEntityQuery.java | 112 ++ .../com/google/gcloud/datastore/Query.java | 41 +- .../google/gcloud/datastore/QueryResults.java | 9 +- .../gcloud/datastore/QueryResultsImpl.java | 6 +- .../google/gcloud/datastore/Serializable.java | 6 +- .../gcloud/datastore/StructuredQuery.java | 266 ++-- .../google/gcloud/datastore/Transaction.java | 28 +- .../com/google/gcloud/datastore/Value.java | 4 +- .../google/gcloud/datastore/ValueBuilder.java | 4 + .../google/gcloud/datastore/package-info.java | 61 +- .../gcloud/datastore/spi/DatastoreRpc.java | 80 ++ .../spi/DatastoreRpcFactory.java | 3 +- .../spi/DefaultDatastoreRpc.java | 71 +- .../datastore/testing/LocalGcdHelper.java | 31 +- 
.../com/google/gcloud/spi/DatastoreRpc.java | 118 -- .../gcloud/datastore/BaseEntityTest.java | 25 +- .../google/gcloud/datastore/BaseKeyTest.java | 5 + .../datastore/DatastoreExceptionTest.java | 82 +- .../datastore/DatastoreOptionsTest.java | 8 +- .../gcloud/datastore/DatastoreTest.java | 149 ++- .../gcloud/datastore/IncompleteKeyTest.java | 28 +- .../gcloud/datastore/LocalGcdHelperTest.java | 4 +- .../gcloud/datastore/SerializationTest.java | 4 +- .../gcloud/datastore/StructuredQueryTest.java | 172 +++ .../google/gcloud/datastore/ValueTest.java | 3 + gcloud-java-examples/README.md | 90 +- gcloud-java-examples/pom.xml | 3 +- .../examples/bigquery/BigQueryExample.java | 792 ++++++++++++ .../snippets/CreateTableAndLoadData.java | 64 + .../snippets/InsertDataAndQueryTable.java | 102 ++ .../{ => datastore}/DatastoreExample.java | 55 +- .../snippets/AddEntitiesAndRunQuery.java | 84 ++ .../datastore/snippets/CreateEntity.java | 48 + .../datastore/snippets/UpdateEntity.java | 50 + .../ResourceManagerExample.java | 224 ++++ .../snippets/GetOrCreateProject.java | 49 + .../snippets/UpdateAndListProjects.java | 62 + .../{ => storage}/StorageExample.java | 99 +- .../CreateAndListBucketsAndBlobs.java | 70 + .../examples/storage/snippets/CreateBlob.java | 44 + .../examples/storage/snippets/UpdateBlob.java | 53 + gcloud-java-resourcemanager/README.md | 218 ++++ gcloud-java-resourcemanager/pom.xml | 49 + .../google/gcloud/resourcemanager/Option.java | 72 ++ .../google/gcloud/resourcemanager/Policy.java | 226 ++++ .../gcloud/resourcemanager/Project.java | 227 ++++ .../gcloud/resourcemanager/ProjectInfo.java | 394 ++++++ .../resourcemanager/ResourceManager.java | 387 ++++++ .../ResourceManagerException.java | 75 ++ .../ResourceManagerFactory.java | 25 + .../resourcemanager/ResourceManagerImpl.java | 252 ++++ .../ResourceManagerOptions.java | 123 ++ .../gcloud/resourcemanager/package-info.java | 61 + .../spi/DefaultResourceManagerRpc.java | 167 +++ .../spi/ResourceManagerRpc.java | 
149 +++ .../spi/ResourceManagerRpcFactory.java | 28 + .../testing/LocalResourceManagerHelper.java | 725 +++++++++++ .../resourcemanager/testing/package-info.java | 32 + .../LocalResourceManagerHelperTest.java | 727 +++++++++++ .../gcloud/resourcemanager/PolicyTest.java | 79 ++ .../resourcemanager/ProjectInfoTest.java | 109 ++ .../gcloud/resourcemanager/ProjectTest.java | 223 ++++ .../ResourceManagerExceptionTest.java | 94 ++ .../ResourceManagerImplTest.java | 446 +++++++ .../resourcemanager/SerializationTest.java | 99 ++ gcloud-java-storage/README.md | 126 +- gcloud-java-storage/pom.xml | 19 +- .../java/com/google/gcloud/storage/Acl.java | 102 +- .../google/gcloud/storage/BaseListResult.java | 77 -- .../google/gcloud/storage/BatchRequest.java | 11 +- .../google/gcloud/storage/BatchResponse.java | 12 +- .../java/com/google/gcloud/storage/Blob.java | 487 ++++--- .../com/google/gcloud/storage/BlobId.java | 51 +- .../com/google/gcloud/storage/BlobInfo.java | 375 +++++- .../google/gcloud/storage/BlobListResult.java | 77 -- .../gcloud/storage/BlobReadChannel.java | 286 +++- .../gcloud/storage/BlobReadChannelImpl.java | 259 ---- .../gcloud/storage/BlobWriteChannel.java | 93 +- .../gcloud/storage/BlobWriteChannelImpl.java | 274 ---- .../com/google/gcloud/storage/Bucket.java | 612 ++++++++- .../com/google/gcloud/storage/BucketInfo.java | 476 ++++--- .../com/google/gcloud/storage/CopyWriter.java | 21 +- .../java/com/google/gcloud/storage/Cors.java | 53 + .../com/google/gcloud/storage/Option.java | 4 +- .../com/google/gcloud/storage/Storage.java | 737 +++++++++-- .../gcloud/storage/StorageException.java | 55 +- .../google/gcloud/storage/StorageImpl.java | 356 +++-- .../google/gcloud/storage/StorageOptions.java | 43 +- .../google/gcloud/storage/package-info.java | 35 +- .../{ => storage}/spi/DefaultStorageRpc.java | 224 ++-- .../gcloud/{ => storage}/spi/StorageRpc.java | 176 ++- .../{ => storage}/spi/StorageRpcFactory.java | 3 +- .../storage/testing/RemoteGcsHelper.java | 122 
+- .../gcloud/storage/testing/package-info.java | 3 +- .../com/google/gcloud/storage/AclTest.java | 4 +- .../gcloud/storage/BaseListResultTest.java | 47 - .../gcloud/storage/BatchRequestTest.java | 28 +- .../gcloud/storage/BatchResponseTest.java | 38 +- .../google/gcloud/storage/BlobInfoTest.java | 79 +- .../gcloud/storage/BlobListResultTest.java | 95 -- ...ImplTest.java => BlobReadChannelTest.java} | 100 +- .../com/google/gcloud/storage/BlobTest.java | 360 ++++-- ...mplTest.java => BlobWriteChannelTest.java} | 80 +- .../google/gcloud/storage/BucketInfoTest.java | 25 +- .../com/google/gcloud/storage/BucketTest.java | 420 +++++- .../google/gcloud/storage/CopyWriterTest.java | 19 +- .../google/gcloud/storage/ITStorageTest.java | 674 ---------- .../com/google/gcloud/storage/OptionTest.java | 2 +- .../gcloud/storage/RemoteGcsHelperTest.java | 144 ++- .../gcloud/storage/SerializationTest.java | 58 +- .../gcloud/storage/StorageExceptionTest.java | 125 ++ .../gcloud/storage/StorageImplTest.java | 607 ++++++--- .../gcloud/storage/it/ITStorageTest.java | 1152 +++++++++++++++++ gcloud-java/README.md | 24 +- gcloud-java/pom.xml | 13 +- pom.xml | 80 +- src/site/resources/index.html | 21 +- src/site/site.xml | 2 +- utilities/after_success.sh | 58 +- utilities/integration_test_env.sh | 4 +- utilities/update_docs_version.sh | 4 +- utilities/verify.sh | 5 +- 274 files changed, 37673 insertions(+), 4422 deletions(-) create mode 100644 codacy-conf.json create mode 100644 gcloud-java-bigquery/README.md create mode 100644 gcloud-java-bigquery/pom.xml create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java rename 
gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java => gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java (62%) create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableDefinition.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java create mode 100644 
gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/StandardTableDefinition.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDefinition.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewDefinition.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java 
create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpcFactory.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/DefaultBigQueryRpc.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java create mode 100644 gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExternalTableDefinitionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExtractJobConfigurationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java create mode 
100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java create mode 100644 
gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDefinitionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ViewDefinitionTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java create mode 100644 gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java create mode 100644 gcloud-java-contrib/README.md create mode 100644 gcloud-java-contrib/pom.xml create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/Identity.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/Page.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java create mode 100644 gcloud-java-core/src/main/java/com/google/gcloud/package-info.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java create mode 100644 
gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java create mode 100644 gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java create mode 100644 gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java create mode 100644 gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java create mode 100644 gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java create mode 100644 gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java rename gcloud-java-datastore/src/main/java/com/google/gcloud/{ => datastore}/spi/DatastoreRpcFactory.java (90%) rename gcloud-java-datastore/src/main/java/com/google/gcloud/{ => datastore}/spi/DefaultDatastoreRpc.java (68%) delete mode 100644 gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java create mode 100644 gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java rename gcloud-java-examples/src/main/java/com/google/gcloud/examples/{ => datastore}/DatastoreExample.java (84%) create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/AddEntitiesAndRunQuery.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/CreateEntity.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/UpdateEntity.java create mode 100644 
gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/GetOrCreateProject.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java rename gcloud-java-examples/src/main/java/com/google/gcloud/examples/{ => storage}/StorageExample.java (86%) create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateAndListBucketsAndBlobs.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateBlob.java create mode 100644 gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/UpdateBlob.java create mode 100644 gcloud-java-resourcemanager/README.md create mode 100644 gcloud-java-resourcemanager/pom.xml create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java create 
mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java create mode 100644 gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java create mode 100644 gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java delete mode 100644 gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java rename 
gcloud-java-storage/src/main/java/com/google/gcloud/{ => storage}/spi/DefaultStorageRpc.java (72%) rename gcloud-java-storage/src/main/java/com/google/gcloud/{ => storage}/spi/StorageRpc.java (71%) rename gcloud-java-storage/src/main/java/com/google/gcloud/{ => storage}/spi/StorageRpcFactory.java (91%) delete mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java delete mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java rename gcloud-java-storage/src/test/java/com/google/gcloud/storage/{BlobReadChannelImplTest.java => BlobReadChannelTest.java} (62%) rename gcloud-java-storage/src/test/java/com/google/gcloud/storage/{BlobWriteChannelImplTest.java => BlobWriteChannelTest.java} (79%) delete mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java create mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java create mode 100644 gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java diff --git a/.travis.yml b/.travis.yml index ab421366db6a..c023116917a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,9 +10,6 @@ before_install: install: mvn install -DskipTests=true -Dgpg.skip=true script: - utilities/verify.sh -branches: - only: - - master after_success: - utilities/after_success.sh env: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 167bf18e5082..3d93f2d032c7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,6 +15,24 @@ Using maven for build/test After you cloned the repository use Maven for building and running the tests. Maven 3.0+ is required. +When downloading the source, we recommend you obtain service account credentials. +These credentials will allow you to run integration tests using `mvn verify` in command line. 
+Follow step 2 of the [authentication instructions](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) to generate and use JSON service account credentials. + +It's also important to test that changes don't break compatibility with App/Compute Engine and when running elsewhere. +To run tests on different platforms, try deploying the apps available on the [gcloud-java-examples](https://github.com/GoogleCloudPlatform/gcloud-java-examples) repository. +End-to-end tests should ensure that gcloud-java works when running on the + +* App Engine production environment (see the docs for [uploading your app to production App Engine](https://cloud.google.com/appengine/docs/java/tools/maven#uploading_your_app_to_production_app_engine)) +* App Engine development server (see the docs for [testing your app with the development server](https://cloud.google.com/appengine/docs/java/tools/maven#testing_your_app_with_the_development_server)) +* Compute Engine (see the [Getting Started Guide](https://cloud.google.com/compute/docs/quickstart), and be sure to [enable the appropriate APIs](https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication#on-google-compute-engine)) +* Your desktop (using `mvn exec:java`, for example) + +When changes are made to authentication and project ID-related code, authentication and project ID inference should be tested using all relevant methods detailed in the [authentication docs](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and [project ID docs](https://github.com/GoogleCloudPlatform/gcloud-java#specifying-a-project-id). + +Known issue: If you have installed the Google Cloud SDK, be sure to log in (using `gcloud auth login`) before running tests. Though the Datastore tests use a local Datastore emulator that doesn't require authentication, they will not run if you have the Google Cloud SDK installed but aren't authenticated. 
+ +**Please, do not use your production projects for executing integration tests.** While we do our best to make our tests independent of your project's state and content, they do perform create, modify and deletes, and you do not want to have your production data accidentally modified. Adding Features --------------- diff --git a/README.md b/README.md index 1b9867fd198f..68c624c37489 100644 --- a/README.md +++ b/README.md @@ -6,13 +6,17 @@ Java idiomatic client for [Google Cloud Platform][cloud-platform] services. [![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) - [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) - [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs) This client supports the following Google Cloud Platform services: +- [Google Cloud BigQuery] (#google-cloud-bigquery-alpha) (Alpha) - [Google Cloud Datastore] (#google-cloud-datastore) +- [Google Cloud Resource Manager] (#google-cloud-resource-manager-alpha) (Alpha) - [Google Cloud Storage] (#google-cloud-storage) > Note: This client is a work-in-progress, and may occasionally @@ -20,32 +24,142 @@ This client supports the following Google Cloud Platform services: Quickstart ---------- -Add this to your pom.xml file +If 
you are using Maven, add this to your pom.xml file ```xml <dependency> <groupId>com.google.gcloud</groupId> <artifactId>gcloud-java</artifactId> - <version>0.0.10</version> + <version>0.1.5</version> </dependency> ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.5" +``` Example Applications -------------------- -- [`DatastoreExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java) - A simple command line interface for the Cloud Datastore - - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/DatastoreExample.html). -- [`StorageExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java) - A simple command line interface providing some of Cloud Storage's functionality - - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/StorageExample.html). +- [`BigQueryExample`](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java) - A simple command line interface providing some of Cloud BigQuery's functionality + - Read more about using this application on the [`BigQueryExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/bigquery/BigQueryExample.html). +- [`Bookshelf`](https://github.com/GoogleCloudPlatform/getting-started-java/tree/master/bookshelf) - An App Engine app that manages a virtual bookshelf. + - This app uses `gcloud-java` to interface with Cloud Datastore and Cloud Storage. It also uses Cloud SQL, another Google Cloud Platform service. 
+- [`DatastoreExample`](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/DatastoreExample.java) - A simple command line interface for the Cloud Datastore + - Read more about using this application on the [`DatastoreExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/datastore/DatastoreExample.html). +- [`ResourceManagerExample`](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java) - A simple command line interface providing some of Cloud Resource Manager's functionality + - Read more about using this application on the [`ResourceManagerExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/resourcemanager/ResourceManagerExample.html). +- [`SparkDemo`](https://github.com/GoogleCloudPlatform/java-docs-samples/blob/master/managed_vms/sparkjava) - An example of using gcloud-java-datastore from within the SparkJava and App Engine Managed VM frameworks. + - Read about how it works on the example's [README page](https://github.com/GoogleCloudPlatform/java-docs-samples/tree/master/managed_vms/sparkjava#how-does-it-work). +- [`StorageExample`](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java) - A simple command line interface providing some of Cloud Storage's functionality + - Read more about using this application on the [`StorageExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/storage/StorageExample.html). + +Specifying a Project ID +----------------------- + +Most `gcloud-java` libraries require a project ID. There are multiple ways to specify this project ID. + +1. When using `gcloud-java` libraries from within Compute/App Engine, there's no need to specify a project ID. It is automatically inferred from the production environment. +2. 
When using `gcloud-java` elsewhere, you can do one of the following: + * Supply the project ID when building the service options. For example, to use Datastore from a project with ID "PROJECT_ID", you can write: + + ```java + Datastore datastore = DatastoreOptions.builder().projectId("PROJECT_ID").build().service(); + ``` + * Specify the environment variable `GCLOUD_PROJECT` to be your desired project ID. + * Set the project ID using the [Google Cloud SDK](https://cloud.google.com/sdk/?hl=en). To use the SDK, [download the SDK](https://cloud.google.com/sdk/?hl=en) if you haven't already, and set the project ID from the command line. For example: + + ``` + gcloud config set project PROJECT_ID + ``` + +`gcloud-java` determines the project ID from the following sources in the listed order, stopping once it finds a value: + +1. Project ID supplied when building the service options +2. Project ID specified by the environment variable `GCLOUD_PROJECT` +3. App Engine project ID +4. Google Cloud SDK project ID +5. Compute Engine project ID Authentication -------------- -There are multiple ways to authenticate to use Google Cloud services. +First, ensure that the necessary Google Cloud APIs are enabled for your project. To do this, follow the instructions on the [authentication document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/authentication/readme.md#authentication) shared by all the gcloud language libraries. + +Next, choose a method for authenticating API requests from within your project: 1. When using `gcloud-java` libraries from within Compute/App Engine, no additional authentication steps are necessary. 2. When using `gcloud-java` libraries elsewhere, there are two options: - * [Generate a JSON service account key](https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts). Supply a path to the downloaded JSON credentials file when building the options supplied to datastore/storage constructor. 
- * If running locally for development/testing, you can use use [Google Cloud SDK](https://cloud.google.com/sdk/?hl=en). To use the SDK authentication, [download the SDK](https://cloud.google.com/sdk/?hl=en) if you haven't already. Then login using the SDK (`gcloud auth login` in command line), and set your current project using `gcloud config set project PROJECT_ID`. + * [Generate a JSON service account key](https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts). After downloading that key, you must do one of the following: + * Define the environment variable GOOGLE_APPLICATION_CREDENTIALS to be the location of the key. For example: + ```bash + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/my/key.json + ``` + * Supply the JSON credentials file when building the service options. For example, this Storage object has the necessary permissions to interact with your Google Cloud Storage data: + ```java + Storage storage = StorageOptions.builder() + .authCredentials(AuthCredentials.createForJson(new FileInputStream("/path/to/my/key.json"))) + .build() + .service(); + ``` + * If running locally for development/testing, you can use the Google Cloud SDK. Download the SDK if you haven't already, then login using the SDK (`gcloud auth login` in command line). Be sure to set your project ID as described above. + +`gcloud-java` looks for credentials in the following order, stopping once it finds credentials: + +1. Credentials supplied when building the service options +2. App Engine credentials +3. Key file pointed to by the GOOGLE_APPLICATION_CREDENTIALS environment variable +4. Google Cloud SDK credentials +5. Compute Engine credentials + +Google Cloud BigQuery (Alpha) +---------------------- + +- [API Documentation][bigquery-api] +- [Official Documentation][cloud-bigquery-docs] + +#### Preview + +Here is a code snippet showing a simple usage example from within Compute/App Engine. 
Note that you +must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. +Complete source code can be found at +[CreateTableAndLoadData.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java). + +```java +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FormatOptions; +import com.google.gcloud.bigquery.Job; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; + +BigQuery bigquery = BigQueryOptions.defaultInstance().service(); +TableId tableId = TableId.of("dataset", "table"); +Table table = bigquery.getTable(tableId); +if (table == null) { + System.out.println("Creating table " + tableId); + Field integerField = Field.of("fieldName", Field.Type.integer()); + Schema schema = Schema.of(integerField); + table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema))); +} +System.out.println("Loading data into table " + tableId); +Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path"); +while (!loadJob.isDone()) { + Thread.sleep(1000L); +} +if (loadJob.status().error() != null) { + System.out.println("Job completed with errors"); +} else { + System.out.println("Job succeeded"); +} +``` Google Cloud Datastore ---------------------- @@ -57,7 +171,10 @@ Google Cloud Datastore #### Preview -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. +Here are two code snippets showing simple usage examples from within Compute/App Engine. 
Note that you must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. + +The first snippet shows how to create a Datastore entity. Complete source code can be found at +[CreateEntity.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/CreateEntity.java). ```java import com.google.gcloud.datastore.Datastore; @@ -67,18 +184,32 @@ import com.google.gcloud.datastore.Entity; import com.google.gcloud.datastore.Key; import com.google.gcloud.datastore.KeyFactory; -Datastore datastore = DatastoreOptions.getDefaultInstance().service(); -KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND); -Key key = keyFactory.newKey(keyName); +Datastore datastore = DatastoreOptions.defaultInstance().service(); +KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind"); +Key key = keyFactory.newKey("keyName"); +Entity entity = Entity.builder(key) + .set("name", "John Doe") + .set("age", 30) + .set("access_time", DateTime.now()) + .build(); +datastore.put(entity); +``` +The second snippet shows how to update a Datastore entity if it exists. Complete source code can be +found at +[UpdateEntity.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/UpdateEntity.java). 
+```java +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.DateTime; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; + +Datastore datastore = DatastoreOptions.defaultInstance().service(); +KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind"); +Key key = keyFactory.newKey("keyName"); Entity entity = datastore.get(key); -if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Do") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); - datastore.put(entity); -} else { +if (entity != null) { System.out.println("Updating access_time for " + entity.getString("name")); entity = Entity.builder(entity) .set("access_time", DateTime.now()) @@ -87,6 +218,41 @@ if (entity == null) { } ``` +Google Cloud Resource Manager (Alpha) +---------------------- + +- [API Documentation][resourcemanager-api] +- [Official Documentation][cloud-resourcemanager-docs] + +#### Preview + +Here is a code snippet showing a simple usage example. Note that you must supply Google SDK credentials for this service, not other forms of authentication listed in the [Authentication section](#authentication). +Complete source code can be found at +[UpdateAndListProjects.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java). 
+```java +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.util.Iterator; + +ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); +Project project = resourceManager.get("some-project-id"); // Use an existing project's ID +if (project != null) { + Project newProject = project.toBuilder() + .addLabel("launch-status", "in-development") + .build() + .replace(); + System.out.println("Updated the labels of project " + newProject.projectId() + + " to be " + newProject.labels()); +} +Iterator<Project> projectIterator = resourceManager.list().iterateAll(); +System.out.println("Projects I can view:"); +while (projectIterator.hasNext()) { + System.out.println(projectIterator.next().projectId()); +} +``` + Google Cloud Storage ---------------------- @@ -97,11 +263,31 @@ Google Cloud Storage #### Preview -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. +Here are two code snippets showing simple usage examples from within Compute/App Engine. Note that you must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. + +The first snippet shows how to create a Storage blob. Complete source code can be found at +[CreateBlob.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateBlob.java). 
```java import static java.nio.charset.StandardCharsets.UTF_8; +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +Storage storage = StorageOptions.defaultInstance().service(); +BlobId blobId = BlobId.of("bucket", "blob_name"); +BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); +Blob blob = storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); +``` +The second snippet shows how to update a Storage blob if it exists. Complete source code can be +found at +[UpdateBlob.java](./gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/UpdateBlob.java). +```java +import static java.nio.charset.StandardCharsets.UTF_8; + import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; import com.google.gcloud.storage.Storage; @@ -110,15 +296,10 @@ import com.google.gcloud.storage.StorageOptions; import java.nio.ByteBuffer; import java.nio.channels.WritableByteChannel; -StorageOptions options = StorageOptions.builder().projectId("project").build(); -Storage storage = options.service(); +Storage storage = StorageOptions.defaultInstance().service(); BlobId blobId = BlobId.of("bucket", "blob_name"); -Blob blob = Blob.load(storage, blobId); -if (blob == null) { - BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); - storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); -} else { - System.out.println("Updating content for " + blobId.name()); +Blob blob = storage.get(blobId); +if (blob != null) { byte[] prevContent = blob.content(); System.out.println(new String(prevContent, UTF_8)); WritableByteChannel channel = blob.writer(); @@ -127,6 +308,11 @@ if (blob == null) { } ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting 
document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -153,7 +339,7 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. @@ -164,7 +350,7 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md -[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md [cloud-platform]: https://cloud.google.com/ @@ -181,3 +367,10 @@ Apache 2.0 - See [LICENSE] for more information. 
[cloud-storage-create-bucket]: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets [cloud-storage-activation]: https://cloud.google.com/storage/docs/signup [storage-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html + +[resourcemanager-api]:http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html +[cloud-resourcemanager-docs]:https://cloud.google.com/resource-manager/ + +[cloud-bigquery]: https://cloud.google.com/bigquery/ +[cloud-bigquery-docs]: https://cloud.google.com/bigquery/docs/overview +[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html diff --git a/RELEASING.md b/RELEASING.md index 419f723fe328..5e2d6202062e 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -10,13 +10,23 @@ This script takes an optional argument denoting the new version. By default, if 2. Create a PR to update the pom.xml version. The PR should look something like [#225](https://github.com/GoogleCloudPlatform/gcloud-java/pull/225). After this PR is merged into GoogleCloudPlatform/gcloud-java, Travis CI will push a new website to GoogleCloudPlatform/gh-pages, push a new artifact to the Maven Central Repository, and update versions in the README files. -3. Create a release on Github manually. -Go to the [releases page](https://github.com/GoogleCloudPlatform/gcloud-java/releases) and click "Draft a new release." Use `vX.Y.Z` as the "Tag Version" and `X.Y.Z` as the "Release Title", where `X.Y.Z` is the release version as listed in the `pom.xml` files. +3. Before moving on, verify that the artifacts have successfully been pushed to the Maven Central Repository. Open Travis CI, click the ["Build History" tab](https://travis-ci.org/GoogleCloudPlatform/gcloud-java/builds), and open the second build's logs for Step 2's PR. 
Be sure that you are not opening the "Pull Request" build logs. When the build finishes, scroll to the end of the log and verify that the artifacts were successfully staged and deployed. You can also search for `gcloud-java` on the [Sonatype website](https://oss.sonatype.org/#nexus-search;quick~gcloud-java) and check the latest version number. If the deployment didn't succeed because of a flaky test, rerun the build. -4. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version). +4. Publish a release on Github manually. +Go to the [releases page](https://github.com/GoogleCloudPlatform/gcloud-java/releases) and open the appropriate release draft. Make sure the "Tag Version" is `vX.Y.Z` and the "Release Title" is `X.Y.Z`, where `X.Y.Z` is the release version as listed in the `pom.xml` files. The draft should already have all changes that impact users since the previous release. To double check this, you can use the `git log` command and look through the merged master branch pull requests. Here is an example of the log command to get non-merge commits between v0.0.12 and v0.1.0: + + ``` + git --no-pager log v0.0.12..v0.1.0 --pretty=oneline --abbrev-commit --no-merges + ``` + + Ensure that the format is consistent with previous releases (for an example, see the [0.1.0 release](https://github.com/GoogleCloudPlatform/gcloud-java/releases/tag/v0.1.0)). After adding any missing updates and reformatting as necessary, publish the draft. Finally, create a new draft for the next release. + +5. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version). As mentioned before, there is an optional version argument. By default, the script will update the version from "X.Y.Z" to "X.Y.Z+1-SNAPSHOT". Suppose a different version is desired, for example X+1.0.0-SNAPSHOT. Then the appropriate command to run would be `utilities/update_pom_version.sh X+1.0.0-SNAPSHOT`. -5. 
Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227). +6. Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227). + +7. Be sure to update App Engine documentation and [java-docs-samples](https://github.com/GoogleCloudPlatform/java-docs-samples) code as necessary. See directions [here](https://docs.google.com/a/google.com/document/d/1SS3xNn2v0qW7EadGUPBUAPIQAH5VY6WSFmT17ZjjUVE/edit?usp=sharing). ### To push a snapshot version diff --git a/TESTING.md b/TESTING.md index 02a3d14ab0bf..3ad181310b17 100644 --- a/TESTING.md +++ b/TESTING.md @@ -1,6 +1,11 @@ ## gcloud-java tools for testing -This library provides tools to help write tests for code that uses gcloud-java services. +This library provides tools to help write tests for code that uses the following gcloud-java services: + +- [Datastore](#testing-code-that-uses-datastore) +- [Storage](#testing-code-that-uses-storage) +- [Resource Manager](#testing-code-that-uses-resource-manager) +- [BigQuery](#testing-code-that-uses-bigquery) ### Testing code that uses Datastore @@ -51,7 +56,8 @@ Currently, there isn't an emulator for Google Cloud Storage, so an alternative i 3. Create a `RemoteGcsHelper` object using your project ID and JSON key. Here is an example that uses the `RemoteGcsHelper` to create a bucket.
```java - RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(PROJECT_ID, "/path/to/my/JSON/key.json"); + RemoteGcsHelper gcsHelper = + RemoteGcsHelper.create(PROJECT_ID, new FileInputStream("/path/to/my/JSON/key.json")); Storage storage = gcsHelper.options().service(); String bucket = RemoteGcsHelper.generateBucketName(); storage.create(BucketInfo.of(bucket)); @@ -65,5 +71,67 @@ Here is an example that clears the bucket created in Step 3 with a timeout of 5 RemoteGcsHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS); ``` +### Testing code that uses Resource Manager + +#### On your machine + +You can test against a temporary local Resource Manager by following these steps: + +1. Before running your testing code, start the Resource Manager emulator `LocalResourceManagerHelper`. This can be done as follows: + + ```java + import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; + + LocalResourceManagerHelper helper = LocalResourceManagerHelper.create(); + helper.start(); + ``` + + This will spawn a server thread that listens to `localhost` at an ephemeral port for Resource Manager requests. + +2. In your program, create and use a Resource Manager service object whose host is set to `localhost` at the appropriate port. For example: + + ```java + ResourceManager resourceManager = LocalResourceManagerHelper.options().service(); + ``` + +3. Run your tests. + +4. Stop the Resource Manager emulator. + + ```java + helper.stop(); + ``` + + This method will block until the server thread has been terminated. + +### Testing code that uses BigQuery + +Currently, there isn't an emulator for Google BigQuery, so an alternative is to create a test +project. `RemoteBigQueryHelper` contains convenience methods to make setting up and cleaning up the +test project easier. To use this class, follow the steps below: + +1. Create a test Google Cloud project. + +2. 
Download a [JSON service account credentials file][create-service-account] from the Google +Developer's Console. + +3. Create a `RemoteBigQueryHelper` object using your project ID and JSON key. +Here is an example that uses the `RemoteBigQueryHelper` to create a dataset. + ```java + RemoteBigQueryHelper bigqueryHelper = + RemoteBigQueryHelper.create(PROJECT_ID, new FileInputStream("/path/to/my/JSON/key.json")); + BigQuery bigquery = bigqueryHelper.options().service(); + String dataset = RemoteBigQueryHelper.generateDatasetName(); + bigquery.create(DatasetInfo.builder(dataset).build()); + ``` + +4. Run your tests. + +5. Clean up the test project by using `forceDelete` to clear any datasets used. +Here is an example that clears the dataset created in Step 3. + ```java + RemoteBigQueryHelper.forceDelete(bigquery, dataset); + ``` [cloud-platform-storage-authentication]:https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts +[create-service-account]:https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount \ No newline at end of file diff --git a/codacy-conf.json b/codacy-conf.json new file mode 100644 index 000000000000..e8c819684c9c --- /dev/null +++ b/codacy-conf.json @@ -0,0 +1 @@ 
+{"patterns":[{"patternId":"Custom_Javascript_Scopes","enabled":true},{"patternId":"Custom_Javascript_EvalWith","enabled":true},{"patternId":"Custom_Javascript_TryCatch","enabled":true},{"patternId":"Custom_Scala_NonFatal","enabled":true},{"patternId":"bitwise","enabled":true},{"patternId":"maxparams","enabled":true},{"patternId":"CSSLint_universal_selector","enabled":true},{"patternId":"CSSLint_unqualified_attributes","enabled":true},{"patternId":"CSSLint_zero_units","enabled":true},{"patternId":"CSSLint_overqualified_elements","enabled":true},{"patternId":"CSSLint_shorthand","enabled":true},{"patternId":"CSSLint_duplicate_background_images","enabled":true},{"patternId":"CSSLint_box_model","enabled":true},{"patternId":"CSSLint_compatible_vendor_prefixes","enabled":true},{"patternId":"CSSLint_display_property_grouping","enabled":true},{"patternId":"CSSLint_duplicate_properties","enabled":true},{"patternId":"CSSLint_empty_rules","enabled":true},{"patternId":"CSSLint_errors","enabled":true},{"patternId":"CSSLint_gradients","enabled":true},{"patternId":"CSSLint_important","enabled":true},{"patternId":"CSSLint_known_properties","enabled":true},{"patternId":"CSSLint_text_indent","enabled":true},{"patternId":"CSSLint_unique_headings","enabled":true},{"patternId":"PyLint_E0100","enabled":true},{"patternId":"PyLint_E0101","enabled":true},{"patternId":"PyLint_E0102","enabled":true},{"patternId":"PyLint_E0103","enabled":true},{"patternId":"PyLint_E0104","enabled":true},{"patternId":"PyLint_E0105","enabled":true},{"patternId":"PyLint_E0106","enabled":true},{"patternId":"PyLint_E0107","enabled":true},{"patternId":"PyLint_E0108","enabled":true},{"patternId":"PyLint_E0202","enabled":true},{"patternId":"PyLint_E0203","enabled":true},{"patternId":"PyLint_E0211","enabled":true},{"patternId":"PyLint_E0601","enabled":true},{"patternId":"PyLint_E0603","enabled":true},{"patternId":"PyLint_E0604","enabled":true},{"patternId":"PyLint_E0701","enabled":true},{"patternId":"PyLint_E0702","ena
bled":true},{"patternId":"PyLint_E0710","enabled":true},{"patternId":"PyLint_E0711","enabled":true},{"patternId":"PyLint_E0712","enabled":true},{"patternId":"PyLint_E1003","enabled":true},{"patternId":"PyLint_E1102","enabled":true},{"patternId":"PyLint_E1111","enabled":true},{"patternId":"PyLint_E1120","enabled":true},{"patternId":"PyLint_E1121","enabled":true},{"patternId":"PyLint_E1123","enabled":true},{"patternId":"PyLint_E1124","enabled":true},{"patternId":"PyLint_E1200","enabled":true},{"patternId":"PyLint_E1201","enabled":true},{"patternId":"PyLint_E1205","enabled":true},{"patternId":"PyLint_E1206","enabled":true},{"patternId":"PyLint_E1300","enabled":true},{"patternId":"PyLint_E1301","enabled":true},{"patternId":"PyLint_E1302","enabled":true},{"patternId":"PyLint_E1303","enabled":true},{"patternId":"PyLint_E1304","enabled":true},{"patternId":"PyLint_E1305","enabled":true},{"patternId":"PyLint_E1306","enabled":true},{"patternId":"rulesets-codesize.xml-CyclomaticComplexity","enabled":true},{"patternId":"rulesets-codesize.xml-NPathComplexity","enabled":true},{"patternId":"rulesets-codesize.xml-ExcessiveMethodLength","enabled":true},{"patternId":"rulesets-codesize.xml-ExcessiveClassLength","enabled":true},{"patternId":"rulesets-codesize.xml-ExcessiveParameterList","enabled":true},{"patternId":"rulesets-codesize.xml-ExcessivePublicCount","enabled":true},{"patternId":"rulesets-codesize.xml-TooManyFields","enabled":true},{"patternId":"rulesets-codesize.xml-TooManyMethods","enabled":true},{"patternId":"rulesets-codesize.xml-ExcessiveClassComplexity","enabled":true},{"patternId":"rulesets-controversial.xml-Superglobals","enabled":true},{"patternId":"rulesets-design.xml-ExitExpression","enabled":true},{"patternId":"rulesets-design.xml-EvalExpression","enabled":true},{"patternId":"rulesets-design.xml-GotoStatement","enabled":true},{"patternId":"rulesets-design.xml-NumberOfChildren","enabled":true},{"patternId":"rulesets-design.xml-DepthOfInheritance","enabled":true},{"p
atternId":"rulesets-unusedcode.xml-UnusedPrivateField","enabled":true},{"patternId":"rulesets-unusedcode.xml-UnusedLocalVariable","enabled":true},{"patternId":"rulesets-unusedcode.xml-UnusedPrivateMethod","enabled":true},{"patternId":"rulesets-unusedcode.xml-UnusedFormalParameter","enabled":true},{"patternId":"PyLint_C0303","enabled":true},{"patternId":"PyLint_C1001","enabled":true},{"patternId":"rulesets-naming.xml-ShortVariable","enabled":true},{"patternId":"rulesets-naming.xml-LongVariable","enabled":true},{"patternId":"rulesets-naming.xml-ShortMethodName","enabled":true},{"patternId":"rulesets-naming.xml-ConstantNamingConventions","enabled":true},{"patternId":"rulesets-naming.xml-BooleanGetMethodName","enabled":true},{"patternId":"PyLint_W0101","enabled":true},{"patternId":"PyLint_W0102","enabled":true},{"patternId":"PyLint_W0104","enabled":true},{"patternId":"PyLint_W0105","enabled":true},{"patternId":"Custom_Scala_GetCalls","enabled":true},{"patternId":"ScalaStyle_EqualsHashCodeChecker","enabled":true},{"patternId":"ScalaStyle_ParameterNumberChecker","enabled":true},{"patternId":"ScalaStyle_ReturnChecker","enabled":true},{"patternId":"ScalaStyle_NullChecker","enabled":true},{"patternId":"ScalaStyle_NoCloneChecker","enabled":true},{"patternId":"ScalaStyle_NoFinalizeChecker","enabled":true},{"patternId":"ScalaStyle_CovariantEqualsChecker","enabled":true},{"patternId":"ScalaStyle_StructuralTypeChecker","enabled":true},{"patternId":"ScalaStyle_MethodLengthChecker","enabled":true},{"patternId":"ScalaStyle_NumberOfMethodsInTypeChecker","enabled":true},{"patternId":"ScalaStyle_WhileChecker","enabled":true},{"patternId":"ScalaStyle_VarFieldChecker","enabled":true},{"patternId":"ScalaStyle_VarLocalChecker","enabled":true},{"patternId":"ScalaStyle_RedundantIfChecker","enabled":true},{"patternId":"ScalaStyle_DeprecatedJavaChecker","enabled":true},{"patternId":"ScalaStyle_EmptyClassChecker","enabled":true},{"patternId":"ScalaStyle_NotImplementedErrorUsage","enabled":true}
,{"patternId":"Custom_Scala_GroupImports","enabled":true},{"patternId":"Custom_Scala_ReservedKeywords","enabled":true},{"patternId":"Custom_Scala_ElseIf","enabled":true},{"patternId":"Custom_Scala_CallByNameAsLastArguments","enabled":true},{"patternId":"Custom_Scala_WildcardImportOnMany","enabled":true},{"patternId":"Custom_Scala_UtilTryForTryCatch","enabled":true},{"patternId":"Custom_Scala_ProhibitObjectName","enabled":true},{"patternId":"Custom_Scala_ImportsAtBeginningOfPackage","enabled":true},{"patternId":"Custom_Scala_NameResultsAndParameters","enabled":true},{"patternId":"Custom_Scala_IncompletePatternMatching","enabled":true},{"patternId":"Custom_Scala_UsefulTypeAlias","enabled":true},{"patternId":"Custom_Scala_JavaThreads","enabled":true},{"patternId":"Custom_Scala_DirectPromiseCreation","enabled":true},{"patternId":"Custom_Scala_StructuralTypes","enabled":true},{"patternId":"Custom_Scala_CollectionLastHead","enabled":true},{"patternId":"PyLint_W0106","enabled":true},{"patternId":"PyLint_W0107","enabled":true},{"patternId":"PyLint_W0108","enabled":true},{"patternId":"PyLint_W0109","enabled":true},{"patternId":"PyLint_W0110","enabled":true},{"patternId":"PyLint_W0120","enabled":true},{"patternId":"PyLint_W0122","enabled":true},{"patternId":"PyLint_W0150","enabled":true},{"patternId":"PyLint_W0199","enabled":true},{"patternId":"rulesets-cleancode.xml-ElseExpression","enabled":true},{"patternId":"rulesets-cleancode.xml-StaticAccess","enabled":true},{"patternId":"ScalaStyle_NonASCIICharacterChecker","enabled":true},{"patternId":"ScalaStyle_FieldNamesChecker","enabled":true},{"patternId":"Custom_Scala_WithNameCalls","enabled":true},{"patternId":"strictexception_AvoidRethrowingException","enabled":true},{"patternId":"strings_AppendCharacterWithChar","enabled":true},{"patternId":"braces_IfElseStmtsMustUseBraces","enabled":true},{"patternId":"basic_AvoidDecimalLiteralsInBigDecimalConstructor","enabled":true},{"patternId":"basic_CheckSkipResult","enabled":true},{"pa
tternId":"javabeans_MissingSerialVersionUID","enabled":true},{"patternId":"migrating_ShortInstantiation","enabled":true},{"patternId":"design_AvoidInstanceofChecksInCatchClause","enabled":true},{"patternId":"naming_LongVariable","enabled":true},{"patternId":"migrating_ReplaceEnumerationWithIterator","enabled":true},{"patternId":"j2ee_DoNotCallSystemExit","enabled":true},{"patternId":"unusedcode_UnusedLocalVariable","enabled":true},{"patternId":"strings_InefficientStringBuffering","enabled":true},{"patternId":"basic_DontUseFloatTypeForLoopIndices","enabled":true},{"patternId":"basic_AvoidBranchingStatementAsLastInLoop","enabled":true},{"patternId":"migrating_JUnit4TestShouldUseTestAnnotation","enabled":true},{"patternId":"optimizations_AddEmptyString","enabled":true},{"patternId":"logging-jakarta-commons_ProperLogger","enabled":true},{"patternId":"optimizations_RedundantFieldInitializer","enabled":true},{"patternId":"logging-java_AvoidPrintStackTrace","enabled":true},{"patternId":"empty_EmptyFinallyBlock","enabled":true},{"patternId":"design_CompareObjectsWithEquals","enabled":true},{"patternId":"basic_ClassCastExceptionWithToArray","enabled":true},{"patternId":"strictexception_DoNotExtendJavaLangError","enabled":true},{"patternId":"junit_UnnecessaryBooleanAssertion","enabled":true},{"patternId":"design_SimplifyBooleanExpressions","enabled":true},{"patternId":"basic_ForLoopShouldBeWhileLoop","enabled":true},{"patternId":"basic_BigIntegerInstantiation","enabled":true},{"patternId":"optimizations_UseArrayListInsteadOfVector","enabled":true},{"patternId":"optimizations_UnnecessaryWrapperObjectCreation","enabled":true},{"patternId":"strings_StringBufferInstantiationWithChar","enabled":true},{"patternId":"basic_JumbledIncrementer","enabled":true},{"patternId":"design_SwitchStmtsShouldHaveDefault","enabled":true},{"patternId":"strictexception_AvoidThrowingRawExceptionTypes","enabled":true},{"patternId":"migrating_LongInstantiation","enabled":true},{"patternId":"design_Simp
lifyBooleanReturns","enabled":true},{"patternId":"empty_EmptyInitializer","enabled":true},{"patternId":"design_FieldDeclarationsShouldBeAtStartOfClass","enabled":true},{"patternId":"unnecessary_UnnecessaryConversionTemporary","enabled":true},{"patternId":"design_AvoidProtectedFieldInFinalClass","enabled":true},{"patternId":"junit_UseAssertTrueInsteadOfAssertEquals","enabled":true},{"patternId":"naming_PackageCase","enabled":true},{"patternId":"migrating_JUnitUseExpected","enabled":true},{"patternId":"controversial_UnnecessaryConstructor","enabled":true},{"patternId":"naming_MethodNamingConventions","enabled":true},{"patternId":"design_DefaultLabelNotLastInSwitchStmt","enabled":true},{"patternId":"basic_UnconditionalIfStatement","enabled":true},{"patternId":"design_SingularField","enabled":true},{"patternId":"design_AssignmentToNonFinalStatic","enabled":true},{"patternId":"braces_WhileLoopsMustUseBraces","enabled":true},{"patternId":"logging-java_SystemPrintln","enabled":true},{"patternId":"strings_UseStringBufferLength","enabled":true},{"patternId":"controversial_AvoidUsingNativeCode","enabled":true},{"patternId":"strictexception_AvoidLosingExceptionInformation","enabled":true},{"patternId":"imports_ImportFromSamePackage","enabled":true},{"patternId":"finalizers_AvoidCallingFinalize","enabled":true},{"patternId":"finalizers_FinalizeOverloaded","enabled":true},{"patternId":"naming_ClassNamingConventions","enabled":true},{"patternId":"logging-java_LoggerIsNotStaticFinal","enabled":true},{"patternId":"finalizers_FinalizeOnlyCallsSuperFinalize","enabled":true},{"patternId":"unnecessary_UselessOverridingMethod","enabled":true},{"patternId":"naming_SuspiciousConstantFieldName","enabled":true},{"patternId":"design_OptimizableToArrayCall","enabled":true},{"patternId":"imports_UnnecessaryFullyQualifiedName","enabled":true},{"patternId":"migrating_ReplaceHashtableWithMap","enabled":true},{"patternId":"unusedcode_UnusedPrivateField","enabled":true},{"patternId":"strings_Unnece
ssaryCaseChange","enabled":true},{"patternId":"migrating_IntegerInstantiation","enabled":true},{"patternId":"design_NonStaticInitializer","enabled":true},{"patternId":"design_MissingBreakInSwitch","enabled":true},{"patternId":"design_AvoidReassigningParameters","enabled":true},{"patternId":"basic_AvoidThreadGroup","enabled":true},{"patternId":"empty_EmptyCatchBlock","parameters":{"allowCommentedBlocks":"true"},"enabled":true},{"patternId":"codesize_ExcessiveParameterList","parameters":{"minimum":"8","violationSuppressRegex":"\"\"","violationSuppressXPath":"\"\""},"enabled":true},{"patternId":"naming_SuspiciousHashcodeMethodName","enabled":true},{"patternId":"migrating_JUnit4TestShouldUseBeforeAnnotation","enabled":true},{"patternId":"design_UncommentedEmptyMethodBody","enabled":true},{"patternId":"basic_BrokenNullCheck","enabled":true},{"patternId":"strings_ConsecutiveLiteralAppends","enabled":true},{"patternId":"strings_StringInstantiation","enabled":true},{"patternId":"design_EqualsNull","enabled":true},{"patternId":"basic_OverrideBothEqualsAndHashcode","enabled":true},{"patternId":"design_InstantiationToGetClass","enabled":true},{"patternId":"basic_BooleanInstantiation","enabled":true},{"patternId":"strings_AvoidStringBufferField","enabled":true},{"patternId":"basic_ReturnFromFinallyBlock","enabled":true},{"patternId":"empty_EmptyTryBlock","enabled":true},{"patternId":"naming_SuspiciousEqualsMethodName","enabled":true},{"patternId":"basic_ExtendsObject","enabled":true},{"patternId":"strings_UselessStringValueOf","enabled":true},{"patternId":"design_UnsynchronizedStaticDateFormatter","enabled":true},{"patternId":"design_UseCollectionIsEmpty","enabled":true},{"patternId":"controversial_AvoidFinalLocalVariable","enabled":true},{"patternId":"strictexception_AvoidThrowingNullPointerException","enabled":true},{"patternId":"design_AvoidProtectedMethodInFinalClassNotExtending","enabled":true},{"patternId":"optimizations_PrematureDeclaration","enabled":true},{"patternId":
"empty_EmptySwitchStatements","enabled":true},{"patternId":"basic_MisplacedNullCheck","enabled":true},{"patternId":"optimizations_UseStringBufferForStringAppends","enabled":true},{"patternId":"strings_StringToString","enabled":true},{"patternId":"naming_MethodWithSameNameAsEnclosingClass","enabled":true},{"patternId":"migrating_ReplaceVectorWithList","enabled":true},{"patternId":"imports_UnusedImports","enabled":true},{"patternId":"unnecessary_UnnecessaryFinalModifier","enabled":true},{"patternId":"basic_AvoidMultipleUnaryOperators","enabled":true},{"patternId":"junit_SimplifyBooleanAssertion","enabled":true},{"patternId":"unnecessary_UselessParentheses","enabled":true},{"patternId":"design_IdempotentOperations","enabled":true},{"patternId":"braces_IfStmtsMustUseBraces","enabled":true},{"patternId":"strings_UseIndexOfChar","enabled":true},{"patternId":"naming_NoPackage","enabled":true},{"patternId":"finalizers_FinalizeDoesNotCallSuperFinalize","enabled":true},{"patternId":"design_UseVarargs","enabled":true},{"patternId":"unusedcode_UnusedFormalParameter","enabled":true},{"patternId":"design_ReturnEmptyArrayRatherThanNull","enabled":true},{"patternId":"junit_UseAssertNullInsteadOfAssertTrue","enabled":true},{"patternId":"design_UseUtilityClass","enabled":true},{"patternId":"design_AvoidDeeplyNestedIfStmts","enabled":true},{"patternId":"empty_EmptyStatementNotInLoop","enabled":true},{"patternId":"junit_UseAssertSameInsteadOfAssertTrue","enabled":true},{"patternId":"braces_ForLoopsMustUseBraces","enabled":true},{"patternId":"controversial_DoNotCallGarbageCollectionExplicitly","enabled":true},{"patternId":"naming_GenericsNaming","enabled":true},{"patternId":"strings_UseEqualsToCompareStrings","enabled":true},{"patternId":"optimizations_AvoidArrayLoops","enabled":true},{"patternId":"empty_EmptyStaticInitializer","enabled":true},{"patternId":"design_UncommentedEmptyConstructor","enabled":true},{"patternId":"empty_EmptyStatementBlock","enabled":true},{"patternId":"basic_Co
llapsibleIfStatements","enabled":true},{"patternId":"design_FinalFieldCouldBeStatic","enabled":true},{"patternId":"logging-java_MoreThanOneLogger","enabled":true},{"patternId":"codesize_ExcessiveClassLength","enabled":true},{"patternId":"design_ImmutableField","enabled":true},{"patternId":"controversial_OneDeclarationPerLine","enabled":true},{"patternId":"empty_EmptyWhileStmt","enabled":true},{"patternId":"unnecessary_UnnecessaryReturn","enabled":true},{"patternId":"strings_InefficientEmptyStringCheck","enabled":true},{"patternId":"design_UseNotifyAllInsteadOfNotify","enabled":true},{"patternId":"strictexception_DoNotThrowExceptionInFinally","enabled":true},{"patternId":"junit_UseAssertEqualsInsteadOfAssertTrue","enabled":true},{"patternId":"typeresolution_CloneMethodMustImplementCloneable","enabled":true},{"patternId":"codesize_NPathComplexity","enabled":true},{"patternId":"imports_DontImportJavaLang","enabled":true},{"patternId":"empty_EmptySynchronizedBlock","enabled":true},{"patternId":"migrating_JUnit4TestShouldUseAfterAnnotation","enabled":true},{"patternId":"design_AvoidConstantsInterface","enabled":true},{"patternId":"unnecessary_UselessOperationOnImmutable","enabled":true},{"patternId":"design_PositionLiteralsFirstInComparisons","enabled":true},{"patternId":"migrating_ByteInstantiation","enabled":true},{"patternId":"junit_JUnitSpelling","enabled":true},{"patternId":"junit_JUnitTestsShouldIncludeAssert","enabled":true},{"patternId":"finalizers_EmptyFinalizer","enabled":true},{"patternId":"design_NonCaseLabelInSwitchStatement","enabled":true},{"patternId":"android_DoNotHardCodeSDCard","enabled":true},{"patternId":"design_LogicInversion","enabled":true},{"patternId":"unusedcode_UnusedPrivateMethod","enabled":true},{"patternId":"naming_AvoidDollarSigns","enabled":true},{"patternId":"finalizers_FinalizeShouldBeProtected","enabled":true},{"patternId":"clone_ProperCloneImplementation","enabled":true},{"patternId":"basic_CheckResultSet","enabled":true},{"patternId"
:"controversial_AvoidPrefixingMethodParameters","enabled":true},{"patternId":"migrating_JUnit4SuitesShouldUseSuiteAnnotation","enabled":true},{"patternId":"empty_EmptyIfStmt","enabled":true},{"patternId":"basic_DontCallThreadRun","enabled":true},{"patternId":"junit_JUnitStaticSuite","enabled":true},{"patternId":"optimizations_UseArraysAsList","enabled":true},{"patternId":"design_MissingStaticMethodInNonInstantiatableClass","enabled":true},{"patternId":"unusedcode_UnusedModifier","enabled":true},{"patternId":"Style_MethodName","enabled":true},{"patternId":"Metrics_CyclomaticComplexity","enabled":true},{"patternId":"Lint_DuplicateMethods","enabled":true},{"patternId":"Style_Lambda","enabled":true},{"patternId":"Lint_UselessSetterCall","enabled":true},{"patternId":"Style_VariableName","enabled":true},{"patternId":"Lint_AmbiguousOperator","enabled":true},{"patternId":"Style_LeadingCommentSpace","enabled":true},{"patternId":"Style_CaseEquality","enabled":true},{"patternId":"Lint_StringConversionInInterpolation","enabled":true},{"patternId":"Performance_ReverseEach","enabled":true},{"patternId":"Lint_LiteralInCondition","enabled":true},{"patternId":"Performance_Sample","enabled":true},{"patternId":"Style_NonNilCheck","enabled":true},{"patternId":"Lint_RescueException","enabled":true},{"patternId":"Lint_UselessElseWithoutRescue","enabled":true},{"patternId":"Style_ConstantName","enabled":true},{"patternId":"Lint_LiteralInInterpolation","enabled":true},{"patternId":"Lint_NestedMethodDefinition","enabled":true},{"patternId":"Style_DoubleNegation","enabled":true},{"patternId":"Lint_SpaceBeforeFirstArg","enabled":true},{"patternId":"Lint_Debugger","enabled":true},{"patternId":"Style_ClassVars","enabled":true},{"patternId":"Lint_EmptyEnsure","enabled":true},{"patternId":"Style_MultilineBlockLayout","enabled":true},{"patternId":"Lint_UnusedBlockArgument","enabled":true},{"patternId":"Lint_UselessAccessModifier","enabled":true},{"patternId":"Performance_Size","enabled":true},{"pa
tternId":"Lint_EachWithObjectArgument","enabled":true},{"patternId":"Style_Alias","enabled":true},{"patternId":"Lint_Loop","enabled":true},{"patternId":"Style_NegatedWhile","enabled":true},{"patternId":"Style_ColonMethodCall","enabled":true},{"patternId":"Lint_AmbiguousRegexpLiteral","enabled":true},{"patternId":"Lint_UnusedMethodArgument","enabled":true},{"patternId":"Style_MultilineIfThen","enabled":true},{"patternId":"Lint_EnsureReturn","enabled":true},{"patternId":"Style_NegatedIf","enabled":true},{"patternId":"Lint_Eval","enabled":true},{"patternId":"Style_NilComparison","enabled":true},{"patternId":"Style_ArrayJoin","enabled":true},{"patternId":"Lint_ConditionPosition","enabled":true},{"patternId":"Lint_UnreachableCode","enabled":true},{"patternId":"Performance_Count","enabled":true},{"patternId":"Lint_EmptyInterpolation","enabled":true},{"patternId":"Style_LambdaCall","enabled":true},{"patternId":"Lint_HandleExceptions","enabled":true},{"patternId":"Lint_ShadowingOuterLocalVariable","enabled":true},{"patternId":"Lint_EndAlignment","enabled":true},{"patternId":"Style_MultilineTernaryOperator","enabled":true},{"patternId":"Style_AutoResourceCleanup","enabled":true},{"patternId":"Lint_ElseLayout","enabled":true},{"patternId":"Style_NestedTernaryOperator","enabled":true},{"patternId":"Style_OneLineConditional","enabled":true},{"patternId":"Style_EmptyElse","enabled":true},{"patternId":"Lint_UselessComparison","enabled":true},{"patternId":"Metrics_PerceivedComplexity","enabled":true},{"patternId":"Style_InfiniteLoop","enabled":true},{"patternId":"Rails_Date","enabled":true},{"patternId":"Style_EvenOdd","enabled":true},{"patternId":"Style_IndentationConsistency","enabled":true},{"patternId":"Style_ModuleFunction","enabled":true},{"patternId":"Lint_UselessAssignment","enabled":true},{"patternId":"Style_EachWithObject","enabled":true},{"patternId":"Performance_Detect","enabled":true},{"patternId":"duplicate_key","enabled":true},{"patternId":"no_interpolation_in_singl
e_quotes","enabled":true},{"patternId":"no_backticks","enabled":true},{"patternId":"no_unnecessary_fat_arrows","enabled":true},{"patternId":"indentation","enabled":true},{"patternId":"ensure_comprehensions","enabled":true},{"patternId":"no_stand_alone_at","enabled":true},{"patternId":"cyclomatic_complexity","enabled":true},{"patternId":"Deserialize","enabled":true},{"patternId":"SymbolDoS","enabled":true},{"patternId":"SkipBeforeFilter","enabled":true},{"patternId":"SanitizeMethods","enabled":true},{"patternId":"SelectTag","enabled":true},{"patternId":"XMLDoS","enabled":true},{"patternId":"SimpleFormat","enabled":true},{"patternId":"Evaluation","enabled":true},{"patternId":"BasicAuth","enabled":true},{"patternId":"JRubyXML","enabled":true},{"patternId":"RenderInline","enabled":true},{"patternId":"YAMLParsing","enabled":true},{"patternId":"Redirect","enabled":true},{"patternId":"UnsafeReflection","enabled":true},{"patternId":"SSLVerify","enabled":true},{"patternId":"HeaderDoS","enabled":true},{"patternId":"TranslateBug","enabled":true},{"patternId":"Execute","enabled":true},{"patternId":"JSONParsing","enabled":true},{"patternId":"LinkTo","enabled":true},{"patternId":"FileDisclosure","enabled":true},{"patternId":"SafeBufferManipulation","enabled":true},{"patternId":"ModelAttributes","enabled":true},{"patternId":"ResponseSplitting","enabled":true},{"patternId":"DigestDoS","enabled":true},{"patternId":"Send","enabled":true},{"patternId":"MailTo","enabled":true},{"patternId":"SymbolDoSCVE","enabled":true},{"patternId":"StripTags","enabled":true},{"patternId":"MassAssignment","enabled":true},{"patternId":"RegexDoS","enabled":true},{"patternId":"SelectVulnerability","enabled":true},{"patternId":"FileAccess","enabled":true},{"patternId":"ContentTag","enabled":true},{"patternId":"SessionSettings","enabled":true},{"patternId":"FilterSkipping","enabled":true},{"patternId":"CreateWith","enabled":true},{"patternId":"JSONEncoding","enabled":true},{"patternId":"SQLCVEs","enabled":
true},{"patternId":"ForgerySetting","enabled":true},{"patternId":"QuoteTableName","enabled":true},{"patternId":"I18nXSS","enabled":true},{"patternId":"WithoutProtection","enabled":true},{"patternId":"CrossSiteScripting","enabled":true},{"patternId":"SingleQuotes","enabled":true},{"patternId":"NestedAttributes","enabled":true},{"patternId":"DetailedExceptions","enabled":true},{"patternId":"LinkToHref","enabled":true},{"patternId":"RenderDoS","enabled":true},{"patternId":"ModelSerialize","enabled":true},{"patternId":"SQL","enabled":true},{"patternId":"Render","enabled":true},{"patternId":"UnscopedFind","enabled":true},{"patternId":"ValidationRegex","enabled":true},{"patternId":"EscapeFunction","enabled":true},{"patternId":"Custom_Scala_FieldNamesChecker","enabled":true},{"patternId":"Custom_Scala_ObjDeserialization","enabled":true},{"patternId":"Custom_Scala_RSAPadding","enabled":true},{"patternId":"ESLint_no-extra-boolean-cast","enabled":true},{"patternId":"ESLint_no-iterator","enabled":true},{"patternId":"ESLint_no-invalid-regexp","enabled":true},{"patternId":"ESLint_no-obj-calls","enabled":true},{"patternId":"ESLint_no-sparse-arrays","enabled":true},{"patternId":"ESLint_no-unreachable","enabled":true},{"patternId":"ESLint_no-dupe-keys","enabled":true},{"patternId":"ESLint_no-multi-str","enabled":true},{"patternId":"ESLint_no-extend-native","enabled":true},{"patternId":"ESLint_guard-for-in","enabled":true},{"patternId":"ESLint_no-func-assign","enabled":true},{"patternId":"ESLint_no-extra-semi","enabled":true},{"patternId":"ESLint_camelcase","enabled":true},{"patternId":"ESLint_no-mixed-spaces-and-tabs","enabled":true},{"patternId":"ESLint_no-undef","enabled":true},{"patternId":"ESLint_semi","enabled":true},{"patternId":"ESLint_no-empty-character-class","enabled":true},{"patternId":"ESLint_complexity","enabled":true},{"patternId":"ESLint_no-dupe-class-members","enabled":true},{"patternId":"ESLint_no-debugger","enabled":true},{"patternId":"ESLint_block-scoped-var","en
abled":true},{"patternId":"ESLint_no-loop-func","enabled":true},{"patternId":"ESLint_no-use-before-define","enabled":true},{"patternId":"ESLint_no-console","enabled":true},{"patternId":"ESLint_require-yield","enabled":true},{"patternId":"ESLint_no-redeclare","enabled":true},{"patternId":"ESLint_no-undefined","enabled":true},{"patternId":"ESLint_use-isnan","enabled":true},{"patternId":"ESLint_no-control-regex","enabled":true},{"patternId":"ESLint_no-const-assign","enabled":true},{"patternId":"ESLint_no-new","enabled":true},{"patternId":"ESLint_new-cap","enabled":true},{"patternId":"ESLint_no-irregular-whitespace","enabled":true},{"patternId":"ESLint_object-shorthand","enabled":true},{"patternId":"ESLint_no-ex-assign","enabled":true},{"patternId":"ESLint_wrap-iife","enabled":true},{"patternId":"ESLint_arrow-parens","enabled":true},{"patternId":"ESLint_no-constant-condition","enabled":true},{"patternId":"ESLint_no-octal","enabled":true},{"patternId":"ESLint_no-dupe-args","enabled":true},{"patternId":"ESLint_quotes","enabled":true},{"patternId":"ESLint_no-fallthrough","enabled":true},{"patternId":"ESLint_no-delete-var","enabled":true},{"patternId":"ESLint_no-caller","enabled":true},{"patternId":"ESLint_no-cond-assign","enabled":true},{"patternId":"ESLint_no-this-before-super","enabled":true},{"patternId":"ESLint_no-negated-in-lhs","enabled":true},{"patternId":"ESLint_no-inner-declarations","enabled":true},{"patternId":"ESLint_eqeqeq","enabled":true},{"patternId":"ESLint_curly","enabled":true},{"patternId":"ESLint_arrow-spacing","enabled":true},{"patternId":"ESLint_no-empty","enabled":true},{"patternId":"ESLint_no-unused-vars","enabled":true},{"patternId":"ESLint_generator-star-spacing","enabled":true},{"patternId":"ESLint_no-duplicate-case","enabled":true},{"patternId":"ESLint_valid-typeof","enabled":true},{"patternId":"ESLint_no-regex-spaces","enabled":true},{"patternId":"ESLint_no-class-assign","enabled":true},{"patternId":"PyLint_W0221","enabled":true},{"patternId":"
PyLint_E0117","enabled":true},{"patternId":"PyLint_E0001","enabled":true},{"patternId":"PyLint_E0241","enabled":true},{"patternId":"PyLint_W0404","enabled":true},{"patternId":"PyLint_E0704","enabled":true},{"patternId":"PyLint_E0703","enabled":true},{"patternId":"PyLint_E0302","enabled":true},{"patternId":"PyLint_W1301","enabled":true},{"patternId":"PyLint_R0201","enabled":true},{"patternId":"PyLint_E0113","enabled":true},{"patternId":"PyLint_W0410","enabled":true},{"patternId":"PyLint_C0123","enabled":true},{"patternId":"PyLint_E0115","enabled":true},{"patternId":"PyLint_E0114","enabled":true},{"patternId":"PyLint_E1126","enabled":true},{"patternId":"PyLint_W0702","enabled":true},{"patternId":"PyLint_W1303","enabled":true},{"patternId":"PyLint_W0622","enabled":true},{"patternId":"PyLint_W0222","enabled":true},{"patternId":"PyLint_W0233","enabled":true},{"patternId":"PyLint_W1305","enabled":true},{"patternId":"PyLint_E1127","enabled":true},{"patternId":"PyLint_E0112","enabled":true},{"patternId":"PyLint_W0611","enabled":true},{"patternId":"PyLint_W0601","enabled":true},{"patternId":"PyLint_W1300","enabled":true},{"patternId":"PyLint_W0124","enabled":true},{"patternId":"PyLint_R0203","enabled":true},{"patternId":"PyLint_E0236","enabled":true},{"patternId":"PyLint_W0612","enabled":true},{"patternId":"PyLint_W0604","enabled":true},{"patternId":"PyLint_W0705","enabled":true},{"patternId":"PyLint_E0238","enabled":true},{"patternId":"PyLint_W0602","enabled":true},{"patternId":"PyLint_R0102","enabled":true},{"patternId":"PyLint_R0202","enabled":true},{"patternId":"PyLint_E0240","enabled":true},{"patternId":"PyLint_W0623","enabled":true},{"patternId":"PyLint_W0711","enabled":true},{"patternId":"PyLint_E0116","enabled":true},{"patternId":"PyLint_E0239","enabled":true},{"patternId":"PyLint_E1132","enabled":true},{"patternId":"PyLint_W1307","enabled":true},{"patternId":"PyLint_C0200","enabled":true},{"patternId":"PyLint_E0301","enabled":true},{"patternId":"PyLint_W1306","enabl
ed":true},{"patternId":"PyLint_W1302","enabled":true},{"patternId":"PyLint_E0110","enabled":true},{"patternId":"PyLint_E1125","enabled":true}]} \ No newline at end of file diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md new file mode 100644 index 000000000000..81b5db71bcac --- /dev/null +++ b/gcloud-java-bigquery/README.md @@ -0,0 +1,258 @@ +Google Cloud Java Client for BigQuery (Alpha) +==================================== + +Java idiomatic client for [Google Cloud BigQuery] (https://cloud.google.com/bigquery). + +[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) +[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) +[![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) + +- [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) +- [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html) + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. 
+ +Quickstart +---------- +If you are using Maven, add this to your pom.xml file +```xml + + com.google.gcloud + gcloud-java-bigquery + 0.1.5 + +``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-bigquery:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-bigquery" % "0.1.5" +``` + +Example Application +------------------- +- [`BigQueryExample`](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java) - A simple command line interface providing some of Cloud BigQuery's functionality. +Read more about using this application on the [`BigQueryExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/bigquery/BigQueryExample.html). + +Authentication +-------------- + +See the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) section in the base directory's README. + +About Google Cloud BigQuery +-------------------------- + +[Google Cloud BigQuery][cloud-bigquery] is a fully managed, NoOps, low cost data analytics service. +Data can be streamed into BigQuery at millions of rows per second to enable real-time analysis. +With BigQuery you can easily deploy Petabyte-scale Databases. + +Be sure to activate the Google Cloud BigQuery API on the Developer's Console to use BigQuery from +your project. + +See the ``gcloud-java`` API [bigquery documentation][bigquery-api] to learn how to interact +with Google Cloud BigQuery using this Client Library. + +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a +[Google Developers Console](https://console.developers.google.com/) project with the BigQuery API +enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to +use Google Cloud BigQuery. 
+[Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your +project set up. You will also need to set up the local development environment by [installing the +Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: +`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-bigquery` library. See the [Quickstart](#quickstart) section +to add `gcloud-java-bigquery` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud BigQuery, you must create a service object with +credentials. You can then make API calls by calling methods on the BigQuery service object. The +simplest way to authenticate is to use +[Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). +These credentials are automatically inferred from your environment, so you only need the following +code to create your service object: + +```java +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; + +BigQuery bigquery = BigQueryOptions.defaultInstance().service(); +``` + +For other authentication options, see the +[Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Creating a dataset +With BigQuery you can create datasets. A dataset is a grouping mechanism that holds zero or more +tables. 
Add the following import at the top of your file: + +```java +import com.google.gcloud.bigquery.DatasetInfo; +``` +Then, to create the dataset, use the following code: + +```java +// Create a dataset +String datasetId = "my_dataset_id"; +bigquery.create(DatasetInfo.builder(datasetId).build()); +``` + +#### Creating a table +With BigQuery you can create different types of tables: normal tables with an associated schema, +external tables backed by data stored on [Google Cloud Storage][cloud-storage] and view tables that +are created from a BigQuery SQL query. In this code snippet we show how to create a normal table +with only one string field. Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; +``` +Then add the following code to create the table: + +```java +TableId tableId = TableId.of(datasetId, "my_table_id"); +// Table field definition +Field stringField = Field.of("StringField", Field.Type.string()); +// Table schema definition +Schema schema = Schema.of(stringField); +// Create a table +StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema); +Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); +``` + +#### Loading data into a table +BigQuery provides several ways to load data into a table: streaming rows or loading data from a +Google Cloud Storage file. In this code snippet we show how to stream rows into a table. 
+Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.InsertAllRequest; +import com.google.gcloud.bigquery.InsertAllResponse; + +import java.util.HashMap; +import java.util.Map; +``` +Then add the following code to insert data: + +```java +Map firstRow = new HashMap<>(); +Map secondRow = new HashMap<>(); +firstRow.put("StringField", "value1"); +secondRow.put("StringField", "value2"); +// Create an insert request +InsertAllRequest insertRequest = InsertAllRequest.builder(tableId) + .addRow(firstRow) + .addRow(secondRow) + .build(); +// Insert rows +InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); +// Check if errors occurred +if (insertResponse.hasErrors()) { + System.out.println("Errors occurred while inserting rows"); +} +``` + +#### Querying data +BigQuery enables querying data by running queries and waiting for the result. Queries can be run +directly or through a Query Job. In this code snippet we show how to run a query directly and wait +for the result. 
Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; + +import java.util.Iterator; +import java.util.List; +``` +Then add the following code to run the query and wait for the result: + +```java +// Create a query request +QueryRequest queryRequest = + QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") + .maxWaitTime(60000L) + .pageSize(1000L) + .build(); +// Request query to be executed and wait for results +QueryResponse queryResponse = bigquery.query(queryRequest); +while (!queryResponse.jobComplete()) { + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); +} +// Read rows +Iterator> rowIterator = queryResponse.result().iterateAll(); +System.out.println("Table rows:"); +while (rowIterator.hasNext()) { + System.out.println(rowIterator.next()); +} +``` +#### Complete source code + +In +[InsertDataAndQueryTable.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java) +we put together all the code shown above into one program. The program assumes that you are +running on Compute Engine or from your own desktop. To run the example on App Engine, simply move +the code from the main method to your application's servlet class and change the print statements to +display on your webpage. + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*`[shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + +Java Versions +------------- + +Java 7 or above is required for using this client. + +Testing +------- + +This library has tools to help make tests for code using Cloud BigQuery. + +See [TESTING] to read more about testing. 
+ +Versioning +---------- + +This library follows [Semantic Versioning] (http://semver.org/). + +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING] for more information on how to get started. + +License +------- + +Apache 2.0 - See [LICENSE] for more information. + + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-bigquery +[cloud-platform]: https://cloud.google.com/ + +[cloud-bigquery]: https://cloud.google.com/bigquery/ +[cloud-storage]: https://cloud.google.com/storage/ +[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html diff --git a/gcloud-java-bigquery/pom.xml b/gcloud-java-bigquery/pom.xml new file mode 100644 index 000000000000..9a2137cb987d --- /dev/null +++ b/gcloud-java-bigquery/pom.xml @@ -0,0 +1,55 @@ + + + 4.0.0 + gcloud-java-bigquery + jar + GCloud Java bigquery + + Java idiomatic client for Google Cloud BigQuery. 
+ + + com.google.gcloud + gcloud-java-pom + 0.1.6-SNAPSHOT + + + gcloud-java-bigquery + + + + ${project.groupId} + gcloud-java-core + ${project.version} + + + ${project.groupId} + gcloud-java-storage + ${project.version} + test + + + com.google.apis + google-api-services-bigquery + v2-rev270-1.21.0 + compile + + + com.google.guava + guava-jdk5 + + + + + junit + junit + 4.12 + test + + + org.easymock + easymock + 3.4 + test + + + diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java new file mode 100644 index 000000000000..b8e1a817c836 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java @@ -0,0 +1,446 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Dataset.Access; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Access Control for a BigQuery Dataset. BigQuery uses ACLs to manage permissions on datasets. ACLs + * are not directly supported on tables. A table inherits its ACL from the dataset that contains it. + * Project roles affect your ability to run jobs or manage the project, while dataset roles affect + * how you can access or modify the data inside of a project. 
+ * + * @see Access Control + */ +public final class Acl implements Serializable { + + private static final long serialVersionUID = 8357269726277191556L; + + private final Entity entity; + private final Role role; + + /** + * Dataset roles supported by BigQuery. + * + * @see Dataset Roles + */ + public enum Role { + /** + * Can read, query, copy or export tables in the dataset. + */ + READER, + + /** + * Same as {@link #READER} plus can edit or append data in the dataset. + */ + WRITER, + + /** + * Same as {@link #WRITER} plus can update and delete the dataset. + */ + OWNER + } + + /** + * Base class for BigQuery entities that can be grant access to the dataset. + */ + public abstract static class Entity implements Serializable { + + private static final long serialVersionUID = 8111776788607959944L; + + private final Type type; + + /** + * Types of BigQuery entities. + */ + public enum Type { + DOMAIN, GROUP, USER, VIEW + } + + Entity(Type type) { + this.type = type; + } + + public Type type() { + return type; + } + + abstract Access toPb(); + + static Entity fromPb(Access access) { + if (access.getDomain() != null) { + return new Domain(access.getDomain()); + } + if (access.getGroupByEmail() != null) { + return new Group(access.getGroupByEmail()); + } + if (access.getSpecialGroup() != null) { + return new Group(access.getSpecialGroup()); + } + if (access.getUserByEmail() != null) { + return new User(access.getUserByEmail()); + } + if (access.getView() != null) { + return new View(TableId.fromPb(access.getView())); + } + // Unreachable + throw new BigQueryException(BigQueryException.UNKNOWN_CODE, + "Unrecognized access configuration"); + } + } + + /** + * Class for a BigQuery Domain entity. Objects of this class represent a domain to grant access + * to. Any users signed in with the domain specified will be granted the specified access. 
+ */ + public static final class Domain extends Entity { + + private static final long serialVersionUID = -3033025857280447253L; + + private final String domain; + + /** + * Creates a Domain entity given the domain name. + */ + public Domain(String domain) { + super(Type.DOMAIN); + this.domain = domain; + } + + /** + * Returns the domain name. + */ + public String domain() { + return domain; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Domain domainEntity = (Domain) obj; + return Objects.equals(type(), domainEntity.type()) + && Objects.equals(domain, domainEntity.domain()); + } + + @Override + public int hashCode() { + return Objects.hash(type(), domain); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setDomain(domain); + } + } + + /** + * Class for a BigQuery Group entity. Objects of this class represent a group to granted access + * to. A Group entity can be created given the group's email or can be a special group: + * {@link #ofProjectOwners()}, {@link #ofProjectReaders()}, {@link #ofProjectWriters()} or + * {@link #ofAllAuthenticatedUsers()}. + */ + public static final class Group extends Entity { + + private static final String PROJECT_OWNERS = "projectOwners"; + private static final String PROJECT_READERS = "projectReaders"; + private static final String PROJECT_WRITERS = "projectWriters"; + private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers"; + private static final long serialVersionUID = 5146829352398103029L; + + private final String identifier; + + /** + * Creates a Group entity given its identifier. Identifier can be either a + * + * special group identifier or a group email. 
+ */ + public Group(String identifier) { + super(Type.GROUP); + this.identifier = identifier; + } + + /** + * Returns group's identifier, can be either a + * + * special group identifier or a group email. + */ + public String identifier() { + return identifier; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Group group = (Group) obj; + return Objects.equals(type(), group.type()) && Objects.equals(identifier, group.identifier); + } + + @Override + public int hashCode() { + return Objects.hash(type(), identifier); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + switch (identifier) { + case PROJECT_OWNERS: + return new Access().setSpecialGroup(PROJECT_OWNERS); + case PROJECT_READERS: + return new Access().setSpecialGroup(PROJECT_READERS); + case PROJECT_WRITERS: + return new Access().setSpecialGroup(PROJECT_WRITERS); + case ALL_AUTHENTICATED_USERS: + return new Access().setSpecialGroup(ALL_AUTHENTICATED_USERS); + default: + return new Access().setGroupByEmail(identifier); + } + } + + /** + * Returns a Group entity representing all project's owners. + */ + public static Group ofProjectOwners() { + return new Group(PROJECT_OWNERS); + } + + /** + * Returns a Group entity representing all project's readers. + */ + public static Group ofProjectReaders() { + return new Group(PROJECT_READERS); + } + + /** + * Returns a Group entity representing all project's writers. + */ + public static Group ofProjectWriters() { + return new Group(PROJECT_WRITERS); + } + + /** + * Returns a Group entity representing all BigQuery authenticated users. + */ + public static Group ofAllAuthenticatedUsers() { + return new Group(ALL_AUTHENTICATED_USERS); + } + } + + /** + * Class for a BigQuery User entity. Objects of this class represent a user to grant access to + * given the email address. 
+ */ + public static final class User extends Entity { + + private static final long serialVersionUID = -4942821351073996141L; + + private final String email; + + /** + * Creates a User entity given the user's email. + */ + public User(String email) { + super(Type.USER); + this.email = email; + } + + /** + * Returns user's email. + */ + public String email() { + return email; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + User user = (User) obj; + return Objects.equals(type(), user.type()) && Objects.equals(email, user.email); + } + + @Override + public int hashCode() { + return Objects.hash(type(), email); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setUserByEmail(email); + } + } + + /** + * Class for a BigQuery View entity. Objects of this class represent a view from a different + * dataset to grant access to. Queries executed against that view will have read access to tables + * in this dataset. The role field is not required when this field is set. If that view is updated + * by any user, access to the view needs to be granted again via an update operation. + */ + public static final class View extends Entity { + + private static final long serialVersionUID = -6851072781269419383L; + + private final TableId id; + + /** + * Creates a View entity given the view's id. + */ + public View(TableId id) { + super(Type.VIEW); + this.id = id; + } + + /** + * Returns table's identity. 
+ */ + public TableId id() { + return id; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + View view = (View) obj; + return Objects.equals(type(), view.type()) && Objects.equals(id, view.id); + } + + @Override + public int hashCode() { + return Objects.hash(type(), id); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setView(id.toPb()); + } + } + + private Acl(Entity entity, Role role) { + this.entity = checkNotNull(entity); + this.role = role; + } + + /** + * Returns the entity for this ACL. + */ + public Entity entity() { + return entity; + } + + /** + * Returns the role specified by this ACL. + */ + public Role role() { + return role; + } + + /** + * Returns an Acl object. + * + * @param entity the entity for this ACL object + * @param role the role to associate to the {@code entity} object + */ + public static Acl of(Entity entity, Role role) { + return new Acl(entity, role); + } + + /** + * Returns an Acl object for a view entity. + */ + public static Acl of(View view) { + return new Acl(view, null); + } + + @Override + public int hashCode() { + return Objects.hash(entity, role); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final Acl other = (Acl) obj; + return Objects.equals(this.entity, other.entity) + && Objects.equals(this.role, other.role); + } + + Access toPb() { + Access accessPb = entity.toPb(); + if (role != null) { + accessPb.setRole(role.name()); + } + return accessPb; + } + + static Acl fromPb(Access access) { + return Acl.of(Entity.fromPb(access), + access.getRole() != null ? 
Role.valueOf(access.getRole()) : null); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java new file mode 100644 index 000000000000..e06c8d86ee5f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -0,0 +1,668 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; + +import com.google.common.base.Function; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.gcloud.Page; +import com.google.gcloud.Service; +import com.google.gcloud.bigquery.spi.BigQueryRpc; + +import java.util.List; +import java.util.Set; + +/** + * An interface for Google Cloud BigQuery. + * + * @see Google Cloud BigQuery + */ +public interface BigQuery extends Service { + + /** + * Fields of a BigQuery Dataset resource. 
+ * + * @see Dataset + * Resource + */ + enum DatasetField { + ACCESS("access"), + CREATION_TIME("creationTime"), + DATASET_REFERENCE("datasetReference"), + DEFAULT_TABLE_EXPIRATION_MS("defaultTableExpirationMsS"), + DESCRIPTION("description"), + ETAG("etag"), + FRIENDLY_NAME("friendlyName"), + ID("id"), + LAST_MODIFIED_TIME("lastModifiedTime"), + LOCATION("location"), + SELF_LINK("selfLink"); + + private final String selector; + + DatasetField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(DatasetField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(DATASET_REFERENCE.selector()); + for (DatasetField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Fields of a BigQuery Table resource. + * + * @see Table + * Resource + */ + enum TableField { + CREATION_TIME("creationTime"), + DESCRIPTION("description"), + ETAG("etag"), + EXPIRATION_TIME("expirationTime"), + EXTERNAL_DATA_CONFIGURATION("externalDataConfiguration"), + FRIENDLY_NAME("friendlyName"), + ID("id"), + LAST_MODIFIED_TIME("lastModifiedTime"), + LOCATION("location"), + NUM_BYTES("numBytes"), + NUM_ROWS("numRows"), + SCHEMA("schema"), + SELF_LINK("selfLink"), + STREAMING_BUFFER("streamingBuffer"), + TABLE_REFERENCE("tableReference"), + TYPE("type"), + VIEW("view"); + + private final String selector; + + TableField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(TableField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(TABLE_REFERENCE.selector()); + fieldStrings.add(TYPE.selector()); + for (TableField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Fields of a BigQuery Job resource. 
+ * + * @see Job Resource + * + */ + enum JobField { + CONFIGURATION("configuration"), + ETAG("etag"), + ID("id"), + JOB_REFERENCE("jobReference"), + SELF_LINK("selfLink"), + STATISTICS("statistics"), + STATUS("status"), + USER_EMAIL("user_email"); + + private final String selector; + + JobField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(JobField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(JOB_REFERENCE.selector()); + fieldStrings.add(CONFIGURATION.selector()); + for (JobField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying dataset list options. + */ + class DatasetListOption extends Option { + + private static final long serialVersionUID = 8660294969063340498L; + + private DatasetListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of datasets returned per page. + */ + public static DatasetListOption pageSize(long pageSize) { + return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); + } + + /** + * Returns an option to specify the page token from which to start listing datasets. + */ + public static DatasetListOption startPageToken(String pageToken) { + return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an options to list all datasets, even hidden ones. + */ + public static DatasetListOption all() { + return new DatasetListOption(BigQueryRpc.Option.ALL_DATASETS, true); + } + } + + /** + * Class for specifying dataset get, create and update options. 
+ */ + class DatasetOption extends Option { + + private static final long serialVersionUID = 1674133909259913250L; + + private DatasetOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the dataset's fields to be returned by the RPC call. If this + * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can + * be used to specify only the fields of interest. {@link Dataset#datasetId()} is always + * returned, even if not specified. + */ + public static DatasetOption fields(DatasetField... fields) { + return new DatasetOption(BigQueryRpc.Option.FIELDS, DatasetField.selector(fields)); + } + } + + /** + * Class for specifying dataset delete options. + */ + class DatasetDeleteOption extends Option { + + private static final long serialVersionUID = -7166083569900951337L; + + private DatasetDeleteOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to delete a dataset even if non-empty. If not provided, attempting to + * delete a non-empty dataset will result in a {@link BigQueryException} being thrown. + */ + public static DatasetDeleteOption deleteContents() { + return new DatasetDeleteOption(BigQueryRpc.Option.DELETE_CONTENTS, true); + } + } + + /** + * Class for specifying table list options. + */ + class TableListOption extends Option { + + private static final long serialVersionUID = 8660294969063340498L; + + private TableListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of tables returned per page. + */ + public static TableListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); + } + + /** + * Returns an option to specify the page token from which to start listing tables. 
+ */ + public static TableListOption startPageToken(String pageToken) { + return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + } + + /** + * Class for specifying table get, create and update options. + */ + class TableOption extends Option { + + private static final long serialVersionUID = -1723870134095936772L; + + private TableOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the table's fields to be returned by the RPC call. If this + * option is not provided all table's fields are returned. {@code TableOption.fields} can be + * used to specify only the fields of interest. {@link Table#tableId()} and type (which is part + * of {@link Table#definition()}) are always returned, even if not specified. + */ + public static TableOption fields(TableField... fields) { + return new TableOption(BigQueryRpc.Option.FIELDS, TableField.selector(fields)); + } + } + + /** + * Class for specifying table data list options. + */ + class TableDataListOption extends Option { + + private static final long serialVersionUID = 8488823381738864434L; + + private TableDataListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of rows returned per page. + */ + public static TableDataListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); + } + + /** + * Returns an option to specify the page token from which to start listing table data. + */ + public static TableDataListOption startPageToken(String pageToken) { + return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option that sets the zero-based index of the row from which to start listing table + * data. 
+ */ + public static TableDataListOption startIndex(long index) { + checkArgument(index >= 0); + return new TableDataListOption(BigQueryRpc.Option.START_INDEX, index); + } + } + + /** + * Class for specifying job list options. + */ + class JobListOption extends Option { + + private static final long serialVersionUID = -8207122131226481423L; + + private JobListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to list all jobs, even the ones issued by other users. + */ + public static JobListOption allUsers() { + return new JobListOption(BigQueryRpc.Option.ALL_USERS, true); + } + + /** + * Returns an option to list only jobs that match the provided state filters. + */ + public static JobListOption stateFilter(JobStatus.State... stateFilters) { + List stringFilters = Lists.transform(ImmutableList.copyOf(stateFilters), + new Function() { + @Override + public String apply(JobStatus.State state) { + return state.name().toLowerCase(); + } + }); + return new JobListOption(BigQueryRpc.Option.STATE_FILTER, stringFilters); + } + + /** + * Returns an option to specify the maximum number of jobs returned per page. + */ + public static JobListOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); + } + + /** + * Returns an option to specify the page token from which to start listing jobs. + */ + public static JobListOption startPageToken(String pageToken) { + return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option to specify the job's fields to be returned by the RPC call. If this option + * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to + * specify only the fields of interest. {@link Job#jobId()}, {@link JobStatus#state()}, + * {@link JobStatus#error()} as well as type-specific configuration (e.g. 
+ * {@link QueryJobConfiguration#query()} for Query Jobs) are always returned, even if not + * specified. {@link JobField#SELF_LINK} and {@link JobField#ETAG} can not be selected when + * listing jobs. + */ + public static JobListOption fields(JobField... fields) { + String selector = JobField.selector(fields); + StringBuilder builder = new StringBuilder(); + builder.append("etag,jobs(").append(selector).append(",state,errorResult),nextPageToken"); + return new JobListOption(BigQueryRpc.Option.FIELDS, builder.toString()); + } + } + + /** + * Class for specifying table get and create options. + */ + class JobOption extends Option { + + private static final long serialVersionUID = -3111736712316353665L; + + private JobOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the job's fields to be returned by the RPC call. If this option + * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to + * specify only the fields of interest. {@link Job#jobId()} as well as type-specific + * configuration (e.g. {@link QueryJobConfiguration#query()} for Query Jobs) are always + * returned, even if not specified. + */ + public static JobOption fields(JobField... fields) { + return new JobOption(BigQueryRpc.Option.FIELDS, JobField.selector(fields)); + } + } + + /** + * Class for specifying query results options. + */ + class QueryResultsOption extends Option { + + private static final long serialVersionUID = 3788898503226985525L; + + private QueryResultsOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of rows returned per page. 
+ */ + public static QueryResultsOption pageSize(long pageSize) { + checkArgument(pageSize >= 0); + return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, pageSize); + } + + /** + * Returns an option to specify the page token from which to start getting query results. + */ + public static QueryResultsOption startPageToken(String pageToken) { + return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option that sets the zero-based index of the row from which to start getting query + * results. + */ + public static QueryResultsOption startIndex(long startIndex) { + checkArgument(startIndex >= 0); + return new QueryResultsOption(BigQueryRpc.Option.START_INDEX, startIndex); + } + + /** + * Returns an option that sets how long to wait for the query to complete, in milliseconds, + * before returning. Default is 10 seconds. If the timeout passes before the job completes, + * {@link QueryResponse#jobCompleted()} will be {@code false}. + */ + public static QueryResultsOption maxWaitTime(long maxWaitTime) { + checkArgument(maxWaitTime >= 0); + return new QueryResultsOption(BigQueryRpc.Option.TIMEOUT, maxWaitTime); + } + } + + /** + * Creates a new dataset. + * + * @throws BigQueryException upon failure + */ + Dataset create(DatasetInfo dataset, DatasetOption... options); + + /** + * Creates a new table. + * + * @throws BigQueryException upon failure + */ + Table create(TableInfo table, TableOption... options); + + /** + * Creates a new job. + * + * @throws BigQueryException upon failure + */ + Job create(JobInfo job, JobOption... options); + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Dataset getDataset(String datasetId, DatasetOption... options); + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Dataset getDataset(DatasetId datasetId, DatasetOption... 
options); + + /** + * Lists the project's datasets. This method returns partial information on each dataset + * ({@link Dataset#datasetId()}, {@link Dataset#friendlyName()} and {@link Dataset#id()}). To get + * complete information use either {@link #getDataset(String, DatasetOption...)} or + * {@link #getDataset(DatasetId, DatasetOption...)}. + * + * @throws BigQueryException upon failure + */ + Page listDatasets(DatasetListOption... options); + + /** + * Deletes the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean delete(String datasetId, DatasetDeleteOption... options); + + /** + * Deletes the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean delete(DatasetId datasetId, DatasetDeleteOption... options); + + /** + * Deletes the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean delete(String datasetId, String tableId); + + /** + * Deletes the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean delete(TableId tableId); + + /** + * Updates dataset information. + * + * @throws BigQueryException upon failure + */ + Dataset update(DatasetInfo dataset, DatasetOption... options); + + /** + * Updates table information. + * + * @throws BigQueryException upon failure + */ + Table update(TableInfo table, TableOption... options); + + /** + * Returns the requested table or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Table getTable(String datasetId, String tableId, TableOption... options); + + /** + * Returns the requested table or {@code null} if not found. 
+ * + * @throws BigQueryException upon failure + */ + Table getTable(TableId tableId, TableOption... options); + + /** + * Lists the tables in the dataset. This method returns partial information on each table + * ({@link Table#tableId()}, {@link Table#friendlyName()}, {@link Table#id()} and type, which + * is part of {@link Table#definition()}). To get complete information use either + * {@link #getTable(TableId, TableOption...)} or + * {@link #getTable(String, String, TableOption...)}. + * + * @throws BigQueryException upon failure + */ + Page listTables(String datasetId, TableListOption... options); + + /** + * Lists the tables in the dataset. This method returns partial information on each table + * ({@link Table#tableId()}, {@link Table#friendlyName()}, {@link Table#id()} and type, which + * is part of {@link Table#definition()}). To get complete information use either + * {@link #getTable(TableId, TableOption...)} or + * {@link #getTable(String, String, TableOption...)}. + * + * @throws BigQueryException upon failure + */ + Page
listTables(DatasetId datasetId, TableListOption... options); + + /** + * Sends an insert all request. + * + * @throws BigQueryException upon failure + */ + InsertAllResponse insertAll(InsertAllRequest request); + + /** + * Lists the table's rows. + * + * @throws BigQueryException upon failure + */ + Page> listTableData(String datasetId, String tableId, + TableDataListOption... options); + + /** + * Lists the table's rows. + * + * @throws BigQueryException upon failure + */ + Page> listTableData(TableId tableId, TableDataListOption... options); + + /** + * Returns the requested job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Job getJob(String jobId, JobOption... options); + + /** + * Returns the requested job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Job getJob(JobId jobId, JobOption... options); + + /** + * Lists the jobs. + * + * @throws BigQueryException upon failure + */ + Page listJobs(JobListOption... options); + + /** + * Sends a job cancel request. This call will return immediately. The job status can then be + * checked using either {@link #getJob(JobId, JobOption...)} or + * {@link #getJob(String, JobOption...)}). + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(String jobId); + + /** + * Sends a job cancel request. This call will return immediately. The job status can then be + * checked using either {@link #getJob(JobId, JobOption...)} or + * {@link #getJob(String, JobOption...)}). + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(JobId tableId); + + /** + * Runs the query associated with the request. 
+ * + * @throws BigQueryException upon failure + */ + QueryResponse query(QueryRequest request); + + /** + * Returns results of the query associated with the provided job. + * + * @throws BigQueryException upon failure + */ + QueryResponse getQueryResults(JobId job, QueryResultsOption... options); + + /** + * Returns a channel to write data to be inserted into a BigQuery table. Data format and other + * options can be configured using the {@link WriteChannelConfiguration} parameter. + * + * @throws BigQueryException upon failure + */ + TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration); +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java new file mode 100644 index 000000000000..e58f0d0b7213 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java @@ -0,0 +1,125 @@ +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google Cloud BigQuery Error. Objects of this class represent errors encountered by the BigQuery + * service while executing a request. A BigQuery Job that terminated with an error has a non-null + * {@link JobStatus#error()}. A job can also encounter errors during its execution that do not cause + * the whole job to fail (see {@link JobStatus#executionErrors()}). Similarly, queries and insert + * all requests can cause BigQuery errors that do not mean the whole operation failed (see + * {@link QueryResponse#executionErrors()} and {@link InsertAllResponse#insertErrors()}). When a + * {@link BigQueryException} is thrown the BigQuery Error that caused it, if any, can be accessed + * with {@link BigQueryException#error()}. 
+ */ +public class BigQueryError implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public BigQueryError apply(ErrorProto pb) { + return BigQueryError.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ErrorProto apply(BigQueryError error) { + return error.toPb(); + } + }; + private static final long serialVersionUID = -6566785320629096688L; + + private final String reason; + private final String location; + private final String debugInfo; + private final String message; + + public BigQueryError(String reason, String location, String message, String debugInfo) { + this.reason = reason; + this.location = location; + this.debugInfo = debugInfo; + this.message = message; + } + + public BigQueryError(String reason, String location, String message) { + this.reason = reason; + this.location = location; + this.message = message; + this.debugInfo = null; + } + + /** + * Returns short error code that summarizes the error. + * + * @see Troubleshooting + * Errors + */ + public String reason() { + return reason; + } + + /** + * Returns where the error occurred, if present. + */ + public String location() { + return location; + } + + String debugInfo() { + return debugInfo; + } + + /** + * Returns a human-readable description of the error. 
+ */ + public String message() { + return message; + } + + @Override + public int hashCode() { + return Objects.hash(reason, location, message); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("reason", reason) + .add("location", location) + .add("message", message) + .toString(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof BigQueryError && Objects.equals(toPb(), ((BigQueryError) obj).toPb()); + } + + ErrorProto toPb() { + ErrorProto errorPb = new ErrorProto(); + if (reason != null) { + errorPb.setReason(reason); + } + if (location != null) { + errorPb.setLocation(location); + } + if (message != null) { + errorPb.setMessage(message); + } + if (debugInfo != null) { + errorPb.setDebugInfo(debugInfo); + } + return errorPb; + } + + static BigQueryError fromPb(ErrorProto errorPb) { + return new BigQueryError(errorPb.getReason(), errorPb.getLocation(), errorPb.getMessage(), + errorPb.getDebugInfo()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java new file mode 100644 index 000000000000..a157afd25db2 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -0,0 +1,87 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +import java.io.IOException; +import java.util.Set; + +/** + * BigQuery service exception. + * + * @see Google Cloud + * BigQuery error codes + */ +public class BigQueryException extends BaseServiceException { + + // see: https://cloud.google.com/bigquery/troubleshooting-errors + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(500, null), + new Error(502, null), + new Error(503, null), + new Error(504, null)); + private static final long serialVersionUID = -5006625989225438209L; + + private final BigQueryError error; + + public BigQueryException(int code, String message) { + this(code, message, null); + } + + public BigQueryException(int code, String message, BigQueryError error) { + super(code, message, error != null ? error.reason() : null, true); + this.error = error; + } + + public BigQueryException(IOException exception) { + super(exception, true); + BigQueryError error = null; + if (reason() != null) { + error = new BigQueryError(reason(), location(), getMessage(), debugInfo()); + } + this.error = error; + } + + /** + * Returns the {@link BigQueryError} that caused this exception. Returns {@code null} if none + * exists. + */ + public BigQueryError error() { + return error; + } + + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; + } + + /** + * Translate RetryHelperException to the BigQueryException that caused the error. This method will + * always throw an exception. 
+ * + * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} + */ + static BaseServiceException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new BigQueryException(UNKNOWN_CODE, ex.getMessage()); + } +} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java similarity index 62% rename from gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java rename to gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java index 62b1f442310c..90e7bbccd483 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java @@ -14,21 +14,12 @@ * limitations under the License. */ -package com.google.gcloud.storage; +package com.google.gcloud.bigquery; + +import com.google.gcloud.ServiceFactory; /** - * Interface for Google Cloud storage list result. + * An interface for BigQuery factories. */ -public interface ListResult extends Iterable { - - /** - * Returns the cursor for the nextPage or {@code null} if no more results. - */ - String nextPageCursor(); - - /** - * Returns the results of the nextPage or {@code null} if no more result. - */ - ListResult nextPage(); - +public interface BigQueryFactory extends ServiceFactory { } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java new file mode 100644 index 000000000000..27f4af5d5007 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -0,0 +1,631 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gcloud.BaseService; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; +import com.google.gcloud.bigquery.spi.BigQueryRpc; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; + +final class BigQueryImpl extends BaseService implements BigQuery { + + private static class DatasetPageFetcher implements NextPageFetcher { + + private static final long serialVersionUID = -3057564042439021278L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + + 
DatasetPageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + } + + @Override + public Page nextPage() { + return listDatasets(serviceOptions, requestOptions); + } + } + + private static class TablePageFetcher implements NextPageFetcher
{ + + private static final long serialVersionUID = 8611248840504201187L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + private final String dataset; + + TablePageFetcher(String dataset, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + this.dataset = dataset; + } + + @Override + public Page
nextPage() { + return listTables(dataset, serviceOptions, requestOptions); + } + } + + private static class JobPageFetcher implements NextPageFetcher { + + private static final long serialVersionUID = 8536533282558245472L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + + JobPageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + } + + @Override + public Page nextPage() { + return listJobs(serviceOptions, requestOptions); + } + } + + private static class TableDataPageFetcher implements NextPageFetcher> { + + private static final long serialVersionUID = -8501991114794410114L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + private final TableId table; + + TableDataPageFetcher(TableId table, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + this.table = table; + } + + @Override + public Page> nextPage() { + return listTableData(table, serviceOptions, requestOptions); + } + } + + private static class QueryResultsPageFetcherImpl + implements NextPageFetcher>, QueryResult.QueryResultsPageFetcher { + + private static final long serialVersionUID = -9198905840550459803L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + private final JobId job; + + QueryResultsPageFetcherImpl(JobId job, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + this.job = job; + } + + @Override + public QueryResult nextPage() { + return getQueryResults(job, serviceOptions, requestOptions).result(); + } 
+ } + + private final BigQueryRpc bigQueryRpc; + + BigQueryImpl(BigQueryOptions options) { + super(options); + bigQueryRpc = options.rpc(); + } + + @Override + public Dataset create(DatasetInfo dataset, DatasetOption... options) { + final com.google.api.services.bigquery.model.Dataset datasetPb = + dataset.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return Dataset.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.create(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Table create(TableInfo table, TableOption... options) { + final com.google.api.services.bigquery.model.Table tablePb = + table.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return Table.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.create(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Job create(JobInfo job, JobOption... 
options) { + final com.google.api.services.bigquery.model.Job jobPb = + job.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return Job.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() { + return bigQueryRpc.create(jobPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Dataset getDataset(String datasetId, DatasetOption... options) { + return getDataset(DatasetId.of(datasetId), options); + } + + @Override + public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) { + final Map optionsMap = optionMap(options); + try { + com.google.api.services.bigquery.model.Dataset answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.getDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Dataset.fromPb(this, answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listDatasets(DatasetListOption... 
options) { + return listDatasets(options(), optionMap(options)); + } + + private static Page listDatasets(final BigQueryOptions serviceOptions, + final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listDatasets(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new DatasetPageFetcher(serviceOptions, cursor, optionsMap), cursor, + Iterables.transform(result.y(), + new Function() { + @Override + public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { + return Dataset.fromPb(serviceOptions.service(), dataset); + } + })); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, DatasetDeleteOption... options) { + return delete(DatasetId.of(datasetId), options); + } + + @Override + public boolean delete(final DatasetId datasetId, DatasetDeleteOption... 
options) { + final Map optionsMap = optionMap(options); + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, String tableId) { + return delete(TableId.of(datasetId, tableId)); + } + + @Override + public boolean delete(final TableId tableId) { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteTable(tableId.dataset(), tableId.table()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Dataset update(DatasetInfo dataset, DatasetOption... options) { + final com.google.api.services.bigquery.model.Dataset datasetPb = + dataset.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return Dataset.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Dataset call() { + return bigQueryRpc.patch(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Table update(TableInfo table, TableOption... 
options) { + final com.google.api.services.bigquery.model.Table tablePb = + table.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return Table.fromPb(this, + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.patch(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Table getTable(final String datasetId, final String tableId, TableOption... options) { + return getTable(TableId.of(datasetId, tableId), options); + } + + @Override + public Table getTable(final TableId tableId, TableOption... options) { + final Map optionsMap = optionMap(options); + try { + com.google.api.services.bigquery.model.Table answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Table call() { + return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Table.fromPb(this, answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page
listTables(String datasetId, TableListOption... options) { + return listTables(datasetId, options(), optionMap(options)); + } + + @Override + public Page
listTables(DatasetId datasetId, TableListOption... options) { + return listTables(datasetId.dataset(), options(), optionMap(options)); + } + + private static Page
listTables(final String datasetId, final BigQueryOptions + serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> + call() { + return serviceOptions.rpc().listTables(datasetId, optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable
tables = Iterables.transform(result.y(), + new Function() { + @Override + public Table apply(com.google.api.services.bigquery.model.Table table) { + return Table.fromPb(serviceOptions.service(), table); + } + }); + return new PageImpl<>(new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), + cursor, tables); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public InsertAllResponse insertAll(InsertAllRequest request) { + final TableId tableId = request.table(); + final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest(); + requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); + requestPb.setSkipInvalidRows(request.skipInvalidRows()); + requestPb.setTemplateSuffix(request.templateSuffix()); + List rowsPb = Lists.transform(request.rows(), new Function() { + @Override + public Rows apply(RowToInsert rowToInsert) { + return new Rows().setInsertId(rowToInsert.id()).setJson(rowToInsert.content()); + } + }); + requestPb.setRows(rowsPb); + return InsertAllResponse.fromPb( + bigQueryRpc.insertAll(tableId.dataset(), tableId.table(), requestPb)); + } + + @Override + public Page> listTableData(String datasetId, String tableId, + TableDataListOption... options) { + return listTableData(TableId.of(datasetId, tableId), options(), optionMap(options)); + } + + @Override + public Page> listTableData(TableId tableId, TableDataListOption... 
options) { + return listTableData(tableId, options(), optionMap(options)); + } + + private static Page> listTableData(final TableId tableId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc() + .listTableData(tableId.dataset(), tableId.table(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new TableDataPageFetcher(tableId, serviceOptions, cursor, optionsMap), + cursor, transformTableData(result.y())); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static List> transformTableData(Iterable tableDataPb) { + return ImmutableList.copyOf( + Iterables.transform(tableDataPb != null ? tableDataPb : ImmutableList.of(), + new Function>() { + @Override + public List apply(TableRow rowPb) { + return Lists.transform(rowPb.getF(), FieldValue.FROM_PB_FUNCTION); + } + })); + } + + @Override + public Job getJob(String jobId, JobOption... options) { + return getJob(JobId.of(jobId), options); + } + + @Override + public Job getJob(final JobId jobId, JobOption... options) { + final Map optionsMap = optionMap(options); + try { + com.google.api.services.bigquery.model.Job answer = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() { + return bigQueryRpc.getJob(jobId.job(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Job.fromPb(this, answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listJobs(JobListOption... 
options) { + return listJobs(options(), optionMap(options)); + } + + private static Page listJobs(final BigQueryOptions serviceOptions, + final Map optionsMap) { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> + call() { + return serviceOptions.rpc().listJobs(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable jobs = Iterables.transform(result.y(), + new Function() { + @Override + public Job apply(com.google.api.services.bigquery.model.Job job) { + return Job.fromPb(serviceOptions.service(), job); + } + }); + return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + } + + @Override + public boolean cancel(String jobId) { + return cancel(JobId.of(jobId)); + } + + @Override + public boolean cancel(final JobId jobId) { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.cancel(jobId.job()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public QueryResponse query(final QueryRequest request) { + try { + com.google.api.services.bigquery.model.QueryResponse results = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.QueryResponse call() { + return bigQueryRpc.query(request.setProjectId(options().projectId()).toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.jobCompleted(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + builder.jobCompleted(true); + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), options(), 
ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public QueryResponse getQueryResults(JobId job, QueryResultsOption... options) { + Map optionsMap = optionMap(options); + return getQueryResults(job, options(), optionsMap); + } + + private static QueryResponse getQueryResults(final JobId jobId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + GetQueryResultsResponse results = + runWithRetries(new Callable() { + @Override + public GetQueryResultsResponse call() { + return serviceOptions.rpc().getQueryResults(jobId.job(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.etag(results.getEtag()); + builder.jobCompleted(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), serviceOptions, ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + 
resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static QueryResult.Builder transformQueryResults(JobId jobId, List rowsPb, + String cursor, BigQueryOptions serviceOptions, Map optionsMap) { + QueryResultsPageFetcherImpl nextPageFetcher = + new QueryResultsPageFetcherImpl(jobId, serviceOptions, cursor, optionsMap); + return QueryResult.builder() + .pageFetcher(nextPageFetcher) + .cursor(cursor) + .results(transformTableData(rowsPb)); + } + + @Override + public TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration) { + return new TableDataWriteChannel(options(), + writeChannelConfiguration.setProjectId(options().projectId())); + } + + private Map optionMap(Option... options) { + Map optionMap = Maps.newEnumMap(BigQueryRpc.Option.class); + for (Option option : options) { + Object prev = optionMap.put(option.rpcOption(), option.value()); + checkArgument(prev == null, "Duplicate option %s", option); + } + return optionMap; + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java new file mode 100644 index 000000000000..d48cf646f349 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java @@ -0,0 +1,114 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.ServiceOptions; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; +import com.google.gcloud.bigquery.spi.DefaultBigQueryRpc; + +import java.util.Set; + +public class BigQueryOptions extends ServiceOptions { + + private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery"; + private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE); + private static final long serialVersionUID = -215981591481708043L; + + public static class DefaultBigqueryFactory implements BigQueryFactory { + + private static final BigQueryFactory INSTANCE = new DefaultBigqueryFactory(); + + @Override + public BigQuery create(BigQueryOptions options) { + return new BigQueryImpl(options); + } + } + + public static class DefaultBigQueryRpcFactory implements BigQueryRpcFactory { + + private static final BigQueryRpcFactory INSTANCE = new DefaultBigQueryRpcFactory(); + + @Override + public BigQueryRpc create(BigQueryOptions options) { + return new DefaultBigQueryRpc(options); + } + } + + public static class Builder extends + ServiceOptions.Builder { + + private Builder() { + } + + private Builder(BigQueryOptions options) { + super(options); + } + + @Override + public BigQueryOptions build() { + return new BigQueryOptions(this); + } + } + + private BigQueryOptions(Builder builder) { + super(BigQueryFactory.class, BigQueryRpcFactory.class, builder); + } + + @Override + 
protected BigQueryFactory defaultServiceFactory() { + return DefaultBigqueryFactory.INSTANCE; + } + + @Override + protected BigQueryRpcFactory defaultRpcFactory() { + return DefaultBigQueryRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return SCOPES; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof BigQueryOptions)) { + return false; + } + BigQueryOptions other = (BigQueryOptions) obj; + return baseEquals(other); + } + + public static BigQueryOptions defaultInstance() { + return builder().build(); + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java new file mode 100644 index 000000000000..c12cbf5fe432 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java @@ -0,0 +1,258 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationTableCopy; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery copy job configuration. A copy job copies an existing table to another new or + * existing table. Copy job configurations have {@link JobConfiguration.Type#COPY} type. + */ +public final class CopyJobConfiguration extends JobConfiguration { + + private static final long serialVersionUID = 1140509641399762967L; + + private final List sourceTables; + private final TableId destinationTable; + private final JobInfo.CreateDisposition createDisposition; + private final JobInfo.WriteDisposition writeDisposition; + + public static final class Builder + extends JobConfiguration.Builder { + + private List sourceTables; + private TableId destinationTable; + private JobInfo.CreateDisposition createDisposition; + private JobInfo.WriteDisposition writeDisposition; + + private Builder() { + super(Type.COPY); + } + + private Builder(CopyJobConfiguration jobConfiguration) { + this(); + this.sourceTables = jobConfiguration.sourceTables; + this.destinationTable = jobConfiguration.destinationTable; + this.createDisposition = jobConfiguration.createDisposition; + this.writeDisposition = jobConfiguration.writeDisposition; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + this(); + JobConfigurationTableCopy copyConfigurationPb = configurationPb.getCopy(); + this.destinationTable = TableId.fromPb(copyConfigurationPb.getDestinationTable()); + if (copyConfigurationPb.getSourceTables() != null) { + this.sourceTables = + Lists.transform(copyConfigurationPb.getSourceTables(), 
TableId.FROM_PB_FUNCTION); + } else { + this.sourceTables = ImmutableList.of(TableId.fromPb(copyConfigurationPb.getSourceTable())); + } + if (copyConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + JobInfo.CreateDisposition.valueOf(copyConfigurationPb.getCreateDisposition()); + } + if (copyConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = JobInfo.WriteDisposition.valueOf( + copyConfigurationPb.getWriteDisposition()); + } + } + + /** + * Sets the source tables to copy. + */ + public Builder sourceTables(List sourceTables) { + this.sourceTables = sourceTables != null ? ImmutableList.copyOf(sourceTables) : null; + return this; + } + + /** + * Sets the destination table of the copy job. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + /** + * Sets whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public Builder createDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public Builder writeDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + public CopyJobConfiguration build() { + return new CopyJobConfiguration(this); + } + } + + private CopyJobConfiguration(Builder builder) { + super(builder); + this.sourceTables = checkNotNull(builder.sourceTables); + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + } + + /** + * Returns the source tables to copy. + */ + public List sourceTables() { + return sourceTables; + } + + /** + * Returns the destination table to load the data into. 
+ */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public JobInfo.CreateDisposition createDisposition() { + return this.createDisposition; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public JobInfo.WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTables", sourceTables) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CopyJobConfiguration && baseEquals((CopyJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), sourceTables, destinationTable, createDisposition, + writeDisposition); + } + + @Override + CopyJobConfiguration setProjectId(final String projectId) { + Builder builder = toBuilder(); + builder.sourceTables( + Lists.transform(sourceTables(), new Function() { + @Override + public TableId apply(TableId tableId) { + return tableId.setProjectId(projectId); + } + })); + builder.destinationTable(destinationTable().setProjectId(projectId)); + return builder.build(); + } + + @Override + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationTableCopy configurationPb = new JobConfigurationTableCopy(); + configurationPb.setDestinationTable(destinationTable.toPb()); + if (sourceTables.size() == 1) { + configurationPb.setSourceTable(sourceTables.get(0).toPb()); + } else { + configurationPb.setSourceTables(Lists.transform(sourceTables, TableId.TO_PB_FUNCTION)); + } + if (createDisposition != null) { + 
configurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + configurationPb.setWriteDisposition(writeDisposition.toString()); + } + return new com.google.api.services.bigquery.model.JobConfiguration().setCopy(configurationPb); + } + + /** + * Creates a builder for a BigQuery Copy Job configuration given destination and source table. + */ + public static Builder builder(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, ImmutableList.of(checkNotNull(sourceTable))); + } + + /** + * Creates a builder for a BigQuery Copy Job configuration given destination and source tables. + */ + public static Builder builder(TableId destinationTable, List sourceTables) { + return new Builder().destinationTable(destinationTable).sourceTables(sourceTables); + } + + /** + * Returns a BigQuery Copy Job configuration for the given destination and source table. + */ + public static CopyJobConfiguration of(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, sourceTable).build(); + } + + /** + * Returns a BigQuery Copy Job configuration for the given destination and source tables. + */ + public static CopyJobConfiguration of(TableId destinationTable, List sourceTables) { + return builder(destinationTable, sourceTables).build(); + } + + @SuppressWarnings("unchecked") + static CopyJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java new file mode 100644 index 000000000000..9576e7d75640 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java @@ -0,0 +1,271 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; + +import java.nio.charset.Charset; +import java.util.Objects; + +/** + * Google BigQuery options for CSV format. This class wraps some properties of CSV files used by + * BigQuery to parse external data. + */ +public class CsvOptions extends FormatOptions { + + private static final long serialVersionUID = 2193570529308612708L; + + private final Boolean allowJaggedRows; + private final Boolean allowQuotedNewLines; + private final String encoding; + private final String fieldDelimiter; + private final String quote; + private final Integer skipLeadingRows; + + public static final class Builder { + + private Boolean allowJaggedRows; + private Boolean allowQuotedNewLines; + private String encoding; + private String fieldDelimiter; + private String quote; + private Integer skipLeadingRows; + + private Builder() {} + + /** + * Set whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if there are too many + * bad records, an invalid error is returned in the job result. By default, rows with missing + * trailing columns are considered bad records. 
+ */ + public Builder allowJaggedRows(Boolean allowJaggedRows) { + this.allowJaggedRows = allowJaggedRows; + return this; + } + + /** + * Sets whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. By default quoted newline are not allowed. + */ + public Builder allowQuotedNewLines(Boolean allowQuotedNewLines) { + this.allowQuotedNewLines = allowQuotedNewLines; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(String encoding) { + this.encoding = encoding; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(Charset encoding) { + this.encoding = encoding.name(); + return this; + } + + /** + * Sets the separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 + * encoding, and then uses the first byte of the encoded string to split the data in its raw, + * binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. + * The default value is a comma (','). + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return this; + } + + /** + * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the + * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split + * the data in its raw, binary state. The default value is a double-quote ('"'). 
If your data + * does not contain quoted sections, set the property value to an empty string. If your data + * contains quoted newline characters, you must also set {@link #allowQuotedNewLines(Boolean)} + * property to {@code true}. + */ + public Builder quote(String quote) { + this.quote = quote; + return this; + } + + /** + * Sets the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. The default value is 0. This property is useful if you have header rows in the file + * that should be skipped. + */ + public Builder skipLeadingRows(Integer skipLeadingRows) { + this.skipLeadingRows = skipLeadingRows; + return this; + } + + /** + * Creates a {@code CsvOptions} object. + */ + public CsvOptions build() { + return new CsvOptions(this); + } + } + + private CsvOptions(Builder builder) { + super(FormatOptions.CSV); + this.allowJaggedRows = builder.allowJaggedRows; + this.allowQuotedNewLines = builder.allowQuotedNewLines; + this.encoding = builder.encoding; + this.fieldDelimiter = builder.fieldDelimiter; + this.quote = builder.quote; + this.skipLeadingRows = builder.skipLeadingRows; + } + + /** + * Returns whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if the number of bad + * records exceeds {@link ExternalTableDefinition#maxBadRecords()}, an invalid error is returned + * in the job result. + */ + public Boolean allowJaggedRows() { + return allowJaggedRows; + } + + /** + * Returns whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. + */ + public Boolean allowQuotedNewLines() { + return allowQuotedNewLines; + } + + /** + * Returns the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. If + * not set, UTF-8 is used. 
BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote()} and {@link #fieldDelimiter()}. + */ + public String encoding() { + return encoding; + } + + /** + * Returns the separator for fields in a CSV file. + */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the value that is used to quote data sections in a CSV file. + */ + public String quote() { + return quote; + } + + /** + * Returns the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. + */ + public Integer skipLeadingRows() { + return skipLeadingRows; + } + + /** + * Returns a builder for the {@code CsvOptions} object. + */ + public Builder toBuilder() { + return new Builder() + .allowJaggedRows(allowJaggedRows) + .allowQuotedNewLines(allowQuotedNewLines) + .encoding(encoding) + .fieldDelimiter(fieldDelimiter) + .quote(quote) + .skipLeadingRows(skipLeadingRows); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("type", type()) + .add("allowJaggedRows", allowJaggedRows) + .add("allowQuotedNewLines", allowQuotedNewLines) + .add("encoding", encoding) + .add("fieldDelimiter", fieldDelimiter) + .add("quote", quote) + .add("skipLeadingRows", skipLeadingRows) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type(), allowJaggedRows, allowQuotedNewLines, encoding, fieldDelimiter, + quote, skipLeadingRows); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CsvOptions && Objects.equals(toPb(), ((CsvOptions) obj).toPb()); + } + + com.google.api.services.bigquery.model.CsvOptions toPb() { + com.google.api.services.bigquery.model.CsvOptions csvOptions = + new com.google.api.services.bigquery.model.CsvOptions(); + csvOptions.setAllowJaggedRows(allowJaggedRows); + csvOptions.setAllowQuotedNewlines(allowQuotedNewLines); + csvOptions.setEncoding(encoding); + 
csvOptions.setFieldDelimiter(fieldDelimiter); + csvOptions.setQuote(quote); + csvOptions.setSkipLeadingRows(skipLeadingRows); + return csvOptions; + } + + /** + * Returns a builder for a CsvOptions object. + */ + public static Builder builder() { + return new Builder(); + } + + static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOptions) { + Builder builder = builder(); + if (csvOptions.getAllowJaggedRows() != null) { + builder.allowJaggedRows(csvOptions.getAllowJaggedRows()); + } + if (csvOptions.getAllowQuotedNewlines() != null) { + builder.allowQuotedNewLines(csvOptions.getAllowQuotedNewlines()); + } + if (csvOptions.getEncoding() != null) { + builder.encoding(csvOptions.getEncoding()); + } + if (csvOptions.getFieldDelimiter() != null) { + builder.fieldDelimiter(csvOptions.getFieldDelimiter()); + } + if (csvOptions.getQuote() != null) { + builder.quote(csvOptions.getQuote()); + } + if (csvOptions.getSkipLeadingRows() != null) { + builder.skipLeadingRows(csvOptions.getSkipLeadingRows()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java new file mode 100644 index 000000000000..e17d3e82c4ef --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java @@ -0,0 +1,253 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.gcloud.Page; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.List; +import java.util.Objects; + +/** + * A Google BigQuery Dataset. + * + *

Objects of this class are immutable. Operations that modify the dataset like {@link #update} + * return a new object. To get a {@code Dataset} object with the most recent information use + * {@link #reload}. {@code Dataset} adds a layer of service-related functionality over + * {@link DatasetInfo}. + *

+ */ +public final class Dataset extends DatasetInfo { + + private static final long serialVersionUID = -4272921483363065593L; + + private final BigQueryOptions options; + private transient BigQuery bigquery; + + /** + * A builder for {@code Dataset} objects. + */ + public static final class Builder extends DatasetInfo.Builder { + + private final BigQuery bigquery; + private final DatasetInfo.BuilderImpl infoBuilder; + + Builder(BigQuery bigquery, DatasetId datasetId) { + this.bigquery = bigquery; + this.infoBuilder = new DatasetInfo.BuilderImpl(); + this.infoBuilder.datasetId(datasetId); + } + + Builder(Dataset dataset) { + this.bigquery = dataset.bigquery; + this.infoBuilder = new DatasetInfo.BuilderImpl(dataset); + } + + @Override + public Builder datasetId(DatasetId datasetId) { + infoBuilder.datasetId(datasetId); + return this; + } + + @Override + public Builder acl(List acl) { + infoBuilder.acl(acl); + return this; + } + + @Override + Builder creationTime(Long creationTime) { + infoBuilder.creationTime(creationTime); + return this; + } + + @Override + public Builder defaultTableLifetime(Long defaultTableLifetime) { + infoBuilder.defaultTableLifetime(defaultTableLifetime); + return this; + } + + @Override + public Builder description(String description) { + infoBuilder.description(description); + return this; + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + public Builder friendlyName(String friendlyName) { + infoBuilder.friendlyName(friendlyName); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + Builder lastModified(Long lastModified) { + infoBuilder.lastModified(lastModified); + return this; + } + + @Override + public Builder location(String location) { + infoBuilder.location(location); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + public 
Dataset build() { + return new Dataset(bigquery, infoBuilder); + } + } + + Dataset(BigQuery bigquery, DatasetInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.bigquery = checkNotNull(bigquery); + this.options = bigquery.options(); + } + + /** + * Checks if this dataset exists. + * + * @return {@code true} if this dataset exists, {@code false} otherwise + * @throws BigQueryException upon failure + */ + public boolean exists() { + return bigquery.getDataset(datasetId(), BigQuery.DatasetOption.fields()) != null; + } + + /** + * Fetches current dataset's latest information. Returns {@code null} if the dataset does not + * exist. + * + * @param options dataset options + * @return a {@code Dataset} object with latest information or {@code null} if not found + * @throws BigQueryException upon failure + */ + public Dataset reload(BigQuery.DatasetOption... options) { + return bigquery.getDataset(datasetId().dataset(), options); + } + + /** + * Updates the dataset's information with this dataset's information. Dataset's user-defined id + * cannot be changed. A new {@code Dataset} object is returned. + * + * @param options dataset options + * @return a {@code Dataset} object with updated information + * @throws BigQueryException upon failure + */ + public Dataset update(BigQuery.DatasetOption... options) { + return bigquery.update(this, options); + } + + /** + * Deletes this dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + public boolean delete() { + return bigquery.delete(datasetId()); + } + + /** + * Returns the paginated list of tables in this dataset. + * + * @param options options for listing tables + * @throws BigQueryException upon failure + */ + public Page
list(BigQuery.TableListOption... options) { + return bigquery.listTables(datasetId(), options); + } + + /** + * Returns the requested table in this dataset or {@code null} if not found. + * + * @param table user-defined id of the requested table + * @param options table options + * @throws BigQueryException upon failure + */ + public Table get(String table, BigQuery.TableOption... options) { + return bigquery.getTable(TableId.of(datasetId().dataset(), table), options); + } + + /** + * Creates a new table in this dataset. + * + * @param table the table's user-defined id + * @param definition the table's definition + * @param options options for table creation + * @return a {@code Table} object for the created table + * @throws BigQueryException upon failure + */ + public Table create(String table, TableDefinition definition, BigQuery.TableOption... options) { + TableInfo tableInfo = TableInfo.of(TableId.of(datasetId().dataset(), table), definition); + return bigquery.create(tableInfo, options); + } + + /** + * Returns the dataset's {@code BigQuery} object used to issue requests. 
+ */ + public BigQuery bigquery() { + return bigquery; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Dataset + && Objects.equals(toPb(), ((Dataset) obj).toPb()) + && Objects.equals(options, ((Dataset) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.bigquery = options.service(); + } + + static Dataset fromPb(BigQuery bigquery, + com.google.api.services.bigquery.model.Dataset datasetPb) { + return new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetPb)); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java new file mode 100644 index 000000000000..006c089f8d63 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java @@ -0,0 +1,97 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.DatasetReference; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Dataset identity. + */ +public class DatasetId implements Serializable { + + private static final long serialVersionUID = -6186254820908152300L; + + private final String project; + private final String dataset; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns dataset's user-defined id. + */ + public String dataset() { + return dataset; + } + + private DatasetId(String project, String dataset) { + this.project = project; + this.dataset = dataset; + } + + /** + * Creates a dataset identity given project's and dataset's user-defined ids. + */ + public static DatasetId of(String project, String dataset) { + return new DatasetId(checkNotNull(project), checkNotNull(dataset)); + } + + /** + * Creates a dataset identity given only its user-defined id. + */ + public static DatasetId of(String dataset) { + return new DatasetId(null, checkNotNull(dataset)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof DatasetId && Objects.equals(toPb(), ((DatasetId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, dataset); + } + + @Override + public String toString() { + return toPb().toString(); + } + + DatasetId setProjectId(String projectId) { + return project() != null ? 
this : DatasetId.of(projectId, dataset()); + } + + DatasetReference toPb() { + return new DatasetReference().setProjectId(project).setDatasetId(dataset); + } + + static DatasetId fromPb(DatasetReference datasetRef) { + return new DatasetId( + datasetRef.getProjectId(), + datasetRef.getDatasetId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java new file mode 100644 index 000000000000..aa767b97631b --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java @@ -0,0 +1,473 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.TableReference; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Dataset information. A dataset is a grouping mechanism that holds zero or more + * tables. 
Datasets are the lowest level unit of access control; you cannot control access at the + * table level. + * + * @see + * Managing Jobs, Datasets, and Projects + */ +public class DatasetInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public DatasetInfo apply(Dataset pb) { + return DatasetInfo.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public Dataset apply(DatasetInfo datasetInfo) { + return datasetInfo.toPb(); + } + }; + + private static final long serialVersionUID = -6615133444520365839L; + + private final DatasetId datasetId; + private final List acl; + private final Long creationTime; + private final Long defaultTableLifetime; + private final String description; + private final String etag; + private final String friendlyName; + private final String id; + private final Long lastModified; + private final String location; + private final String selfLink; + + /** + * A builder for {@code DatasetInfo} objects. + */ + public abstract static class Builder { + + /** + * Sets the dataset identity. + */ + public abstract Builder datasetId(DatasetId datasetId); + + /** + * Sets the dataset's access control configuration. + * + * @see Access Control + */ + public abstract Builder acl(List acl); + + abstract Builder creationTime(Long creationTime); + + /** + * Sets the default lifetime of all tables in the dataset, in milliseconds. The minimum value is + * 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the + * dataset will have an expirationTime property set to the creation time plus the value in this + * property, and changing the value will only affect new tables, not existing ones. When the + * expirationTime for a given table is reached, that table will be deleted automatically. 
If a + * table's expirationTime is modified or removed before the table expires, or if you provide an + * explicit expirationTime when creating a table, that value takes precedence over the default + * expiration time indicated by this property. This property is experimental and might be + * subject to change or removed. + */ + public abstract Builder defaultTableLifetime(Long defaultTableLifetime); + + /** + * Sets a user-friendly description for the dataset. + */ + public abstract Builder description(String description); + + abstract Builder etag(String etag); + + /** + * Sets a user-friendly name for the dataset. + */ + public abstract Builder friendlyName(String friendlyName); + + abstract Builder id(String id); + + abstract Builder lastModified(Long lastModified); + + /** + * Sets the geographic location where the dataset should reside. This property is experimental + * and might be subject to change or removed. + * + * @see Dataset + * Location + */ + public abstract Builder location(String location); + + abstract Builder selfLink(String selfLink); + + /** + * Creates a {@code DatasetInfo} object. 
+ */ + public abstract DatasetInfo build(); + } + + static final class BuilderImpl extends Builder { + + private DatasetId datasetId; + private List acl; + private Long creationTime; + private Long defaultTableLifetime; + private String description; + private String etag; + private String friendlyName; + private String id; + private Long lastModified; + private String location; + private String selfLink; + + BuilderImpl() {} + + BuilderImpl(DatasetInfo datasetInfo) { + this.datasetId = datasetInfo.datasetId; + this.acl = datasetInfo.acl; + this.creationTime = datasetInfo.creationTime; + this.defaultTableLifetime = datasetInfo.defaultTableLifetime; + this.description = datasetInfo.description; + this.etag = datasetInfo.etag; + this.friendlyName = datasetInfo.friendlyName; + this.id = datasetInfo.id; + this.lastModified = datasetInfo.lastModified; + this.location = datasetInfo.location; + this.selfLink = datasetInfo.selfLink; + } + + BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) { + if (datasetPb.getDatasetReference() != null) { + this.datasetId = DatasetId.fromPb(datasetPb.getDatasetReference()); + } + if (datasetPb.getAccess() != null) { + this.acl = Lists.transform(datasetPb.getAccess(), new Function() { + @Override + public Acl apply(Dataset.Access accessPb) { + return Acl.fromPb(accessPb); + } + }); + } + this.creationTime = datasetPb.getCreationTime(); + this.defaultTableLifetime = datasetPb.getDefaultTableExpirationMs(); + this.description = datasetPb.getDescription(); + this.etag = datasetPb.getEtag(); + this.friendlyName = datasetPb.getFriendlyName(); + this.id = datasetPb.getId(); + this.lastModified = datasetPb.getLastModifiedTime(); + this.location = datasetPb.getLocation(); + this.selfLink = datasetPb.getSelfLink(); + } + + @Override + public Builder datasetId(DatasetId datasetId) { + this.datasetId = checkNotNull(datasetId); + return this; + } + + @Override + public Builder acl(List acl) { + this.acl = acl != null ? 
ImmutableList.copyOf(acl) : null; + return this; + } + + @Override + Builder creationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + @Override + public Builder defaultTableLifetime(Long defaultTableLifetime) { + this.defaultTableLifetime = + firstNonNull(defaultTableLifetime, Data.nullOf(Long.class)); + return this; + } + + @Override + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + @Override + Builder etag(String etag) { + this.etag = etag; + return this; + } + + @Override + public Builder friendlyName(String friendlyName) { + this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); + return this; + } + + @Override + Builder id(String id) { + this.id = id; + return this; + } + + @Override + Builder lastModified(Long lastModified) { + this.lastModified = lastModified; + return this; + } + + @Override + public Builder location(String location) { + this.location = firstNonNull(location, Data.nullOf(String.class)); + return this; + } + + @Override + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + @Override + public DatasetInfo build() { + return new DatasetInfo(this); + } + } + + DatasetInfo(BuilderImpl builder) { + datasetId = checkNotNull(builder.datasetId); + acl = builder.acl; + creationTime = builder.creationTime; + defaultTableLifetime = builder.defaultTableLifetime; + description = builder.description; + etag = builder.etag; + friendlyName = builder.friendlyName; + id = builder.id; + lastModified = builder.lastModified; + location = builder.location; + selfLink = builder.selfLink; + } + + /** + * Returns the dataset identity. + */ + public DatasetId datasetId() { + return datasetId; + } + + /** + * Returns the dataset's access control configuration. 
+ * + * @see Access Control + */ + public List acl() { + return acl; + } + + /** + * Returns the time when this dataset was created, in milliseconds since the epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the default lifetime of all tables in the dataset, in milliseconds. Once this property + * is set, all newly-created tables in the dataset will have an expirationTime property set to the + * creation time plus the value in this property, and changing the value will only affect new + * tables, not existing ones. When the expirationTime for a given table is reached, that table + * will be deleted automatically. If a table's expirationTime is modified or removed before the + * table expires, or if you provide an explicit expirationTime when creating a table, that value + * takes precedence over the default expiration time indicated by this property. + */ + public Long defaultTableLifetime() { + return defaultTableLifetime; + } + + /** + * Returns a user-friendly description for the dataset. + */ + public String description() { + return description; + } + + /** + * Returns the hash of the dataset resource. + */ + public String etag() { + return etag; + } + + /** + * Returns a user-friendly name for the dataset. + */ + public String friendlyName() { + return friendlyName; + } + + /** + * Returns an opaque id for the dataset. + */ + public String id() { + return id; + } + + /** + * Returns the time when this dataset or any of its tables was last modified, in milliseconds + * since the epoch. + */ + public Long lastModified() { + return lastModified; + } + + /** + * Returns the geographic location where the dataset should reside. + * + * @see + * Dataset Location + */ + public String location() { + return location; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * get or update requests. 
+ */ + public String selfLink() { + return selfLink; + } + + /** + * Returns a builder for the dataset object. + */ + public Builder toBuilder() { + return new BuilderImpl(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("datasetId", datasetId) + .add("creationTime", creationTime) + .add("defaultTableLifetime", defaultTableLifetime) + .add("description", description) + .add("etag", etag) + .add("friendlyName", friendlyName) + .add("id", id) + .add("lastModified", lastModified) + .add("location", location) + .add("selfLink", selfLink) + .add("acl", acl) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(datasetId); + } + + @Override + public boolean equals(Object obj) { + return obj != null + && obj.getClass().equals(DatasetInfo.class) + && Objects.equals(toPb(), ((DatasetInfo) obj).toPb()); + } + + DatasetInfo setProjectId(String projectId) { + Builder builder = toBuilder(); + builder.datasetId(datasetId().setProjectId(projectId)); + if (acl() != null) { + List acls = Lists.newArrayListWithCapacity(acl().size()); + for (Acl acl : acl()) { + if (acl.entity().type() == Acl.Entity.Type.VIEW) { + Dataset.Access accessPb = acl.toPb(); + TableReference viewReferencePb = accessPb.getView(); + if (viewReferencePb.getProjectId() == null) { + viewReferencePb.setProjectId(projectId); + } + acls.add(Acl.of(new Acl.View(TableId.fromPb(viewReferencePb)))); + } else { + acls.add(acl); + } + } + builder.acl(acls); + } + return builder.build(); + } + + Dataset toPb() { + Dataset datasetPb = new Dataset(); + datasetPb.setDatasetReference(datasetId.toPb()); + datasetPb.setCreationTime(creationTime); + datasetPb.setDefaultTableExpirationMs(defaultTableLifetime); + datasetPb.setDescription(description); + datasetPb.setEtag(etag); + datasetPb.setFriendlyName(friendlyName); + datasetPb.setId(id); + datasetPb.setLastModifiedTime(lastModified); + datasetPb.setLocation(location); + 
datasetPb.setSelfLink(selfLink); + if (acl != null) { + datasetPb.setAccess(Lists.transform(acl, new Function() { + @Override + public Dataset.Access apply(Acl acl) { + return acl.toPb(); + } + })); + } + return datasetPb; + } + + /** + * Returns a builder for a {@code DatasetInfo} object given it's identity. + */ + public static Builder builder(DatasetId datasetId) { + return new BuilderImpl().datasetId(datasetId); + } + + /** + * Returns a builder for a {@code DatasetInfo} object given it's user-defined id. + */ + public static Builder builder(String datasetId) { + return builder(DatasetId.of(datasetId)); + } + + /** + * Returns a builder for the DatasetInfo object given it's user-defined project and dataset ids. + */ + public static Builder builder(String projectId, String datasetId) { + return builder(DatasetId.of(projectId, datasetId)); + } + + static DatasetInfo fromPb(Dataset datasetPb) { + return new BuilderImpl(datasetPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableDefinition.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableDefinition.java new file mode 100644 index 000000000000..5f396d948f5a --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableDefinition.java @@ -0,0 +1,408 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.ExternalDataConfiguration; +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery external table definition. BigQuery's external tables are tables whose data + * reside outside of BigQuery but can be queried as normal BigQuery tables. External tables are + * experimental and might be subject to change or removed. + * + * @see Federated Data Sources + * + */ +public class ExternalTableDefinition extends TableDefinition { + + static final Function + FROM_EXTERNAL_DATA_FUNCTION = + new Function() { + @Override + public ExternalTableDefinition apply(ExternalDataConfiguration pb) { + return ExternalTableDefinition.fromExternalDataConfiguration(pb); + } + }; + static final Function + TO_EXTERNAL_DATA_FUNCTION = + new Function() { + @Override + public ExternalDataConfiguration apply(ExternalTableDefinition tableInfo) { + return tableInfo.toExternalDataConfigurationPb(); + } + }; + + private static final long serialVersionUID = -5951580238459622025L; + + private final List sourceUris; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Boolean ignoreUnknownValues; + private final String compression; + + public static final class Builder + extends TableDefinition.Builder { + + private List sourceUris; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Boolean ignoreUnknownValues; + private String compression; + + private Builder() { + super(Type.EXTERNAL); + } + + private Builder(ExternalTableDefinition tableDefinition) { + super(tableDefinition); + this.sourceUris = tableDefinition.sourceUris; + this.formatOptions 
= tableDefinition.formatOptions; + this.maxBadRecords = tableDefinition.maxBadRecords; + this.ignoreUnknownValues = tableDefinition.ignoreUnknownValues; + this.compression = tableDefinition.compression; + } + + private Builder(Table tablePb) { + super(tablePb); + com.google.api.services.bigquery.model.ExternalDataConfiguration externalDataConfiguration = + tablePb.getExternalDataConfiguration(); + if (externalDataConfiguration != null) { + if (externalDataConfiguration.getSourceUris() != null) { + this.sourceUris = ImmutableList.copyOf(externalDataConfiguration.getSourceUris()); + } + if (externalDataConfiguration.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(externalDataConfiguration.getSourceFormat()); + } + this.compression = externalDataConfiguration.getCompression(); + this.ignoreUnknownValues = externalDataConfiguration.getIgnoreUnknownValues(); + if (externalDataConfiguration.getCsvOptions() != null) { + this.formatOptions = CsvOptions.fromPb(externalDataConfiguration.getCsvOptions()); + } + this.maxBadRecords = externalDataConfiguration.getMaxBadRecords(); + } + } + + /** + * Sets the fully-qualified URIs that point to your data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character that must come after the + * bucket's name. Size limits related to load jobs apply to external data sources, plus an + * additional limit of 10 GB maximum size across all URIs. + * + * @see Quota + */ + public Builder sourceUris(List sourceUris) { + this.sourceUris = ImmutableList.copyOf(checkNotNull(sourceUris)); + return this; + } + + /** + * Sets the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. 
+ * + * + * Source Format + */ + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = checkNotNull(formatOptions); + return this; + } + + /** + * Sets the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * The default value is 0, which requires that all records are valid. + */ + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If true, the extra values are ignored. If false, records with extra columns are treated as + * bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value set with {@link #formatOptions(FormatOptions)} + * property determines what BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * Sets compression type of the data source. By default no compression is assumed. + * + * @see + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return this; + } + + /** + * Creates an {@code ExternalTableDefinition} object. + */ + @Override + public ExternalTableDefinition build() { + return new ExternalTableDefinition(this); + } + } + + private ExternalTableDefinition(Builder builder) { + super(builder); + this.compression = builder.compression; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.maxBadRecords = builder.maxBadRecords; + this.formatOptions = builder.formatOptions; + this.sourceUris = builder.sourceUris; + } + + /** + * Returns the compression type of the data source. 
+ * + * @see + * Compression + */ + public String compression() { + return compression; + } + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If true, the extra values are ignored. If false, records with extra columns are treated + * as bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value of {@link #formatOptions()} determines what + * BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + */ + public Integer maxBadRecords() { + return maxBadRecords; + } + + /** + * Returns the fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can + * contain one '*' wildcard character that must come after the bucket's name. Size limits + * related to load jobs apply to external data sources, plus an additional limit of 10 GB + * maximum size across all URIs. + * + * @see Quota + */ + public List sourceUris() { + return sourceUris; + } + + /** + * Returns the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. + */ + @SuppressWarnings("unchecked") + public F formatOptions() { + return (F) formatOptions; + } + + /** + * Returns a builder for the {@code ExternalTableDefinition} object. 
+ */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceUris", sourceUris) + .add("formatOptions", formatOptions) + .add("compression", compression) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("maxBadRecords", maxBadRecords); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExternalTableDefinition && baseEquals((ExternalTableDefinition) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), compression, ignoreUnknownValues, maxBadRecords, + formatOptions, sourceUris); + } + + @Override + com.google.api.services.bigquery.model.Table toPb() { + Table tablePb = super.toPb(); + tablePb.setExternalDataConfiguration(toExternalDataConfigurationPb()); + return tablePb; + } + + com.google.api.services.bigquery.model.ExternalDataConfiguration toExternalDataConfigurationPb() { + com.google.api.services.bigquery.model.ExternalDataConfiguration externalConfigurationPb = + new com.google.api.services.bigquery.model.ExternalDataConfiguration(); + if (compression != null) { + externalConfigurationPb.setCompression(compression); + } + if (ignoreUnknownValues != null) { + externalConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + } + if (maxBadRecords != null) { + externalConfigurationPb.setMaxBadRecords(maxBadRecords); + } + if (schema() != null) { + externalConfigurationPb.setSchema(schema().toPb()); + } + if (formatOptions != null) { + externalConfigurationPb.setSourceFormat(formatOptions.type()); + } + if (sourceUris != null) { + externalConfigurationPb.setSourceUris(sourceUris); + } + if (formatOptions != null && FormatOptions.CSV.equals(formatOptions.type())) { + externalConfigurationPb.setCsvOptions(((CsvOptions) formatOptions).toPb()); + } + return externalConfigurationPb; + } + + /** + * Creates a builder for an ExternalTableDefinition object. 
+ * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. + * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalTableDefinition object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(List sourceUris, Schema schema, FormatOptions format) { + return new Builder().sourceUris(sourceUris).schema(schema).formatOptions(format); + } + + /** + * Creates a builder for an ExternalTableDefinition object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalTableDefinition object given source URI, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(String sourceUri, Schema schema, FormatOptions format) { + return builder(ImmutableList.of(sourceUri), schema, format); + } + + /** + * Creates an ExternalTableDefinition object. + * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. + * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. 
+ * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalTableDefinition object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalTableDefinition of(List sourceUris, Schema schema, + FormatOptions format) { + return builder(sourceUris, schema, format).build(); + } + + /** + * Creates an ExternalTableDefinition object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalTableDefinition object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalTableDefinition of(String sourceUri, Schema schema, FormatOptions format) { + return builder(sourceUri, schema, format).build(); + } + + @SuppressWarnings("unchecked") + static ExternalTableDefinition fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } + + static ExternalTableDefinition fromExternalDataConfiguration( + ExternalDataConfiguration externalDataConfiguration) { + Builder builder = new Builder(); + if (externalDataConfiguration.getSourceUris() != null) { + builder.sourceUris(externalDataConfiguration.getSourceUris()); + } + if (externalDataConfiguration.getSchema() != null) { + builder.schema(Schema.fromPb(externalDataConfiguration.getSchema())); + } + if (externalDataConfiguration.getSourceFormat() != null) { + builder.formatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat())); + } + if (externalDataConfiguration.getCompression() != null) { + builder.compression(externalDataConfiguration.getCompression()); + } + if 
(externalDataConfiguration.getIgnoreUnknownValues() != null) { + builder.ignoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + builder.formatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions())); + } + if (externalDataConfiguration.getMaxBadRecords() != null) { + builder.maxBadRecords(externalDataConfiguration.getMaxBadRecords()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java new file mode 100644 index 000000000000..7c5a2698b159 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java @@ -0,0 +1,294 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationExtract; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery extract job configuration. An extract job exports a BigQuery table to Google + * Cloud Storage. 
The extract destination is provided as URIs
gs://bucket/path) where the + * extracted table should be written. + */ + public Builder destinationUris(List destinationUris) { + this.destinationUris = destinationUris != null ? ImmutableList.copyOf(destinationUris) : null; + return this; + } + + /** + * Sets whether to print out a header row in the results. By default an header is printed. + */ + public Builder printHeader(Boolean printHeader) { + this.printHeader = printHeader; + return this; + } + + /** + * Sets the delimiter to use between fields in the exported data. By default "," is used. + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return this; + } + + /** + * Sets the exported file format. If not set table is exported in CSV format. + * + * + * Destination Format + */ + public Builder format(String format) { + this.format = format; + return this; + } + + /** + * Sets the compression value to use for exported files. If not set exported files are not + * compressed. + * + * + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return this; + } + + public ExtractJobConfiguration build() { + return new ExtractJobConfiguration(this); + } + } + + private ExtractJobConfiguration(Builder builder) { + super(builder); + this.sourceTable = checkNotNull(builder.sourceTable); + this.destinationUris = checkNotNull(builder.destinationUris); + this.printHeader = builder.printHeader; + this.fieldDelimiter = builder.fieldDelimiter; + this.format = builder.format; + this.compression = builder.compression; + } + + /** + * Returns the table to export. + */ + public TableId sourceTable() { + return sourceTable; + } + + /** + * Returns the list of fully-qualified Google Cloud Storage URIs where the extracted table should + * be written. 
+ * + * @see + * Exporting Data Into One or More Files + */ + public List destinationUris() { + return destinationUris; + } + + /** + * Returns whether an header row is printed with the result. + */ + public Boolean printHeader() { + return printHeader; + } + + /** + * Returns the delimiter used between fields in the exported data. + */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the exported files format. + */ + public String format() { + return format; + } + + /** + * Returns the compression value of exported files. + */ + public String compression() { + return compression; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTable", sourceTable) + .add("destinationUris", destinationUris) + .add("format", format) + .add("printHeader", printHeader) + .add("fieldDelimiter", fieldDelimiter) + .add("compression", compression); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExtractJobConfiguration && baseEquals((ExtractJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), sourceTable, destinationUris, printHeader, fieldDelimiter, + format, compression); + } + + @Override + ExtractJobConfiguration setProjectId(String projectId) { + return toBuilder().sourceTable(sourceTable().setProjectId(projectId)).build(); + } + + @Override + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationExtract extractConfigurationPb = new JobConfigurationExtract(); + extractConfigurationPb.setDestinationUris(destinationUris); + extractConfigurationPb.setSourceTable(sourceTable.toPb()); + extractConfigurationPb.setPrintHeader(printHeader); + extractConfigurationPb.setFieldDelimiter(fieldDelimiter); + extractConfigurationPb.setDestinationFormat(format); + extractConfigurationPb.setCompression(compression); + return new 
com.google.api.services.bigquery.model.JobConfiguration() + .setExtract(extractConfigurationPb); + } + + /** + * Creates a builder for a BigQuery Extract Job configuration given source table and destination + * URI. + */ + public static Builder builder(TableId sourceTable, String destinationUri) { + return builder(sourceTable, ImmutableList.of(checkNotNull(destinationUri))); + } + + /** + * Creates a builder for a BigQuery Extract Job configuration given source table and destination + * URIs. + */ + public static Builder builder(TableId sourceTable, List destinationUris) { + return new Builder().sourceTable(sourceTable).destinationUris(destinationUris); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table and destination URI. + */ + public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri) { + return builder(sourceTable, destinationUri).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table and destination URIs. + */ + public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris) { + return builder(sourceTable, destinationUris).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table, format and destination + * URI. + */ + public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri, + String format) { + return builder(sourceTable, destinationUri).format(format).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table, format and destination + * URIs. 
+ */ + public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris, + String format) { + return builder(sourceTable, destinationUris).format(format).build(); + } + + @SuppressWarnings("unchecked") + static ExtractJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration confPb) { + return new Builder(confPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java new file mode 100644 index 000000000000..55fae44c5eed --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java @@ -0,0 +1,375 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Table field. 
A table field has a name, a value, a mode and possibly a + * description. Supported types are: {@link Type#integer()}, {@link Type#bool()}, + * {@link Type#string()}, {@link Type#floatingPoint()}, {@link Type#timestamp()} and + * {@link Type#record(Field...)}. One or more fields form a table's schema. + */ +public class Field implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public Field apply(TableFieldSchema pb) { + return Field.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public TableFieldSchema apply(Field field) { + return field.toPb(); + } + }; + + private static final long serialVersionUID = -8154262932305199256L; + + /** + * Data Types for a BigQuery Table field. This class provides factory methods for all BigQuery + * field types. To instantiate a RECORD value the list of sub-fields must be provided. + * + * @see + * Data Types + */ + public static class Type implements Serializable { + + private static final long serialVersionUID = 2841484762609576959L; + + public enum Value { + STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD + } + + private final Value value; + private final List fields; + + private Type(Value value) { + this.value = checkNotNull(value); + this.fields = null; + } + + private Type(Value value, List fields) { + checkArgument(fields.size() > 0, "Record must have at least one field"); + this.value = value; + this.fields = fields; + } + + /** + * Returns the value identifier. + * + * @see + * Data Types + */ + public Value value() { + return value; + } + + /** + * Returns the list of sub-fields if {@link #value()} is set to {@link Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return fields; + } + + /** + * Returns a {@link Value#STRING} field value. + */ + public static Type string() { + return new Type(Value.STRING); + } + + /** + * Returns an {@link Value#INTEGER} field value. 
+ */ + public static Type integer() { + return new Type(Value.INTEGER); + } + + /** + * Returns a {@link Value#FLOAT} field value. + */ + public static Type floatingPoint() { + return new Type(Value.FLOAT); + } + + /** + * Returns a {@link Value#BOOLEAN} field value. + */ + public static Type bool() { + return new Type(Value.BOOLEAN); + } + + /** + * Returns a {@link Value#TIMESTAMP} field value. + */ + public static Type timestamp() { + return new Type(Value.TIMESTAMP); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. + */ + public static Type record(Field... fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(fields)); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. + */ + public static Type record(List fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(checkNotNull(fields))); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("value", value) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(value, fields); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Type)) { + return false; + } + Type other = (Type) obj; + return Objects.equals(value, other.value) + && Objects.equals(fields, other.fields); + } + } + + /** + * Mode for a BigQuery Table field. {@link Mode#NULLABLE} fields can be set to {@code null}, + * {@link Mode#REQUIRED} fields must be provided. {@link Mode#REPEATED} fields can contain more + * than one value. 
+ */ + public enum Mode { + NULLABLE, REQUIRED, REPEATED + } + + private final String name; + private final Type type; + private final String mode; + private final String description; + + public static final class Builder { + + private String name; + private Type type; + private String mode; + private String description; + + private Builder() {} + + private Builder(Field field) { + this.name = field.name; + this.type = field.type; + this.mode = field.mode; + this.description = field.description; + } + + /** + * Sets the field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or + * underscores (_), and must start with a letter or underscore. The maximum length is 128 + * characters. + */ + public Builder name(String name) { + this.name = checkNotNull(name); + return this; + } + + /** + * Sets the value of the field. + * + * @see + * Data Types + */ + public Builder type(Type type) { + this.type = checkNotNull(type); + return this; + } + + /** + * Sets the mode of the field. When not specified {@link Mode#NULLABLE} is used. + */ + public Builder mode(Mode mode) { + this.mode = mode != null ? mode.name() : Data.nullOf(String.class); + return this; + } + + /** + * Sets the field description. The maximum length is 16K characters. + */ + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + /** + * Creates a {@code Field} object. + */ + public Field build() { + return new Field(this); + } + } + + private Field(Builder builder) { + this.name = checkNotNull(builder.name); + this.type = checkNotNull(builder.type); + this.mode = builder.mode; + this.description = builder.description; + } + + /** + * Returns the field name. + */ + public String name() { + return name; + } + + /** + * Returns the field value. + * + * @see + * Data Types + */ + public Type type() { + return type; + } + + /** + * Returns the field mode. By default {@link Mode#NULLABLE} is used. 
+ */ + public Mode mode() { + return mode != null ? Mode.valueOf(mode) : null; + } + + /** + * Returns the field description. + */ + public String description() { + return Data.isNull(description) ? null : description; + } + + /** + * Returns the list of sub-fields if {@link #type()} is a {@link Type.Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return type.fields(); + } + + /** + * Returns a builder for the {@code Field} object. + */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name) + .add("value", type) + .add("mode", mode) + .add("description", description) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(name, type, mode, description); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Field && Objects.equals(toPb(), ((Field) obj).toPb()); + } + + TableFieldSchema toPb() { + TableFieldSchema fieldSchemaPb = new TableFieldSchema(); + fieldSchemaPb.setName(name); + fieldSchemaPb.setType(type.value().name()); + if (mode != null) { + fieldSchemaPb.setMode(mode); + } + if (description != null) { + fieldSchemaPb.setDescription(description); + } + if (fields() != null) { + List fieldsPb = Lists.transform(fields(), TO_PB_FUNCTION); + fieldSchemaPb.setFields(fieldsPb); + } + return fieldSchemaPb; + } + + /** + * Returns a Field object with given name and value. + */ + public static Field of(String name, Type type) { + return builder(name, type).build(); + } + + /** + * Returns a builder for a Field object with given name and value. 
+ */ + public static Builder builder(String name, Type type) { + return new Builder().name(name).type(type); + } + + static Field fromPb(TableFieldSchema fieldSchemaPb) { + Builder fieldBuilder = new Builder(); + fieldBuilder.name(fieldSchemaPb.getName()); + Type.Value enumValue = Type.Value.valueOf(fieldSchemaPb.getType()); + if (fieldSchemaPb.getMode() != null) { + fieldBuilder.mode(Mode.valueOf(fieldSchemaPb.getMode())); + } + if (fieldSchemaPb.getDescription() != null) { + fieldBuilder.description(fieldSchemaPb.getDescription()); + } + if (fieldSchemaPb.getFields() != null) { + fieldBuilder.type(Type.record(Lists.transform(fieldSchemaPb.getFields(), FROM_PB_FUNCTION))); + } else { + fieldBuilder.type(new Type(enumValue)); + } + return fieldBuilder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java new file mode 100644 index 000000000000..8b27c70db782 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java @@ -0,0 +1,266 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkState; + +import com.google.api.client.util.Data; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google BigQuery Table Field Value class. Objects of this class represent values of a BigQuery + * Table Field. A list of values forms a table row. Tables rows can be gotten as the result of a + * query or when listing table data. + */ +public class FieldValue implements Serializable { + + static final Function FROM_PB_FUNCTION = new Function() { + @Override + public FieldValue apply(Object pb) { + return FieldValue.fromPb(pb); + } + }; + private static final int MICROSECONDS = 1000000; + private static final long serialVersionUID = 469098630191710061L; + + private final Attribute attribute; + private final Object value; + + /** + * The field value's attribute, giving information on the field's content type. + */ + public enum Attribute { + /** + * A primitive field value. A {@code FieldValue} is primitive when the corresponding field has + * type {@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()} or the value is set to {@code null}. + */ + PRIMITIVE, + + /** + * A {@code FieldValue} for a field with {@link Field.Mode#REPEATED} mode. + */ + REPEATED, + + /** + * A {@code FieldValue} for a field of type {@link Field.Type#record(Field...)}. + */ + RECORD + } + + FieldValue(Attribute attribute, Object value) { + this.attribute = attribute; + this.value = value; + } + + /** + * Returns the attribute of this Field Value. 
+ * + * @return {@link Attribute#PRIMITIVE} if the field is a primitive type + * ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}) or is {@code null}. Returns {@link Attribute#REPEATED} if + * the corresponding field has ({@link Field.Mode#REPEATED}) mode. Returns + * {@link Attribute#RECORD} if the corresponding field is a + * {@link Field.Type#record(Field...)} type. + */ + public Attribute attribute() { + return attribute; + } + + /** + * Returns {@code true} if this field's value is {@code null}, {@code false} otherwise. + */ + public boolean isNull() { + return value == null; + } + + /** + * Returns this field's value as an {@link Object}. If {@link #isNull()} is {@code true} this + * method returns {@code null}. + */ + public Object value() { + return value; + } + + /** + * Returns this field's value as a {@link String}. This method should only be used if the + * corresponding field has primitive type ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}). + * + * @throws ClassCastException if the field is not a primitive type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public String stringValue() { + checkNotNull(value); + return (String) value; + } + + /** + * Returns this field's value as a {@code long}. This method should only be used if the + * corresponding field has {@link Field.Type#integer()} type. 
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Integer} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long longValue() { + return Long.parseLong(stringValue()); + } + + /** + * Returns this field's value as a {@link Double}. This method should only be used if the + * corresponding field has {@link Field.Type#floatingPoint()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Double} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public double doubleValue() { + return Double.parseDouble(stringValue()); + } + + /** + * Returns this field's value as a {@link Boolean}. This method should only be used if the + * corresponding field has {@link Field.Type#bool()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws IllegalStateException if the field's value could not be converted to {@link Boolean} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public boolean booleanValue() { + String stringValue = stringValue(); + checkState(stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("false"), + "Field value is not of boolean type"); + return Boolean.parseBoolean(stringValue); + } + + /** + * Returns this field's value as a {@code long}, representing a timestamp in microseconds since + * epoch (UNIX time). This method should only be used if the corresponding field has + * {@link Field.Type#timestamp()} type. 
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Long} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long timestampValue() { + // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since + // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00) + return new Double(((Double.valueOf(stringValue())) * MICROSECONDS)).longValue(); + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #attribute()} is + * {@link Attribute#REPEATED}). + * + * @throws ClassCastException if the field has not {@link Field.Mode#REPEATED} mode + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List repeatedValue() { + checkNotNull(value); + return (List) value; + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Type#record(Field...)} type (i.e. {@link #attribute()} + * is {@link Attribute#RECORD}). 
+ * + * @throws ClassCastException if the field is not a {@link Field.Type#record(Field...)} type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List recordValue() { + checkNotNull(value); + return (List) value; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("attribute", attribute) + .add("value", value) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(attribute, value); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FieldValue)) { + return false; + } + FieldValue other = (FieldValue) obj; + return attribute == other.attribute && Objects.equals(value, other.value); + } + + @SuppressWarnings("unchecked") + static FieldValue fromPb(Object cellPb) { + if (Data.isNull(cellPb)) { + return new FieldValue(Attribute.PRIMITIVE, null); + } + if (cellPb instanceof String) { + return new FieldValue(Attribute.PRIMITIVE, cellPb); + } + if (cellPb instanceof List) { + List cellsListPb = (List) cellPb; + List repeatedCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + repeatedCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.REPEATED, repeatedCells); + } + if (cellPb instanceof Map) { + Map cellMapPb = (Map) cellPb; + if (cellMapPb.containsKey("f")) { + List cellsListPb = (List) cellMapPb.get("f"); + List recordCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + recordCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.RECORD, recordCells); + } + // This should never be the case when we are processing a first level table field (i.e. 
a + // row's field, not a record sub-field) + if (cellMapPb.containsKey("v")) { + return FieldValue.fromPb(cellMapPb.get("v")); + } + } + throw new AssertionError("Unexpected table cell format"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java new file mode 100644 index 000000000000..f46e7b40f4c1 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java @@ -0,0 +1,95 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for Google BigQuery format options. These class define the format of external data + * used by BigQuery, for either federated tables or load jobs. + */ +public class FormatOptions implements Serializable { + + static final String CSV = "CSV"; + static final String JSON = "NEWLINE_DELIMITED_JSON"; + static final String DATASTORE_BACKUP = "DATASTORE_BACKUP"; + private static final long serialVersionUID = -443376052020423691L; + + private final String type; + + FormatOptions(String type) { + this.type = type; + } + + /** + * Returns the external data format, as a string. 
+ */ + public String type() { + return type; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("format", type).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof FormatOptions && Objects.equals(type, ((FormatOptions) obj).type()); + } + + /** + * Default options for CSV format. + */ + public static CsvOptions csv() { + return CsvOptions.builder().build(); + } + + /** + * Default options for NEWLINE_DELIMITED_JSON format. + */ + public static FormatOptions json() { + return new FormatOptions(JSON); + } + + /** + * Default options for DATASTORE_BACKUP format. + */ + public static FormatOptions datastoreBackup() { + return new FormatOptions(DATASTORE_BACKUP); + } + + /** + * Default options for the provided format. + */ + public static FormatOptions of(String format) { + if (checkNotNull(format).equals(CSV)) { + return csv(); + } + return new FormatOptions(format); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java new file mode 100644 index 000000000000..f0d61583f83f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java @@ -0,0 +1,456 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google Cloud BigQuery insert all request. This class can be used to stream data into BigQuery one + * record at a time without needing to run a load job. This approach enables querying data without + * the delay of running a load job. There are several important trade-offs to consider before + * choosing an approach. + * + * @see Streaming Data into + * BigQuery + */ +public class InsertAllRequest implements Serializable { + + private static final long serialVersionUID = 211200307773853078L; + + private final TableId table; + private final List rows; + private final Boolean skipInvalidRows; + private final Boolean ignoreUnknownValues; + private final String templateSuffix; + + /** + * A Google Big Query row to be inserted into a table. Each {@code RowToInsert} has an associated + * id used by BigQuery to detect duplicate insertion requests on a best-effort basis. + * + *

Example usage of creating a row to insert: + *

 {@code
+   * List repeatedFieldValue = Arrays.asList(1L, 2L);
+   * Map recordContent = new HashMap();
+   * recordContent.put("subfieldName1", "value");
+   * recordContent.put("subfieldName2", repeatedFieldValue);
+   * Map rowContent = new HashMap();
+   * rowContent.put("fieldName1", true);
+   * rowContent.put("fieldName2", recordContent);
+   * RowToInsert row = new RowToInsert("rowId", rowContent);
+   * }
+ * + * @see + * Data Consistency + */ + public static class RowToInsert implements Serializable { + + private static final long serialVersionUID = 8563060538219179157L; + + private final String id; + private final Map content; + + RowToInsert(String id, Map content) { + this.id = id; + this.content = ImmutableMap.copyOf(content); + } + + /** + * Returns the id associated with the row. Returns {@code null} if not set. + */ + public String id() { + return id; + } + + /** + * Returns the actual content of the row, as a map. + */ + public Map content() { + return content; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("id", id) + .add("content", content) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(id, content); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RowToInsert)) { + return false; + } + RowToInsert other = (RowToInsert) obj; + return Objects.equals(id, other.id) + && Objects.equals(content, other.content); + } + + /** + * Creates a row to be inserted with associated id. + * + * @param id id of the row, used to identify duplicates + * @param content the actual content of the row + */ + public static RowToInsert of(String id, Map content) { + return new RowToInsert(checkNotNull(id), checkNotNull(content)); + } + + /** + * Creates a row to be inserted without associated id. + * + * @param content the actual content of the row + */ + public static RowToInsert of(Map content) { + return new RowToInsert(null, checkNotNull(content)); + } + } + + public static final class Builder { + + private TableId table; + private List rows; + private Boolean skipInvalidRows; + private Boolean ignoreUnknownValues; + private String templateSuffix; + + private Builder() {} + + /** + * Sets the destination table for rows insert request. 
+ */ + public Builder table(TableId table) { + this.table = checkNotNull(table); + return this; + } + + /** + * Sets the rows to insert as a list of {@link RowToInsert} objects. + */ + public Builder rows(Iterable rows) { + this.rows = Lists.newLinkedList(checkNotNull(rows)); + return this; + } + + /** + * Adds a row to be inserted. + */ + public Builder addRow(RowToInsert rowToInsert) { + checkNotNull(rowToInsert); + if (rows == null) { + rows = Lists.newArrayList(); + } + rows.add(rowToInsert); + return this; + } + + /** + * Adds a row to be inserted with associated id. + * + *

Example usage of adding a row with associated id: + *

 {@code
+     * InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     * List repeatedFieldValue = Arrays.asList(1L, 2L);
+     * Map recordContent = new HashMap();
+     * recordContent.put("subfieldName1", "value");
+     * recordContent.put("subfieldName2", repeatedFieldValue);
+     * Map rowContent = new HashMap();
+     * rowContent.put("fieldName1", true);
+     * rowContent.put("fieldName2", recordContent);
+     * builder.addRow("rowId", rowContent);
+     * }
+ */ + public Builder addRow(String id, Map content) { + addRow(new RowToInsert(id, content)); + return this; + } + + /** + * Adds a row to be inserted without an associated id. + * + *

Example usage of adding a row without an associated id: + *

 {@code
+     * InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     * List repeatedFieldValue = Arrays.asList(1L, 2L);
+     * Map recordContent = new HashMap();
+     * recordContent.put("subfieldName1", "value");
+     * recordContent.put("subfieldName2", repeatedFieldValue);
+     * Map rowContent = new HashMap();
+     * rowContent.put("fieldName1", true);
+     * rowContent.put("fieldName2", recordContent);
+     * builder.addRow(rowContent);
+     * }
+ */ + public Builder addRow(Map content) { + addRow(new RowToInsert(null, content)); + return this; + } + + /** + * Sets whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Builder skipInvalidRows(boolean skipInvalidRows) { + this.skipInvalidRows = skipInvalidRows; + return this; + } + + /** + * Sets whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Builder ignoreUnknownValues(boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * If specified, the destination table is treated as a base template. Rows are inserted into an + * instance table named "{destination}{templateSuffix}". BigQuery will manage the creation of + * the instance table, using the schema of the base template table. Table creation might take + * some time. To obtain table's information after {@link BigQuery#insertAll(InsertAllRequest)} + * is called use: + *
 {@code
+     * String suffixTableId = ...;
+     * TableInfo suffixTable = bigquery.getTable(DATASET, suffixTableId);
+     * while (suffixTable == null) {
+     *   Thread.sleep(1000L);
+     *   suffixTable = bigquery.getTable(DATASET, suffixTableId);
+     * }}
+ * + * @see + * Template Tables + */ + public Builder templateSuffix(String templateSuffix) { + this.templateSuffix = templateSuffix; + return this; + } + + public InsertAllRequest build() { + return new InsertAllRequest(this); + } + } + + private InsertAllRequest(Builder builder) { + this.table = checkNotNull(builder.table); + this.rows = ImmutableList.copyOf(checkNotNull(builder.rows)); + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.skipInvalidRows = builder.skipInvalidRows; + this.templateSuffix = builder.templateSuffix; + } + + /** + * Returns the destination table for rows insert request. + */ + public TableId table() { + return table; + } + + /** + * Returns the rows to be inserted. + */ + public List rows() { + return rows; + } + + /** + * Returns whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Boolean skipInvalidRows() { + return skipInvalidRows; + } + + /** + * If specified, the destination table is treated as a base template. Rows are inserted into an + * instance table named "{destination}{templateSuffix}". BigQuery will manage the creation of the + * instance table, using the schema of the base template table. Table creation might take some + * time. To obtain table's information after {@link BigQuery#insertAll(InsertAllRequest)} is + * called use: + *
+   * <pre> {@code
+   * String suffixTableId = ...;
+   * TableInfo suffixTable = bigquery.getTable(DATASET, suffixTableId);
+   * while (suffixTable == null) {
+   *   Thread.sleep(1000L);
+   *   suffixTable = bigquery.getTable(DATASET, suffixTableId);
+   * }}</pre>
+ * + * @see + * Template Tables + */ + public String templateSuffix() { + return templateSuffix; + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. + */ + public static Builder builder(TableId table) { + return new Builder().table(table); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, Iterable rows) { + return builder(table).rows(rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, RowToInsert... rows) { + return builder(table, ImmutableList.copyOf(rows)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. + */ + public static Builder builder(String datasetId, String tableId) { + return new Builder().table(TableId.of(datasetId, tableId)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, Iterable rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, RowToInsert... rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. 
+ */ + public static Builder builder(TableInfo tableInfo, RowToInsert... rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableId tableId, Iterable rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableId tableId, RowToInsert... rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, Iterable rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, RowToInsert... rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableInfo tableInfo, RowToInsert... 
rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("table", table) + .add("rows", rows) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("skipInvalidRows", skipInvalidRows) + .add("templateSuffix", templateSuffix) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(table, rows, ignoreUnknownValues, skipInvalidRows, templateSuffix); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof InsertAllRequest)) { + return false; + } + InsertAllRequest other = (InsertAllRequest) obj; + return Objects.equals(table, other.table) + && Objects.equals(rows, other.rows) + && Objects.equals(ignoreUnknownValues, other.ignoreUnknownValues) + && Objects.equals(skipInvalidRows, other.skipInvalidRows) + && Objects.equals(templateSuffix, other.templateSuffix); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java new file mode 100644 index 000000000000..992c5d851bbc --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java @@ -0,0 +1,121 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.ErrorProto;
+import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
+import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Google Cloud BigQuery insert all response. Objects of this class possibly contain errors for an
+ * {@link InsertAllRequest}. If a row failed to be inserted, the non-empty list of errors associated
+ * to that row's index can be obtained with {@link InsertAllResponse#errorsFor(long)}.
+ * {@link InsertAllResponse#insertErrors()} can be used to return all errors caused by a
+ * {@link InsertAllRequest} as a map.
+ */
+public class InsertAllResponse implements Serializable {
+
+  private static final long serialVersionUID = -6934152676514098452L;
+
+  private final Map<Long, List<BigQueryError>> insertErrors;
+
+  InsertAllResponse(Map<Long, List<BigQueryError>> insertErrors) {
+    this.insertErrors = insertErrors != null ? ImmutableMap.copyOf(insertErrors)
+        : ImmutableMap.<Long, List<BigQueryError>>of();
+  }
+
+  /**
+   * Returns all insertion errors as a map whose keys are indexes of rows that failed to insert.
+   * Each failed row index is associated with a non-empty list of {@link BigQueryError}.
+   */
+  public Map<Long, List<BigQueryError>> insertErrors() {
+    return insertErrors;
+  }
+
+  /**
+   * Returns errors for the provided row index. If no error exists returns {@code null}.
+   */
+  public List<BigQueryError> errorsFor(long index) {
+    return insertErrors.get(index);
+  }
+
+  /**
+   * Returns {@code true} if at least one row failed to insert, {@code false} otherwise.
+   * If {@code false} {@link #insertErrors()} returns an empty map.
+   */
+  public boolean hasErrors() {
+    return !insertErrors.isEmpty();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(insertErrors);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof InsertAllResponse
+        && Objects.equals(insertErrors, ((InsertAllResponse) obj).insertErrors);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this).add("insertErrors", insertErrors).toString();
+  }
+
+  TableDataInsertAllResponse toPb() {
+    TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse();
+    if (!insertErrors.isEmpty()) {
+      responsePb.setInsertErrors(ImmutableList.copyOf(Iterables.transform(insertErrors.entrySet(),
+          new Function<Map.Entry<Long, List<BigQueryError>>, InsertErrors>() {
+            @Override
+            public InsertErrors apply(Map.Entry<Long, List<BigQueryError>> entry) {
+              return new InsertErrors()
+                  .setIndex(entry.getKey())
+                  .setErrors(Lists.transform(entry.getValue(), BigQueryError.TO_PB_FUNCTION));
+            }
+          })));
+    }
+    return responsePb;
+  }
+
+  static InsertAllResponse fromPb(TableDataInsertAllResponse responsePb) {
+    Map<Long, List<BigQueryError>> insertErrors = null;
+    if (responsePb.getInsertErrors() != null) {
+      List<InsertErrors> errorsPb = responsePb.getInsertErrors();
+      insertErrors = Maps.newHashMapWithExpectedSize(errorsPb.size());
+      for (InsertErrors errorPb : errorsPb) {
+        insertErrors.put(errorPb.getIndex(), Lists.transform(
+            errorPb.getErrors() != null ? errorPb.getErrors() : ImmutableList.<ErrorProto>of(),
+            BigQueryError.FROM_PB_FUNCTION));
+      }
+    }
+    return new InsertAllResponse(insertErrors);
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java
new file mode 100644
index 000000000000..1e63344a600d
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.Objects; + +/** + * A Google BigQuery Job. + * + *

+ * <p>Objects of this class are immutable. To get a {@code Job} object with the most recent
+ * information use {@link #reload}. {@code Job} adds a layer of service-related functionality over
+ * {@link JobInfo}.
+ * </p>
+ */ +public final class Job extends JobInfo { + + private static final long serialVersionUID = -4324100991693024704L; + + private final BigQueryOptions options; + private transient BigQuery bigquery; + + /** + * A builder for {@code Job} objects. + */ + public static final class Builder extends JobInfo.Builder { + + private final BigQuery bigquery; + private final JobInfo.BuilderImpl infoBuilder; + + Builder(BigQuery bigquery, JobConfiguration configuration) { + this.bigquery = bigquery; + this.infoBuilder = new JobInfo.BuilderImpl(); + this.infoBuilder.configuration(configuration); + } + + Builder(Job job) { + this.bigquery = job.bigquery; + this.infoBuilder = new JobInfo.BuilderImpl(job); + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + public Builder jobId(JobId jobId) { + infoBuilder.jobId(jobId); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + Builder status(JobStatus status) { + infoBuilder.status(status); + return this; + } + + @Override + Builder statistics(JobStatistics statistics) { + infoBuilder.statistics(statistics); + return this; + } + + @Override + Builder userEmail(String userEmail) { + infoBuilder.userEmail(userEmail); + return this; + } + + @Override + public Builder configuration(JobConfiguration configuration) { + infoBuilder.configuration(configuration); + return this; + } + + @Override + public Job build() { + return new Job(bigquery, infoBuilder); + } + } + + Job(BigQuery bigquery, JobInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.bigquery = checkNotNull(bigquery); + this.options = bigquery.options(); + } + + /** + * Checks if this job exists. 
+ * + * @return {@code true} if this job exists, {@code false} otherwise + * @throws BigQueryException upon failure + */ + public boolean exists() { + return bigquery.getJob(jobId(), BigQuery.JobOption.fields()) != null; + } + + /** + * Checks if this job has completed its execution, either failing or succeeding. If the job does + * not exist this method returns {@code false}. To correctly wait for job's completion check that + * the job exists first, using {@link #exists()}: + *
+   * <pre> {@code
+   * if (job.exists()) {
+   *   while(!job.isDone()) {
+   *     Thread.sleep(1000L);
+   *   }
+   * }}</pre>
+ * + * @return {@code true} if this job is in {@link JobStatus.State#DONE} state, {@code false} if the + * state is not {@link JobStatus.State#DONE} or the job does not exist + * @throws BigQueryException upon failure + */ + public boolean isDone() { + Job job = bigquery.getJob(jobId(), BigQuery.JobOption.fields(BigQuery.JobField.STATUS)); + return job != null && job.status().state() == JobStatus.State.DONE; + } + + /** + * Fetches current job's latest information. Returns {@code null} if the job does not exist. + * + * @param options job options + * @return a {@code Job} object with latest information or {@code null} if not found + * @throws BigQueryException upon failure + */ + public Job reload(BigQuery.JobOption... options) { + return bigquery.getJob(jobId().job(), options); + } + + /** + * Sends a job cancel request. + * + * @return {@code true} if cancel request was sent successfully, {@code false} if job was not + * found + * @throws BigQueryException upon failure + */ + public boolean cancel() { + return bigquery.cancel(jobId()); + } + + /** + * Returns the job's {@code BigQuery} object used to issue requests. 
+ */ + public BigQuery bigquery() { + return bigquery; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Job + && Objects.equals(toPb(), ((Job) obj).toPb()) + && Objects.equals(options, ((Job) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.bigquery = options.service(); + } + + static Job fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Job jobPb) { + return new Job(bigquery, new JobInfo.BuilderImpl(jobPb)); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java new file mode 100644 index 000000000000..2244969567ef --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java @@ -0,0 +1,145 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for a BigQuery job configuration. + */ +public abstract class JobConfiguration implements Serializable { + + private static final long serialVersionUID = -548132177415406526L; + + private final Type type; + + /** + * Type of a BigQuery Job. + */ + enum Type { + /** + * A Copy Job copies an existing table to another new or existing table. Instances of + * {@code JobConfiguration} for this type are implemented by {@link CopyJobConfiguration}. + */ + COPY, + /** + * An Extract Job exports a BigQuery table to Google Cloud Storage. Instances of + * {@code JobConfiguration} for this type are implemented by {@link ExtractJobConfiguration}. + */ + EXTRACT, + /** + * A Load Job loads data from one of several formats into a table. Instances of + * {@code JobConfiguration} for this type are implemented by {@link LoadJobConfiguration}. + */ + LOAD, + /** + * A Query Job runs a query against BigQuery data. Instances of + * {@code JobConfiguration} for this type are implemented by {@link QueryJobConfiguration}. + */ + QUERY + } + + /** + * Base builder for job configurations. + * + * @param the job configuration type + * @param the job configuration builder + */ + public abstract static class Builder> { + + private Type type; + + Builder(Type type) { + this.type = checkNotNull(type); + } + + @SuppressWarnings("unchecked") + B self() { + return (B) this; + } + + B type(Type type) { + this.type = checkNotNull(type); + return self(); + } + + /** + * Creates an object. + */ + public abstract T build(); + } + + JobConfiguration(Builder builder) { + this.type = builder.type; + } + + /** + * Returns the type of the job configuration. 
+ */ + public Type type() { + return type; + } + + /** + * Returns a builder for the object. + */ + public abstract Builder toBuilder(); + + ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this).add("type", type); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + final int baseHashCode() { + return Objects.hash(type); + } + + final boolean baseEquals(JobConfiguration jobConfiguration) { + return Objects.equals(toPb(), jobConfiguration.toPb()); + } + + abstract JobConfiguration setProjectId(String projectId); + + abstract com.google.api.services.bigquery.model.JobConfiguration toPb(); + + @SuppressWarnings("unchecked") + static T fromPb( + com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + if (configurationPb.getCopy() != null) { + return (T) CopyJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getExtract() != null) { + return (T) ExtractJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getLoad() != null) { + return (T) LoadJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getQuery() != null) { + return (T) QueryJobConfiguration.fromPb(configurationPb); + } else { + // never reached + throw new IllegalArgumentException("Job configuration is not supported"); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java new file mode 100644 index 000000000000..898c894f9a21 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobReference; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Job identity. + */ +public class JobId implements Serializable { + + private static final long serialVersionUID = 1225914835379688976L; + + private final String project; + private final String job; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns the job's user-defined id. + */ + public String job() { + return job; + } + + private JobId(String project, String job) { + this.project = project; + this.job = job; + } + + /** + * Creates a job identity given project's and job's user-defined id. + */ + public static JobId of(String project, String job) { + return new JobId(checkNotNull(project), checkNotNull(job)); + } + + /** + * Creates a job identity given only its user-defined id. 
+ */ + public static JobId of(String job) { + return new JobId(null, checkNotNull(job)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof JobId && Objects.equals(toPb(), ((JobId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, job); + } + + @Override + public String toString() { + return toPb().toString(); + } + + JobReference toPb() { + return new JobReference().setProjectId(project).setJobId(job); + } + + static JobId fromPb(JobReference jobRef) { + return new JobId(jobRef.getProjectId(), jobRef.getJobId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java new file mode 100644 index 000000000000..1adf7fabafc1 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java @@ -0,0 +1,386 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.Job; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Job information. Jobs are objects that manage asynchronous tasks such as running + * queries, loading data, and exporting data. 
Use {@link CopyJobConfiguration} for a job that + * copies an existing table. Use {@link ExtractJobConfiguration} for a job that exports a table to + * Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads data from Google + * Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that runs a query. + * + * @see Jobs + */ +public class JobInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public JobInfo apply(Job pb) { + return JobInfo.fromPb(pb); + } + }; + + private static final long serialVersionUID = -3272941007234620265L; + + private final String etag; + private final String id; + private final JobId jobId; + private final String selfLink; + private final JobStatus status; + private final JobStatistics statistics; + private final String userEmail; + private final JobConfiguration configuration; + + /** + * Specifies whether the job is allowed to create new tables. + */ + public enum CreateDisposition { + /** + * Configures the job to create the table if it does not exist. + */ + CREATE_IF_NEEDED, + + /** + * Configures the job to fail with a not-found error if the table does not exist. + */ + CREATE_NEVER + } + + /** + * Specifies the action that occurs if the destination table already exists. + */ + public enum WriteDisposition { + /** + * Configures the job to overwrite the table data if table already exists. + */ + WRITE_TRUNCATE, + + /** + * Configures the job to append data to the table if it already exists. + */ + WRITE_APPEND, + + /** + * Configures the job to fail with a duplicate error if the table already exists. + */ + WRITE_EMPTY + } + + /** + * A builder for {@code JobInfo} objects. + */ + public abstract static class Builder { + + abstract Builder etag(String etag); + + abstract Builder id(String id); + + /** + * Sets the job identity. 
+ */ + public abstract Builder jobId(JobId jobId); + + abstract Builder selfLink(String selfLink); + + abstract Builder status(JobStatus status); + + abstract Builder statistics(JobStatistics statistics); + + abstract Builder userEmail(String userEmail); + + /** + * Sets a configuration for the {@code JobInfo} object. Use {@link CopyJobConfiguration} for a + * job that copies an existing table. Use {@link ExtractJobConfiguration} for a job that exports + * a table to Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads data + * from Google Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that runs + * a query. + */ + public abstract Builder configuration(JobConfiguration configuration); + + /** + * Creates a {@code JobInfo} object. + */ + public abstract JobInfo build(); + } + + static final class BuilderImpl extends Builder { + + private String etag; + private String id; + private JobId jobId; + private String selfLink; + private JobStatus status; + private JobStatistics statistics; + private String userEmail; + private JobConfiguration configuration; + + BuilderImpl() {} + + BuilderImpl(JobInfo jobInfo) { + this.etag = jobInfo.etag; + this.id = jobInfo.id; + this.jobId = jobInfo.jobId; + this.selfLink = jobInfo.selfLink; + this.status = jobInfo.status; + this.statistics = jobInfo.statistics; + this.userEmail = jobInfo.userEmail; + this.configuration = jobInfo.configuration; + } + + BuilderImpl(Job jobPb) { + this.etag = jobPb.getEtag(); + this.id = jobPb.getId(); + if (jobPb.getJobReference() != null) { + this.jobId = JobId.fromPb(jobPb.getJobReference()); + } + this.selfLink = jobPb.getSelfLink(); + if (jobPb.getStatus() != null) { + this.status = JobStatus.fromPb(jobPb.getStatus()); + } + if (jobPb.getStatistics() != null) { + this.statistics = JobStatistics.fromPb(jobPb.getStatistics()); + } + this.userEmail = jobPb.getUserEmail(); + this.configuration = JobConfiguration.fromPb(jobPb.getConfiguration()); + } + + 
@Override + Builder etag(String etag) { + this.etag = etag; + return this; + } + + @Override + Builder id(String id) { + this.id = id; + return this; + } + + @Override + public Builder jobId(JobId jobId) { + this.jobId = jobId; + return this; + } + + @Override + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + @Override + Builder status(JobStatus status) { + this.status = status; + return this; + } + + @Override + Builder statistics(JobStatistics statistics) { + this.statistics = statistics; + return this; + } + + @Override + Builder userEmail(String userEmail) { + this.userEmail = userEmail; + return this; + } + + @Override + public Builder configuration(JobConfiguration configuration) { + this.configuration = configuration; + return this; + } + + @Override + public JobInfo build() { + return new JobInfo(this); + } + } + + JobInfo(BuilderImpl builder) { + this.jobId = builder.jobId; + this.etag = builder.etag; + this.id = builder.id; + this.selfLink = builder.selfLink; + this.status = builder.status; + this.statistics = builder.statistics; + this.userEmail = builder.userEmail; + this.configuration = builder.configuration; + } + + /** + * Returns the hash of the job resource. + */ + public String etag() { + return etag; + } + + /** + * Returns an opaque id for the job. + */ + public String id() { + return id; + } + + /** + * Returns the job identity. + */ + public JobId jobId() { + return jobId; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * GET requests. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns the status of this job. Examine this value when polling an asynchronous job to see if + * the job is complete. + */ + public JobStatus status() { + return status; + } + + /** + * Returns information about the job, including starting time and ending time of the job. 
+ */ + @SuppressWarnings("unchecked") + public S statistics() { + return (S) statistics; + } + + /** + * Returns the email address of the user who ran the job. + */ + public String userEmail() { + return userEmail; + } + + /** + * Returns the job's configuration. + */ + @SuppressWarnings("unchecked") + public C configuration() { + return (C) configuration; + } + + /** + * Returns a builder for the job object. + */ + public Builder toBuilder() { + return new BuilderImpl(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("job", jobId) + .add("status", status) + .add("statistics", statistics) + .add("userEmail", userEmail) + .add("etag", etag) + .add("id", id) + .add("selfLink", selfLink) + .add("configuration", configuration) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public boolean equals(Object obj) { + return obj != null + && obj.getClass().equals(JobInfo.class) + && Objects.equals(toPb(), ((JobInfo) obj).toPb()); + } + + JobInfo setProjectId(String projectId) { + return toBuilder().configuration(configuration.setProjectId(projectId)).build(); + } + + Job toPb() { + Job jobPb = new Job(); + jobPb.setEtag(etag); + jobPb.setId(id); + jobPb.setSelfLink(selfLink); + jobPb.setUserEmail(userEmail); + if (jobId != null) { + jobPb.setJobReference(jobId.toPb()); + } + if (status != null) { + jobPb.setStatus(status.toPb()); + } + if (statistics != null) { + jobPb.setStatistics(statistics.toPb()); + } + jobPb.setConfiguration(configuration.toPb()); + return jobPb; + } + + /** + * Returns a builder for a {@code JobInfo} object given the job configuration. Use + * {@link CopyJobConfiguration} for a job that copies an existing table. Use + * {@link ExtractJobConfiguration} for a job that exports a table to Google Cloud Storage. Use + * {@link LoadJobConfiguration} for a job that loads data from Google Cloud Storage into a table. 
+ * Use {@link QueryJobConfiguration} for a job that runs a query. + */ + public static Builder builder(JobConfiguration configuration) { + return new BuilderImpl().configuration(configuration); + } + + /** + * Returns a {@code JobInfo} object given the job configuration. Use {@link CopyJobConfiguration} + * for a job that copies an existing table. Use {@link ExtractJobConfiguration} for a job that + * exports a table to Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads + * data from Google Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that + * runs a query. + */ + public static JobInfo of(JobConfiguration configuration) { + return builder(configuration).build(); + } + + /** + * Returns a {@code JobInfo} object given the job identity and configuration. Use + * {@link CopyJobConfiguration} for a job that copies an existing table. Use + * {@link ExtractJobConfiguration} for a job that exports a table to Google Cloud Storage. Use + * {@link LoadJobConfiguration} for a job that loads data from Google Cloud Storage into a table. + * Use {@link QueryJobConfiguration} for a job that runs a query. 
+ */ + public static JobInfo of(JobId jobId, JobConfiguration configuration) { + return builder(configuration).jobId(jobId).build(); + } + + static JobInfo fromPb(Job jobPb) { + return new BuilderImpl(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java new file mode 100644 index 000000000000..34e4917921ba --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java @@ -0,0 +1,516 @@ +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.JobStatistics2; +import com.google.api.services.bigquery.model.JobStatistics3; +import com.google.api.services.bigquery.model.JobStatistics4; +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * A Google BigQuery Job statistics. + */ +public class JobStatistics implements Serializable { + + private static final long serialVersionUID = 1433024714741660399L; + + private final Long creationTime; + private final Long endTime; + private final Long startTime; + + /** + * A Google BigQuery Extract Job statistics. 
+ */ + public static class ExtractStatistics extends JobStatistics { + + private static final long serialVersionUID = -1566598819212767373L; + + private final List destinationUriFileCounts; + + static final class Builder extends JobStatistics.Builder { + + private List destinationUriFileCounts; + + private Builder() {} + + private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { + super(statisticsPb); + this.destinationUriFileCounts = statisticsPb.getExtract().getDestinationUriFileCounts(); + } + + Builder destinationUriFileCounts(List destinationUriFileCounts) { + this.destinationUriFileCounts = destinationUriFileCounts; + return self(); + } + + @Override + ExtractStatistics build() { + return new ExtractStatistics(this); + } + } + + private ExtractStatistics(Builder builder) { + super(builder); + this.destinationUriFileCounts = builder.destinationUriFileCounts; + } + + /** + * Returns the number of files per destination URI or URI pattern specified in the extract job. + * These values will be in the same order as the URIs specified by + * {@link ExtractJobConfiguration#destinationUris()}. 
+ */ + public List destinationUriFileCounts() { + return destinationUriFileCounts; + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper().add("destinationUriFileCounts", destinationUriFileCounts); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExtractStatistics + && Objects.equals(toPb(), ((ExtractStatistics) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), destinationUriFileCounts); + } + + @Override + com.google.api.services.bigquery.model.JobStatistics toPb() { + com.google.api.services.bigquery.model.JobStatistics statisticsPb = super.toPb(); + return statisticsPb.setExtract( + new JobStatistics4().setDestinationUriFileCounts(destinationUriFileCounts)); + } + + static Builder builder() { + return new Builder(); + } + + @SuppressWarnings("unchecked") + static ExtractStatistics fromPb( + com.google.api.services.bigquery.model.JobStatistics statisticPb) { + return new Builder(statisticPb).build(); + } + } + + /** + * A Google BigQuery Load Job statistics. 
+ */ + public static class LoadStatistics extends JobStatistics { + + private static final long serialVersionUID = -707369246536309215L; + + private final Long inputBytes; + private final Long inputFiles; + private final Long outputBytes; + private final Long outputRows; + + static final class Builder extends JobStatistics.Builder { + + private Long inputBytes; + private Long inputFiles; + private Long outputBytes; + private Long outputRows; + + private Builder() {} + + private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { + super(statisticsPb); + this.inputBytes = statisticsPb.getLoad().getInputFileBytes(); + this.inputFiles = statisticsPb.getLoad().getInputFiles(); + this.outputBytes = statisticsPb.getLoad().getOutputBytes(); + this.outputRows = statisticsPb.getLoad().getOutputRows(); + } + + Builder inputBytes(Long inputBytes) { + this.inputBytes = inputBytes; + return self(); + } + + Builder inputFiles(Long inputFiles) { + this.inputFiles = inputFiles; + return self(); + } + + Builder outputBytes(Long outputBytes) { + this.outputBytes = outputBytes; + return self(); + } + + Builder outputRows(Long outputRows) { + this.outputRows = outputRows; + return self(); + } + + @Override + LoadStatistics build() { + return new LoadStatistics(this); + } + } + + private LoadStatistics(Builder builder) { + super(builder); + this.inputBytes = builder.inputBytes; + this.inputFiles = builder.inputFiles; + this.outputBytes = builder.outputBytes; + this.outputRows = builder.outputRows; + + } + + /** + * Returns the number of bytes of source data in a load job. + */ + public Long inputBytes() { + return inputBytes; + } + + /** + * Returns the number of source files in a load job. + */ + public Long inputFiles() { + return inputFiles; + } + + /** + * Returns the size of the data loaded by a load job so far, in bytes. + */ + public Long outputBytes() { + return outputBytes; + } + + /** + * Returns the number of rows loaded by a load job so far. 
+ */ + public Long outputRows() { + return outputRows; + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("inputBytes", inputBytes) + .add("inputFiles", inputFiles) + .add("outputBytes", outputBytes) + .add("outputRows", outputRows); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof LoadStatistics && Objects.equals(toPb(), ((LoadStatistics) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), inputBytes, inputFiles, outputBytes, outputRows); + } + + @Override + com.google.api.services.bigquery.model.JobStatistics toPb() { + JobStatistics3 loadStatisticsPb = new JobStatistics3(); + loadStatisticsPb.setInputFileBytes(inputBytes); + loadStatisticsPb.setInputFiles(inputFiles); + loadStatisticsPb.setOutputBytes(outputBytes); + loadStatisticsPb.setOutputRows(outputRows); + return super.toPb().setLoad(loadStatisticsPb); + } + + static Builder builder() { + return new Builder(); + } + + @SuppressWarnings("unchecked") + static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistics statisticPb) { + return new Builder(statisticPb).build(); + } + } + + /** + * A Google BigQuery Query Job statistics. 
+ */ + public static class QueryStatistics extends JobStatistics { + + private static final long serialVersionUID = 7539354109226732353L; + + private final Integer billingTier; + private final Boolean cacheHit; + private final Long totalBytesBilled; + private final Long totalBytesProcessed; + private final List queryPlan; + + static final class Builder extends JobStatistics.Builder { + + private Integer billingTier; + private Boolean cacheHit; + private Long totalBytesBilled; + private Long totalBytesProcessed; + private List queryPlan; + + private Builder() {} + + private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { + super(statisticsPb); + this.billingTier = statisticsPb.getQuery().getBillingTier(); + this.cacheHit = statisticsPb.getQuery().getCacheHit(); + this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled(); + this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed(); + if (statisticsPb.getQuery().getQueryPlan() != null) { + this.queryPlan = + Lists.transform(statisticsPb.getQuery().getQueryPlan(), QueryStage.FROM_PB_FUNCTION); + } + } + + Builder billingTier(Integer billingTier) { + this.billingTier = billingTier; + return self(); + } + + Builder cacheHit(Boolean cacheHit) { + this.cacheHit = cacheHit; + return self(); + } + + Builder totalBytesBilled(Long totalBytesBilled) { + this.totalBytesBilled = totalBytesBilled; + return self(); + } + + Builder totalBytesProcessed(Long totalBytesProcessed) { + this.totalBytesProcessed = totalBytesProcessed; + return self(); + } + + Builder queryPlan(List queryPlan) { + this.queryPlan = queryPlan; + return self(); + } + + @Override + QueryStatistics build() { + return new QueryStatistics(this); + } + } + + private QueryStatistics(Builder builder) { + super(builder); + this.billingTier = builder.billingTier; + this.cacheHit = builder.cacheHit; + this.totalBytesBilled = builder.totalBytesBilled; + this.totalBytesProcessed = builder.totalBytesProcessed; + 
this.queryPlan = builder.queryPlan; + } + + /** + * Returns the billing tier for the job. + */ + public Integer billingTier() { + return billingTier; + } + + /** + * Returns whether the query result was fetched from the query cache. + * + * @see + * Query Caching + */ + public Boolean cacheHit() { + return cacheHit; + } + + /** + * Returns the total number of bytes billed for the job. + */ + public Long totalBytesBilled() { + return totalBytesBilled; + } + + /** + * Returns the total number of bytes processed by the job. + */ + public Long totalBytesProcessed() { + return totalBytesProcessed; + } + + /** + * Returns the query plan as a list of stages or {@code null} if a query plan is not available. + * Each stage involves a number of steps that read from data sources, perform a series of + * transformations on the input, and emit an output to a future stage (or the final result). The + * query plan is available for a completed query job and is retained for 7 days. + * + * @see Query Plan + */ + public List queryPlan() { + return queryPlan; + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("billingTier", billingTier) + .add("cacheHit", cacheHit) + .add("totalBytesBilled", totalBytesBilled) + .add("totalBytesProcessed", totalBytesProcessed) + .add("queryPlan", queryPlan); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryStatistics + && Objects.equals(toPb(), ((QueryStatistics) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), billingTier, cacheHit, totalBytesBilled, + totalBytesProcessed, queryPlan); + } + + @Override + com.google.api.services.bigquery.model.JobStatistics toPb() { + JobStatistics2 queryStatisticsPb = new JobStatistics2(); + queryStatisticsPb.setBillingTier(billingTier); + queryStatisticsPb.setCacheHit(cacheHit); + queryStatisticsPb.setTotalBytesBilled(totalBytesBilled); + 
queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed); + if (queryPlan != null) { + queryStatisticsPb.setQueryPlan(Lists.transform(queryPlan, QueryStage.TO_PB_FUNCTION)); + } + return super.toPb().setQuery(queryStatisticsPb); + } + + static Builder builder() { + return new Builder(); + } + + @SuppressWarnings("unchecked") + static QueryStatistics fromPb( + com.google.api.services.bigquery.model.JobStatistics statisticPb) { + return new Builder(statisticPb).build(); + } + } + + static class Builder> { + + private Long creationTime; + private Long endTime; + private Long startTime; + + protected Builder() {} + + protected Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { + this.creationTime = statisticsPb.getCreationTime(); + this.endTime = statisticsPb.getEndTime(); + this.startTime = statisticsPb.getStartTime(); + } + + @SuppressWarnings("unchecked") + protected B self() { + return (B) this; + } + + B creationTime(Long creationTime) { + this.creationTime = creationTime; + return self(); + } + + B endTime(Long endTime) { + this.endTime = endTime; + return self(); + } + + B startTime(Long startTime) { + this.startTime = startTime; + return self(); + } + + @SuppressWarnings("unchecked") + T build() { + return (T) new JobStatistics(this); + } + } + + protected JobStatistics(Builder builder) { + this.creationTime = builder.creationTime; + this.endTime = builder.endTime; + this.startTime = builder.startTime; + } + + /** + * Returns the creation time of the job in milliseconds since epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the end time of the job in milliseconds since epoch. Returns {@code null} if the + * job has not finished yet. + */ + public Long endTime() { + return endTime; + } + + /** + * Returns the start time of the job in milliseconds since epoch. Returns {@code null} if the + * job has not started yet. 
+ */ + public Long startTime() { + return startTime; + } + + ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this) + .add("creationTime", creationTime) + .add("endTime", endTime) + .add("startTime", startTime); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + @Override + public int hashCode() { + return Objects.hash(creationTime, endTime, startTime); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof JobStatistics && Objects.equals(toPb(), ((JobStatistics) obj).toPb()); + } + + com.google.api.services.bigquery.model.JobStatistics toPb() { + com.google.api.services.bigquery.model.JobStatistics statistics = + new com.google.api.services.bigquery.model.JobStatistics(); + statistics.setCreationTime(creationTime); + statistics.setEndTime(endTime); + statistics.setStartTime(startTime); + return statistics; + } + + static Builder builder() { + return new Builder(); + } + + @SuppressWarnings("unchecked") + static T fromPb( + com.google.api.services.bigquery.model.JobStatistics statisticPb) { + if (statisticPb.getLoad() != null) { + return (T) LoadStatistics.fromPb(statisticPb); + } else if (statisticPb.getExtract() != null) { + return (T) ExtractStatistics.fromPb(statisticPb); + } else if (statisticPb.getQuery() != null) { + return (T) QueryStatistics.fromPb(statisticPb); + } else { + return (T) new Builder(statisticPb).build(); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java new file mode 100644 index 000000000000..738a644a5dde --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java @@ -0,0 +1,130 @@ +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import 
java.util.List; +import java.util.Objects; + +/** + * A Google BigQuery Job status. Objects of this class can be examined when polling an asynchronous + * job to see if the job completed. + */ +public class JobStatus implements Serializable { + + private static final long serialVersionUID = -714976456815445365L; + + /** + * Possible states that a BigQuery Job can assume. + */ + public enum State { + /** + * The BigQuery Job is waiting to be executed. + */ + PENDING, + + /** + * The BigQuery Job is being executed. + */ + RUNNING, + + /** + * The BigQuery Job has completed either succeeding or failing. If failed {@link #error()} will + * be non-null. + */ + DONE + } + + private final State state; + private final BigQueryError error; + private final List executionErrors; + + JobStatus(State state) { + this.state = state; + this.error = null; + this.executionErrors = null; + } + + JobStatus(State state, BigQueryError error, List executionErrors) { + this.state = state; + this.error = error; + this.executionErrors = executionErrors != null ? ImmutableList.copyOf(executionErrors) : null; + } + + /** + * Returns the state of the job. A {@link State#PENDING} job is waiting to be executed. A + * {@link State#RUNNING} is being executed. A {@link State#DONE} job has completed either + * succeeding or failing. If failed {@link #error()} will be non-null. + */ + public State state() { + return state; + } + + /** + * Returns the final error result of the job. If present, indicates that the job has completed + * and was unsuccessful. + * + * @see + * Troubleshooting Errors + */ + public BigQueryError error() { + return error; + } + + /** + * Returns all errors encountered during the running of the job. Errors here do not necessarily + * mean that the job has completed or was unsuccessful. 
+ * + * @see + * Troubleshooting Errors + */ + public List executionErrors() { + return executionErrors; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("state", state) + .add("error", error) + .add("executionErrors", executionErrors) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(state, error, executionErrors); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof JobStatus && Objects.equals(toPb(), ((JobStatus) obj).toPb()); + } + + com.google.api.services.bigquery.model.JobStatus toPb() { + com.google.api.services.bigquery.model.JobStatus statusPb = + new com.google.api.services.bigquery.model.JobStatus(); + if (state != null) { + statusPb.setState(state.toString()); + } + if (error != null) { + statusPb.setErrorResult(error.toPb()); + } + if (executionErrors != null) { + statusPb.setErrors(Lists.transform(executionErrors, BigQueryError.TO_PB_FUNCTION)); + } + return statusPb; + } + + static JobStatus fromPb(com.google.api.services.bigquery.model.JobStatus statusPb) { + List allErrors = null; + if (statusPb.getErrors() != null) { + allErrors = Lists.transform(statusPb.getErrors(), BigQueryError.FROM_PB_FUNCTION); + } + BigQueryError error = + statusPb.getErrorResult() != null ? BigQueryError.fromPb(statusPb.getErrorResult()) : null; + return new JobStatus(State.valueOf(statusPb.getState()), error, allErrors); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java new file mode 100644 index 000000000000..223a25a478e0 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java @@ -0,0 +1,165 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import java.util.List; + +/** + * Common interface for a load configuration. A load configuration + * ({@link WriteChannelConfiguration}) can be used to load data into a table with a + * {@link com.google.gcloud.WriteChannel} ({@link BigQuery#writer(WriteChannelConfiguration)}). + * A load configuration ({@link LoadJobConfiguration}) can also be used to create a load job + * ({@link JobInfo#of(JobConfiguration)}). + */ +public interface LoadConfiguration { + + interface Builder { + + /** + * Sets the destination table to load the data into. + */ + Builder destinationTable(TableId destinationTable); + + /** + * Sets whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + Builder createDisposition(CreateDisposition createDisposition); + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + Builder writeDisposition(WriteDisposition writeDisposition); + + /** + * Sets the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV}, {@code NEWLINE_DELIMITED_JSON} and {@code DATASTORE_BACKUP}. If not + * specified, {@code CSV} format is assumed. 
+ * + * + * Source Format + */ + Builder formatOptions(FormatOptions formatOptions); + + /** + * Sets the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + Builder maxBadRecords(Integer maxBadRecords); + + /** + * Sets the schema for the destination table. The schema can be omitted if the destination table + * already exists, or if you're loading data from a Google Cloud Datastore backup (i.e. + * {@code DATASTORE_BACKUP} format option). + */ + Builder schema(Schema schema); + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If {@code true}, the extra values are ignored. If {@code false}, records with extra columns + * are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. + */ + Builder ignoreUnknownValues(Boolean ignoreUnknownValues); + + /** + * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. + */ + Builder projectionFields(List projectionFields); + + LoadConfiguration build(); + } + + /** + * Returns the destination table to load the data into. + */ + TableId destinationTable(); + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + CreateDisposition createDisposition(); + + /** + * Returns the action that should occur if the destination table already exists. 
+ * + * @see + * Write Disposition + */ + WriteDisposition writeDisposition(); + + /** + * Returns additional properties used to parse CSV data (used when {@link #format()} is set + * to CSV). Returns {@code null} if not set. + */ + CsvOptions csvOptions(); + + /** + * Returns the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + Integer maxBadRecords(); + + /** + * Returns the schema for the destination table, if set. Returns {@code null} otherwise. + */ + Schema schema(); + + /** + * Returns the format of the data files. + */ + String format(); + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If {@code true}, the extra values are ignored. If {@code false}, records with extra + * columns are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. + */ + Boolean ignoreUnknownValues(); + + /** + * Returns which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. + */ + List projectionFields(); + + /** + * Returns a builder for the load configuration object. 
+ */ + Builder toBuilder(); +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java new file mode 100644 index 000000000000..9c9fa7a769b6 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java @@ -0,0 +1,390 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery load job configuration. A load job loads data from one of several formats into a + * table. Data is provided as URIs that point to objects in Google Cloud Storage. Load job + * configurations have {@link JobConfiguration.Type#LOAD} type. 
+ */ +public final class LoadJobConfiguration extends JobConfiguration implements LoadConfiguration { + + private static final long serialVersionUID = -2673554846792429829L; + + private final List sourceUris; + private final TableId destinationTable; + private final JobInfo.CreateDisposition createDisposition; + private final JobInfo.WriteDisposition writeDisposition; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Schema schema; + private final Boolean ignoreUnknownValues; + private final List projectionFields; + + public static final class Builder + extends JobConfiguration.Builder + implements LoadConfiguration.Builder { + + private List sourceUris; + private TableId destinationTable; + private JobInfo.CreateDisposition createDisposition; + private JobInfo.WriteDisposition writeDisposition; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Schema schema; + private Boolean ignoreUnknownValues; + private List projectionFields; + + private Builder() { + super(Type.LOAD); + } + + private Builder(LoadJobConfiguration loadConfiguration) { + this(); + this.destinationTable = loadConfiguration.destinationTable; + this.createDisposition = loadConfiguration.createDisposition; + this.writeDisposition = loadConfiguration.writeDisposition; + this.formatOptions = loadConfiguration.formatOptions; + this.maxBadRecords = loadConfiguration.maxBadRecords; + this.schema = loadConfiguration.schema; + this.ignoreUnknownValues = loadConfiguration.ignoreUnknownValues; + this.projectionFields = loadConfiguration.projectionFields; + this.sourceUris = loadConfiguration.sourceUris; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + this(); + JobConfigurationLoad loadConfigurationPb = configurationPb.getLoad(); + this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getCreateDisposition() != null) { + 
this.createDisposition = + JobInfo.CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); + } + if (loadConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = + JobInfo.WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); + } + if (loadConfigurationPb.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); + } + if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getAllowQuotedNewlines() != null + || loadConfigurationPb.getEncoding() != null + || loadConfigurationPb.getFieldDelimiter() != null + || loadConfigurationPb.getQuote() != null + || loadConfigurationPb.getSkipLeadingRows() != null) { + CsvOptions.Builder builder = CsvOptions.builder() + .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) + .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) + .encoding(loadConfigurationPb.getEncoding()) + .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) + .quote(loadConfigurationPb.getQuote()) + .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); + this.formatOptions = builder.build(); + } + this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); + if (loadConfigurationPb.getSchema() != null) { + this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); + } + this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); + this.projectionFields = loadConfigurationPb.getProjectionFields(); + if (loadConfigurationPb.getSourceUris() != null) { + this.sourceUris = ImmutableList.copyOf(configurationPb.getLoad().getSourceUris()); + } + } + + @Override + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + @Override + public Builder createDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + @Override + public Builder 
writeDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + @Override + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + @Override + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + @Override + public Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + @Override + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + @Override + public Builder projectionFields(List projectionFields) { + this.projectionFields = + projectionFields != null ? ImmutableList.copyOf(projectionFields) : null; + return this; + } + + /** + * Sets the fully-qualified URIs that point to source data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after the + * 'bucket' name. 
+ */ + public Builder sourceUris(List sourceUris) { + this.sourceUris = ImmutableList.copyOf(checkNotNull(sourceUris)); + return this; + } + + @Override + public LoadJobConfiguration build() { + return new LoadJobConfiguration(this); + } + } + + private LoadJobConfiguration(Builder builder) { + super(builder); + this.sourceUris = builder.sourceUris; + this.destinationTable = builder.destinationTable; + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + this.formatOptions = builder.formatOptions; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.projectionFields = builder.projectionFields; + } + + @Override + public TableId destinationTable() { + return destinationTable; + } + + @Override + public JobInfo.CreateDisposition createDisposition() { + return this.createDisposition; + } + + @Override + public JobInfo.WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public CsvOptions csvOptions() { + return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; + } + + @Override + public Integer maxBadRecords() { + return maxBadRecords; + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public String format() { + return formatOptions != null ? formatOptions.type() : null; + } + + @Override + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + @Override + public List projectionFields() { + return projectionFields; + } + + /** + * Returns the fully-qualified URIs that point to source data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after the + * 'bucket' name. 
+ */ + public List sourceUris() { + return sourceUris; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("formatOptions", formatOptions) + .add("maxBadRecords", maxBadRecords) + .add("schema", schema) + .add("ignoreUnknownValue", ignoreUnknownValues) + .add("projectionFields", projectionFields) + .add("sourceUris", sourceUris); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof LoadJobConfiguration && baseEquals((LoadJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), sourceUris); + } + + @Override + LoadJobConfiguration setProjectId(String projectId) { + return toBuilder().destinationTable(destinationTable().setProjectId(projectId)).build(); + } + + @Override + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + loadConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (createDisposition != null) { + loadConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + if (csvOptions() != null) { + CsvOptions csvOptions = csvOptions(); + loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) + .setAllowJaggedRows(csvOptions.allowJaggedRows()) + .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) + .setEncoding(csvOptions.encoding()) + .setQuote(csvOptions.quote()) + .setSkipLeadingRows(csvOptions.skipLeadingRows()); + } + if (schema != null) { + loadConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + loadConfigurationPb.setSourceFormat(formatOptions.type()); + } + 
loadConfigurationPb.setMaxBadRecords(maxBadRecords); + loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + loadConfigurationPb.setProjectionFields(projectionFields); + if (sourceUris != null) { + loadConfigurationPb.setSourceUris(ImmutableList.copyOf(sourceUris)); + } + return new com.google.api.services.bigquery.model.JobConfiguration() + .setLoad(loadConfigurationPb); + } + + /** + * Creates a builder for a BigQuery Load Job configuration given the destination table and source + * URIs. + */ + public static Builder builder(TableId destinationTable, List sourceUris) { + return new Builder().destinationTable(destinationTable).sourceUris(sourceUris); + } + + /** + * Creates a builder for a BigQuery Load Job configuration given the destination table and source + * URI. + */ + public static Builder builder(TableId destinationTable, String sourceUri) { + return builder(destinationTable, ImmutableList.of(sourceUri)); + } + + /** + * Creates a builder for a BigQuery Load Job configuration given the destination table, format and + * source URIs. + */ + public static Builder builder(TableId destinationTable, List sourceUris, + FormatOptions format) { + return builder(destinationTable, sourceUris).formatOptions(format); + } + + /** + * Creates a builder for a BigQuery Load Job configuration given the destination table, format and + * source URI. + */ + public static Builder builder(TableId destinationTable, String sourceUri, FormatOptions format) { + return builder(destinationTable, ImmutableList.of(sourceUri), format); + } + + /** + * Returns a BigQuery Load Job Configuration for the given destination table and source URIs. + */ + public static LoadJobConfiguration of(TableId destinationTable, List sourceUris) { + return builder(destinationTable, sourceUris).build(); + } + + /** + * Returns a BigQuery Load Job Configuration for the given destination table and source URI. 
+ */ + public static LoadJobConfiguration of(TableId destinationTable, String sourceUri) { + return of(destinationTable, ImmutableList.of(sourceUri)); + } + + /** + * Returns a BigQuery Load Job Configuration for the given destination table, format and source + * URI. + */ + public static LoadJobConfiguration of(TableId destinationTable, List sourceUris, + FormatOptions format) { + return builder(destinationTable, sourceUris, format).build(); + } + + /** + * Returns a BigQuery Load Job Configuration for the given destination table, format and source + * URI. + */ + public static LoadJobConfiguration of(TableId destinationTable, String sourceUri, + FormatOptions format) { + return of(destinationTable, ImmutableList.of(sourceUri), format); + } + + @SuppressWarnings("unchecked") + static LoadJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration confPb) { + return new Builder(confPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java new file mode 100644 index 000000000000..3fdc27ecab99 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java @@ -0,0 +1,72 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.gcloud.bigquery.spi.BigQueryRpc; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for BigQuery operation option. + */ +class Option implements Serializable { + + private static final long serialVersionUID = -6647817677804099207L; + + private final BigQueryRpc.Option rpcOption; + private final Object value; + + Option(BigQueryRpc.Option rpcOption, Object value) { + this.rpcOption = checkNotNull(rpcOption); + this.value = value; + } + + BigQueryRpc.Option rpcOption() { + return rpcOption; + } + + Object value() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Option)) { + return false; + } + Option other = (Option) obj; + return Objects.equals(rpcOption, other.rpcOption) + && Objects.equals(value, other.value); + } + + @Override + public int hashCode() { + return Objects.hash(rpcOption, value); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", rpcOption.value()) + .add("value", value) + .toString(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java new file mode 100644 index 000000000000..688611d07526 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java @@ -0,0 +1,537 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google BigQuery Query Job configuration. A Query Job runs a query against BigQuery data. Query + * job configurations have {@link JobConfiguration.Type#QUERY} type. + */ +public final class QueryJobConfiguration extends JobConfiguration { + + private static final long serialVersionUID = -1108948249081804890L; + + /** + * Priority levels for a query. If not specified the priority is assumed to be + * {@link Priority#INTERACTIVE}. + */ + public enum Priority { + /** + * Query is executed as soon as possible and count towards the + * concurrent rate limit and the daily + * rate limit. + */ + INTERACTIVE, + + /** + * Query is queued and started as soon as idle resources are available, usually within a few + * minutes. If the query hasn't started within 3 hours, its priority is changed to + * {@link Priority#INTERACTIVE}. 
+ */ + BATCH + } + + private final String query; + private final TableId destinationTable; + private final Map tableDefinitions; + private final List userDefinedFunctions; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final DatasetId defaultDataset; + private final Priority priority; + private final Boolean allowLargeResults; + private final Boolean useQueryCache; + private final Boolean flattenResults; + private final Boolean dryRun; + + public static final class Builder + extends JobConfiguration.Builder { + + private String query; + private TableId destinationTable; + private Map tableDefinitions; + private List userDefinedFunctions; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private DatasetId defaultDataset; + private Priority priority; + private Boolean allowLargeResults; + private Boolean useQueryCache; + private Boolean flattenResults; + private Boolean dryRun; + + private Builder() { + super(Type.QUERY); + } + + private Builder(QueryJobConfiguration jobConfiguration) { + this(); + this.query = jobConfiguration.query; + this.destinationTable = jobConfiguration.destinationTable; + this.tableDefinitions = jobConfiguration.tableDefinitions; + this.userDefinedFunctions = jobConfiguration.userDefinedFunctions; + this.createDisposition = jobConfiguration.createDisposition; + this.writeDisposition = jobConfiguration.writeDisposition; + this.defaultDataset = jobConfiguration.defaultDataset; + this.priority = jobConfiguration.priority; + this.allowLargeResults = jobConfiguration.allowLargeResults; + this.useQueryCache = jobConfiguration.useQueryCache; + this.flattenResults = jobConfiguration.flattenResults; + this.dryRun = jobConfiguration.dryRun; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + this(); + JobConfigurationQuery queryConfigurationPb = configurationPb.getQuery(); + this.query = 
queryConfigurationPb.getQuery(); + allowLargeResults = queryConfigurationPb.getAllowLargeResults(); + useQueryCache = queryConfigurationPb.getUseQueryCache(); + flattenResults = queryConfigurationPb.getFlattenResults(); + dryRun = configurationPb.getDryRun(); + if (queryConfigurationPb.getDestinationTable() != null) { + destinationTable = TableId.fromPb(queryConfigurationPb.getDestinationTable()); + } + if (queryConfigurationPb.getDefaultDataset() != null) { + defaultDataset = DatasetId.fromPb(queryConfigurationPb.getDefaultDataset()); + } + if (queryConfigurationPb.getPriority() != null) { + priority = Priority.valueOf(queryConfigurationPb.getPriority()); + } + if (queryConfigurationPb.getTableDefinitions() != null) { + tableDefinitions = Maps.transformValues(queryConfigurationPb.getTableDefinitions(), + ExternalTableDefinition.FROM_EXTERNAL_DATA_FUNCTION); + } + if (queryConfigurationPb.getUserDefinedFunctionResources() != null) { + userDefinedFunctions = Lists.transform( + queryConfigurationPb.getUserDefinedFunctionResources(), + UserDefinedFunction.FROM_PB_FUNCTION); + } + if (queryConfigurationPb.getCreateDisposition() != null) { + createDisposition = + CreateDisposition.valueOf(queryConfigurationPb.getCreateDisposition()); + } + if (queryConfigurationPb.getWriteDisposition() != null) { + writeDisposition = + WriteDisposition.valueOf(queryConfigurationPb.getWriteDisposition()); + } + } + + /** + * Sets the BigQuery SQL query to execute. + */ + public Builder query(String query) { + this.query = query; + return this; + } + + /** + * Sets the table where to put query results. If not provided a new table is created. This value + * is required if {@link Builder#allowLargeResults(Boolean)} is set to {@code true}. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + /** + * Sets the external tables definitions. 
If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data + * sources. By defining these properties, the data sources can be queried as if they were + * standard BigQuery tables. + */ + public Builder tableDefinitions(Map tableDefinitions) { + this.tableDefinitions = tableDefinitions != null ? Maps.newHashMap(tableDefinitions) : null; + return this; + } + + /** + * Adds a new external table definition. If a definition already exists for {@code tableName} + * it is updated. + * + * @param tableName name of the table + * @param tableDefinition external data configuration for the table used by this query + */ + public Builder addTableDefinition(String tableName, ExternalTableDefinition tableDefinition) { + if (this.tableDefinitions == null) { + this.tableDefinitions = Maps.newHashMap(); + } + this.tableDefinitions.put(checkNotNull(tableName), checkNotNull(tableDefinition)); + return this; + } + + /** + * Sets user defined function resources that can be used by this query. Function resources + * can either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from + * a Google Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + public Builder userDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = + userDefinedFunctions != null ? ImmutableList.copyOf(userDefinedFunctions) : null; + return this; + } + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Create Disposition + */ + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the default dataset. 
This dataset is used for all unqualified table names used in the + * query. + */ + public Builder defaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + /** + * Sets the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public Builder defaultDataset(String defaultDataset) { + return defaultDataset(DatasetId.of(defaultDataset)); + } + + /** + * Sets a priority for the query. If not specified the priority is assumed to be + * {@link Priority#INTERACTIVE}. + */ + public Builder priority(Priority priority) { + this.priority = priority; + return this; + } + + /** + * Sets whether the job is enabled to create arbitrarily large results. If {@code true} + * the query is allowed to create large results at a slight cost in performance. If {@code true} + * {@link Builder#destinationTable(TableId)} must be provided. + * + * @see + * Returning Large Query Results + */ + public Builder allowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return this; + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + /** + * Sets whether nested and repeated fields should be flattened. If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. By default results are + * flattened. + * + * @see Flatten + */ + public Builder flattenResults(Boolean flattenResults) { + this.flattenResults = flattenResults; + return this; + } + + /** + * Sets whether the job has to be dry run or not. If set, the job is not executed. 
A valid query + * will return a mostly empty response with some processing statistics, while an invalid query + * will return the same error it would if it wasn't a dry run. + */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + public QueryJobConfiguration build() { + return new QueryJobConfiguration(this); + } + } + + private QueryJobConfiguration(Builder builder) { + super(builder); + this.query = checkNotNull(builder.query); + this.allowLargeResults = builder.allowLargeResults; + this.createDisposition = builder.createDisposition; + this.defaultDataset = builder.defaultDataset; + this.destinationTable = builder.destinationTable; + this.flattenResults = builder.flattenResults; + this.priority = builder.priority; + this.useQueryCache = builder.useQueryCache; + this.userDefinedFunctions = builder.userDefinedFunctions; + this.writeDisposition = builder.writeDisposition; + this.tableDefinitions = + builder.tableDefinitions != null ? ImmutableMap.copyOf(builder.tableDefinitions) : null; + this.dryRun = builder.dryRun; + } + + /** + * Returns whether the job is enabled to create arbitrarily large results. If {@code true} + * the query is allowed to create large results at a slight cost in performance. + * the query is allowed to create large results at a slight cost in performance. + * + * @see + * Returning Large Query Results + */ + public Boolean allowLargeResults() { + return allowLargeResults; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public CreateDisposition createDisposition() { + return createDisposition; + } + + /** + * Returns the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns the table where to put query results. If not provided a new table is created. 
This + * value is required if {@link #allowLargeResults()} is {@code true}. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether nested and repeated fields should be flattened. If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. + * + * @see Flatten + */ + public Boolean flattenResults() { + return flattenResults; + } + + /** + * Returns the query priority. + */ + public Priority priority() { + return priority; + } + + /** + * Returns the Google BigQuery SQL query. + */ + public String query() { + return query; + } + + /** + * Returns the external tables definitions. If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data + * sources. By defining these properties, the data sources can be queried as if they were + * standard BigQuery tables. + */ + public Map tableDefinitions() { + return tableDefinitions; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources + * can either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from + * a Google Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + /** + * Returns whether the job has to be dry run or not. 
If set, the job is not executed. A valid + * query will return a mostly empty response with some processing statistics, while an invalid + * query will return the same error it would if it wasn't a dry run. + */ + public Boolean dryRun() { + return dryRun; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("destinationTable", destinationTable) + .add("defaultDataset", defaultDataset) + .add("allowLargeResults", allowLargeResults) + .add("flattenResults", flattenResults) + .add("priority", priority) + .add("tableDefinitions", tableDefinitions) + .add("userQueryCache", useQueryCache) + .add("userDefinedFunctions", userDefinedFunctions) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("dryRun", dryRun); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryJobConfiguration && baseEquals((QueryJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), allowLargeResults, createDisposition, destinationTable, + defaultDataset, flattenResults, priority, query, tableDefinitions, useQueryCache, + userDefinedFunctions, writeDisposition, dryRun); + } + + @Override + QueryJobConfiguration setProjectId(String projectId) { + Builder builder = toBuilder(); + if (destinationTable() != null) { + builder.destinationTable(destinationTable().setProjectId(projectId)); + } + if (defaultDataset() != null) { + builder.defaultDataset(defaultDataset().setProjectId(projectId)); + } + return builder.build(); + } + + @Override + com.google.api.services.bigquery.model.JobConfiguration toPb() { + com.google.api.services.bigquery.model.JobConfiguration configurationPb = + new com.google.api.services.bigquery.model.JobConfiguration(); + JobConfigurationQuery queryConfigurationPb = new JobConfigurationQuery(); + 
queryConfigurationPb.setQuery(query); + configurationPb.setDryRun(dryRun()); + if (allowLargeResults != null) { + queryConfigurationPb.setAllowLargeResults(allowLargeResults); + } + if (createDisposition != null) { + queryConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (destinationTable != null) { + queryConfigurationPb.setDestinationTable(destinationTable.toPb()); + } + if (defaultDataset != null) { + queryConfigurationPb.setDefaultDataset(defaultDataset.toPb()); + } + if (flattenResults != null) { + queryConfigurationPb.setFlattenResults(flattenResults); + } + if (priority != null) { + queryConfigurationPb.setPriority(priority.toString()); + } + if (tableDefinitions != null) { + queryConfigurationPb.setTableDefinitions(Maps.transformValues(tableDefinitions, + ExternalTableDefinition.TO_EXTERNAL_DATA_FUNCTION)); + } + if (useQueryCache != null) { + queryConfigurationPb.setUseQueryCache(useQueryCache); + } + if (userDefinedFunctions != null) { + queryConfigurationPb.setUserDefinedFunctionResources( + Lists.transform(userDefinedFunctions, UserDefinedFunction.TO_PB_FUNCTION)); + } + if (writeDisposition != null) { + queryConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + return configurationPb.setQuery(queryConfigurationPb); + } + + /** + * Creates a builder for a BigQuery Query Job given the query to be run. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Returns a BigQuery Copy Job for the given the query to be run. Job's id is chosen by the + * service. 
+ */ + public static QueryJobConfiguration of(String query) { + return builder(query).build(); + } + + @SuppressWarnings("unchecked") + static QueryJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java new file mode 100644 index 000000000000..b3522a2a6ba3 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java @@ -0,0 +1,318 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Request. This class can be used to run a BigQuery SQL query and + * return results if the query completes within a specified timeout. The query results are saved to + * a temporary table that is deleted approximately 24 hours after the query is run. The query is run + * through a BigQuery Job whose identity can be accessed via {@link QueryResponse#jobId()}. 
If the + * query does not complete within the provided {@link Builder#maxWaitTime(Long)}, the response + * returned by {@link BigQuery#query(QueryRequest)} will have {@link QueryResponse#jobCompleted()} + * set to {@code false} and {@link QueryResponse#result()} set to {@code null}. To obtain query + * results you can use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} until + * {@link QueryResponse#jobCompleted()} returns {@code true}. + * + *

Example usage of a query request: + *

 {@code
+ * // Substitute "field", "table" and "dataset" with real field, table and dataset identifiers
+ * QueryRequest request = QueryRequest.builder("SELECT field FROM table")
+ *     .defaultDataset(DatasetId.of("dataset"))
+ *     .maxWaitTime(60000L)
+ *     .pageSize(1000L)
+ *     .build();
+ * QueryResponse response = bigquery.query(request);
+ * while (!response.jobCompleted()) {
+ *   Thread.sleep(1000);
+ *   response = bigquery.getQueryResults(response.jobId());
+ * }
+ * List executionErrors = response.executionErrors();
+ * // look for errors in executionErrors
+ * QueryResult result = response.result();
+ * Iterator> rowIterator = result.iterateAll();
+ * while(rowIterator.hasNext()) {
+ *   List row = rowIterator.next();
+ *   // do something with row
+ * }
+ * }
+ * + * @see Query + * @see Query Reference + */ +public class QueryRequest implements Serializable { + + private static final long serialVersionUID = -8727328332415880852L; + + private final String query; + private final Long pageSize; + private final DatasetId defaultDataset; + private final Long maxWaitTime; + private final Boolean dryRun; + private final Boolean useQueryCache; + + public static final class Builder { + + private String query; + private Long pageSize; + private DatasetId defaultDataset; + private Long maxWaitTime; + private Boolean dryRun; + private Boolean useQueryCache; + + private Builder() {} + + /** + * Sets the BigQuery query to be executed. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return this; + } + + /** + * Sets the maximum number of rows of data to return per page of results. Setting this flag to a + * small value such as 1000 and then paging through results might improve reliability when the + * query result set is large. In addition to this limit, responses are also limited to 10 MB. + * By default, there is no maximum row count, and only the byte limit applies. + */ + public Builder pageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + /** + * Sets the default dataset to assume for any unqualified table names in the query. + */ + public Builder defaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + /** + * Sets the default dataset to assume for any unqualified table names in the query. + */ + public Builder defaultDataset(String defaultDataset) { + return defaultDataset(DatasetId.of(defaultDataset)); + } + + /** + * Sets how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. Note that this is only a timeout for the request, not the query. 
If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobCompleted()} set to {@code false}. If not set, a wait time + * of 10000 milliseconds (10 seconds) is used. + */ + public Builder maxWaitTime(Long maxWaitTime) { + this.maxWaitTime = maxWaitTime; + return this; + } + + /** + * Sets whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + public QueryRequest build() { + return new QueryRequest(this); + } + } + + private QueryRequest(Builder builder) { + query = builder.query; + pageSize = builder.pageSize; + defaultDataset = builder.defaultDataset; + maxWaitTime = builder.maxWaitTime; + dryRun = builder.dryRun; + useQueryCache = builder.useQueryCache; + } + + /** + * Sets the BigQuery query to be executed. + */ + public String query() { + return query; + } + + /** + * Returns the maximum number of rows of data to return per page of results. + */ + public Long pageSize() { + return pageSize; + } + + /** + * Returns the default dataset to assume for any unqualified table names in the query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. 
Note that this is only a timeout for the request, not the query. If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobCompleted()} set to {@code false}. You can call + * {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to wait for the query + * to complete and read the results. If not set, a wait time of 10000 milliseconds (10 seconds) + * is used. + */ + public Long maxWaitTime() { + return maxWaitTime; + } + + /** + * Returns whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Boolean dryRun() { + return dryRun; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns a builder for the {@code QueryRequest} object. 
+ */ + public Builder toBuilder() { + return new Builder() + .query(query) + .pageSize(pageSize) + .defaultDataset(defaultDataset) + .maxWaitTime(maxWaitTime) + .dryRun(dryRun) + .useQueryCache(useQueryCache); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("query", query) + .add("pageSize", pageSize) + .add("defaultDataset", defaultDataset) + .add("maxWaitTime", maxWaitTime) + .add("dryRun", dryRun) + .add("useQueryCache", useQueryCache) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(query, pageSize, defaultDataset, maxWaitTime, dryRun, useQueryCache); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryRequest && Objects.equals(toPb(), ((QueryRequest) obj).toPb()); + } + + QueryRequest setProjectId(String projectId) { + Builder builder = toBuilder(); + if (defaultDataset() != null) { + builder.defaultDataset(defaultDataset().setProjectId(projectId)); + } + return builder.build(); + } + + com.google.api.services.bigquery.model.QueryRequest toPb() { + com.google.api.services.bigquery.model.QueryRequest queryRequestPb = + new com.google.api.services.bigquery.model.QueryRequest().setQuery(query); + if (pageSize != null) { + queryRequestPb.setMaxResults(pageSize); + } + if (defaultDataset != null) { + queryRequestPb.setDefaultDataset(defaultDataset.toPb()); + } + if (maxWaitTime != null) { + queryRequestPb.setTimeoutMs(maxWaitTime); + } + if (dryRun != null) { + queryRequestPb.setDryRun(dryRun); + } + if (useQueryCache != null) { + queryRequestPb.setUseQueryCache(useQueryCache); + } + return queryRequestPb; + } + + /** + * Creates a builder for a {@code QueryRequest} given the BigQuery SQL query to be executed. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Creates a {@code QueryRequest} object given the BigQuery SQL query to be executed. 
+ */ + public static QueryRequest of(String query) { + return new Builder().query(query).build(); + } + + static QueryRequest fromPb(com.google.api.services.bigquery.model.QueryRequest queryRequestPb) { + Builder builder = builder(queryRequestPb.getQuery()); + if (queryRequestPb.getMaxResults() != null) { + builder.pageSize(queryRequestPb.getMaxResults()); + } + if (queryRequestPb.getDefaultDataset() != null) { + builder.defaultDataset(DatasetId.fromPb(queryRequestPb.getDefaultDataset())); + } + if (queryRequestPb.getTimeoutMs() != null) { + builder.maxWaitTime(queryRequestPb.getTimeoutMs()); + } + if (queryRequestPb.getDryRun() != null) { + builder.dryRun(queryRequestPb.getDryRun()); + } + if (queryRequestPb.getUseQueryCache() != null) { + builder.useQueryCache(queryRequestPb.getUseQueryCache()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java new file mode 100644 index 000000000000..12000cc1cbd2 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java @@ -0,0 +1,196 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Response. This class contains the results of a Query Job + * ({@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)}) or of a + * Query Request ({@link BigQuery#query(QueryRequest)}). + * + *

Example usage of a query response: + *

 {@code
+ * QueryResponse response = bigquery.query(request);
+ * while (!response.jobCompleted()) {
+ *   Thread.sleep(1000);
+ *   response = bigquery.getQueryResults(response.jobId());
+ * }
+ * List executionErrors = response.executionErrors();
+ * // look for errors in executionErrors
+ * QueryResult result = response.result();
+ * Iterator> rowIterator = result.iterateAll();
+ * while(rowIterator.hasNext()) {
+ *   List row = rowIterator.next();
+ *   // do something with row
+ * }
+ * }
+ * + * @see Get Query + * Results + * @see Query + */ +public class QueryResponse implements Serializable { + + private static final long serialVersionUID = 3549226764825005655L; + + private final QueryResult result; + private final String etag; + private final JobId jobId; + private final boolean jobCompleted; + private final List executionErrors; + + static final class Builder { + + private QueryResult result; + private String etag; + private JobId jobId; + private boolean jobCompleted; + private List executionErrors; + + private Builder() {} + + Builder result(QueryResult result) { + this.result = result; + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + Builder jobId(JobId jobId) { + this.jobId = jobId; + return this; + } + + Builder jobCompleted(boolean jobCompleted) { + this.jobCompleted = jobCompleted; + return this; + } + + Builder executionErrors(List executionErrors) { + this.executionErrors = executionErrors; + return this; + } + + QueryResponse build() { + return new QueryResponse(this); + } + } + + private QueryResponse(Builder builder) { + this.result = builder.result; + this.etag = builder.etag; + this.jobId = builder.jobId; + this.jobCompleted = builder.jobCompleted; + this.executionErrors = builder.executionErrors != null ? builder.executionErrors + : ImmutableList.of(); + } + + /** + * Returns the result of the query. Returns {@code null} if {@link #jobCompleted()} is {@code + * false}. + */ + public QueryResult result() { + return result; + } + + /** + * Returns the hash of the {@code QueryResponse} resource or {@code null} if not set. + */ + public String etag() { + return etag; + } + + /** + * Returns the identity of the BigQuery Job that was created to run the query. This field will be + * present even if the original request timed out. + */ + public JobId jobId() { + return jobId; + } + + /** + * Returns whether the job running the query has completed or not. 
If {@link #result()} is not + * {@code null}, this method will always return {@code true}. If this method returns {@code false} + * {@link #result()} returns {@code null}. This method can be used to check if query execution + * completed and results are available. + */ + public boolean jobCompleted() { + return jobCompleted; + } + + /** + * Returns whether errors and warnings occurred during the execution of the job. If this method + * returns {@code true} it does not necessarily mean that the job has completed or was + * unsuccessful. + */ + public boolean hasErrors() { + return !executionErrors.isEmpty(); + } + + /** + * Returns errors and warnings encountered during the running of the job, if any. Errors here do + * not necessarily mean that the job has completed or was unsuccessful. + */ + public List executionErrors() { + return executionErrors; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("result", result) + .add("etag", etag) + .add("jobId", jobId) + .add("jobCompleted", jobCompleted) + .add("executionErrors", executionErrors) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResponse response = (QueryResponse) obj; + return jobCompleted == response.jobCompleted + && Objects.equals(etag, response.etag) + && Objects.equals(result, response.result) + && Objects.equals(jobId, response.jobId) + && Objects.equals(executionErrors, response.executionErrors); + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java new file mode 100644 index 000000000000..692abab937a9 --- /dev/null +++ 
b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java @@ -0,0 +1,176 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.PageImpl; + +import java.util.List; +import java.util.Objects; + +public class QueryResult extends PageImpl> { + + private static final long serialVersionUID = -4831062717210349818L; + + private final boolean cacheHit; + private final Schema schema; + private final long totalRows; + private final long totalBytesProcessed; + + interface QueryResultsPageFetcher extends PageImpl.NextPageFetcher> { + @Override + QueryResult nextPage(); + } + + static final class Builder { + + private QueryResultsPageFetcher pageFetcher; + private String cursor; + private Iterable> results; + private boolean cacheHit; + private Schema schema; + private long totalRows; + private long totalBytesProcessed; + + private Builder() {} + + Builder cacheHit(boolean cacheHit) { + this.cacheHit = cacheHit; + return this; + } + + Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + Builder totalBytesProcessed(long totalBytesProcessed) { + this.totalBytesProcessed = totalBytesProcessed; + return this; + } + + Builder totalRows(long totalRows) { + this.totalRows = totalRows; + return this; + } + + Builder 
pageFetcher(QueryResultsPageFetcher pageFetcher) { + this.pageFetcher = pageFetcher; + return this; + } + + Builder cursor(String cursor) { + this.cursor = cursor; + return this; + } + + Builder results(Iterable> results) { + this.results = results; + return this; + } + + QueryResult build() { + return new QueryResult(this); + } + } + + private QueryResult(Builder builder) { + super(builder.pageFetcher, builder.cursor, builder.results != null ? builder.results + : ImmutableList.>of()); + this.cacheHit = builder.cacheHit; + this.schema = builder.schema; + this.totalBytesProcessed = builder.totalBytesProcessed; + this.totalRows = builder.totalRows; + } + + /** + * Returns whether the query result was fetched from the query cache. + * + * @see Query Caching + */ + public boolean cacheHit() { + return cacheHit; + } + + /** + * Returns the schema of the results. This is present only when the query completes successfully. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the total number of bytes processed for the query. If this query was a dry run, this is + * the number of bytes that would be processed if the query were run. + */ + public long totalBytesProcessed() { + return totalBytesProcessed; + } + + /** + * Returns the total number of rows in the complete query result set, which can be more than the + * number of rows in the first page of results returned by {@link #values()}. Returns {@code 0} + * if the query was a dry run. 
+ */ + public long totalRows() { + return totalRows; + } + + @Override + public QueryResult nextPage() { + return (QueryResult) super.nextPage(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("rows", values()) + .add("cacheHit", cacheHit) + .add("schema", schema) + .add("totalBytesProcessed", totalBytesProcessed) + .add("totalRows", totalRows) + .add("cursor", nextPageCursor()) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), cacheHit, schema, totalBytesProcessed, totalRows); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResult response = (QueryResult) obj; + return Objects.equals(nextPageCursor(), response.nextPageCursor()) + && Objects.equals(values(), response.values()) + && Objects.equals(schema, response.schema) + && totalRows == response.totalRows + && totalBytesProcessed == response.totalBytesProcessed + && cacheHit == response.cacheHit; + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java new file mode 100644 index 000000000000..8c9f91fd39f3 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java @@ -0,0 +1,444 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ExplainQueryStage; +import com.google.api.services.bigquery.model.ExplainQueryStep; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * BigQuery provides diagnostic information about a completed query's execution plan (or query plan + * for short). The query plan describes a query as a series of stages, with each stage comprising a + * number of steps that read from data sources, perform a series of transformations on the input, + * and emit an output to a future stage (or the final result). This class contains information on a + * query stage. + * + * @see Query Plan + */ +public class QueryStage implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public QueryStage apply(ExplainQueryStage pb) { + return QueryStage.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ExplainQueryStage apply(QueryStage stage) { + return stage.toPb(); + } + }; + private static final long serialVersionUID = -472281297327952320L; + + /** + * Each query stage is made of a number of steps. This class contains information on a query step. 
+ * + * @see Steps + * Metadata + */ + public static class QueryStep implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public QueryStep apply(ExplainQueryStep pb) { + return QueryStep.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ExplainQueryStep apply(QueryStep stage) { + return stage.toPb(); + } + }; + private static final long serialVersionUID = 8663444604771794411L; + + private final String name; + private final List substeps; + + QueryStep(String name, List substeps) { + this.name = name; + this.substeps = substeps; + } + + /** + * Returns a machine-readable name for the operation. + * + * @see Steps + * Metadata + */ + public String name() { + return name; + } + + /** + * Returns a list of human-readable stage descriptions. + */ + public List substeps() { + return substeps; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name) + .add("substeps", substeps) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(name, substeps); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof QueryStep)) { + return false; + } + QueryStep other = (QueryStep) obj; + return Objects.equals(name, other.name) && Objects.equals(substeps, other.substeps); + } + + ExplainQueryStep toPb() { + return new ExplainQueryStep().setKind(name).setSubsteps(substeps); + } + + static QueryStep fromPb(com.google.api.services.bigquery.model.ExplainQueryStep stepPb) { + return new QueryStep(stepPb.getKind(), ImmutableList.copyOf(stepPb.getSubsteps() != null + ? 
stepPb.getSubsteps() : ImmutableList.of())); + } + } + + private final double computeRatioAvg; + private final double computeRatioMax; + private final long id; + private final String name; + private final double readRatioAvg; + private final double readRatioMax; + private final long recordsRead; + private final long recordsWritten; + private final List steps; + private final double waitRatioAvg; + private final double waitRatioMax; + private final double writeRatioAvg; + private final double writeRatioMax; + + static final class Builder { + + private double computeRatioAvg; + private double computeRatioMax; + private long id; + private String name; + private double readRatioAvg; + private double readRatioMax; + private long recordsRead; + private long recordsWritten; + private List steps; + private double waitRatioAvg; + private double waitRatioMax; + private double writeRatioAvg; + private double writeRatioMax; + + private Builder() {} + + Builder computeRatioAvg(double computeRatioAvg) { + this.computeRatioAvg = computeRatioAvg; + return this; + } + + Builder computeRatioMax(double computeRatioMax) { + this.computeRatioMax = computeRatioMax; + return this; + } + + Builder id(long id) { + this.id = id; + return this; + } + + Builder name(String name) { + this.name = name; + return this; + } + + Builder readRatioAvg(double readRatioAvg) { + this.readRatioAvg = readRatioAvg; + return this; + } + + Builder readRatioMax(double readRatioMax) { + this.readRatioMax = readRatioMax; + return this; + } + + Builder recordsRead(long recordsRead) { + this.recordsRead = recordsRead; + return this; + } + + Builder recordsWritten(long recordsWritten) { + this.recordsWritten = recordsWritten; + return this; + } + + Builder steps(List steps) { + this.steps = steps; + return this; + } + + Builder waitRatioAvg(double waitRatioAvg) { + this.waitRatioAvg = waitRatioAvg; + return this; + } + + Builder waitRatioMax(double waitRatioMax) { + this.waitRatioMax = waitRatioMax; + return this; 
+ } + + Builder writeRatioAvg(double writeRatioAvg) { + this.writeRatioAvg = writeRatioAvg; + return this; + } + + Builder writeRatioMax(double writeRatioMax) { + this.writeRatioMax = writeRatioMax; + return this; + } + + QueryStage build() { + return new QueryStage(this); + } + } + + QueryStage(Builder builder) { + computeRatioAvg = builder.computeRatioAvg; + computeRatioMax = builder.computeRatioMax; + id = builder.id; + name = builder.name; + readRatioAvg = builder.readRatioAvg; + readRatioMax = builder.readRatioMax; + recordsRead = builder.recordsRead; + recordsWritten = builder.recordsWritten; + steps = builder.steps; + waitRatioAvg = builder.waitRatioAvg; + waitRatioMax = builder.waitRatioMax; + writeRatioAvg = builder.writeRatioAvg; + writeRatioMax = builder.writeRatioMax; + } + + /** + * Returns the time the average worker spent CPU-bound, divided by the longest time spent by any + * worker in any segment. + */ + public double computeRatioAvg() { + return computeRatioAvg; + } + + /** + * Returns the time the slowest worker spent CPU-bound, divided by the longest time spent by any + * worker in any segment. + */ + public double computeRatioMax() { + return computeRatioMax; + } + + /** + * Returns a unique ID for the stage within its plan. + */ + public long id() { + return id; + } + + /** + * Returns a human-readable name for the stage. + */ + public String name() { + return name; + } + + /** + * Returns the time the average worker spent reading input data, divided by the longest time spent + * by any worker in any segment. + */ + public double readRatioAvg() { + return readRatioAvg; + } + + /** + * Returns the time the slowest worker spent reading input data, divided by the longest time spent + * by any worker in any segment. + */ + public double readRatioMax() { + return readRatioMax; + } + + /** + * Returns the number of rows (top-level records) read by the stage. 
+ */ + public long recordsRead() { + return recordsRead; + } + + /** + * Returns the number of rows (top-level records) written by the stage. + */ + public long recordsWritten() { + return recordsWritten; + } + + /** + * Returns the list of steps within the stage in dependency order (approximately chronological). + */ + public List steps() { + return steps; + } + + /** + * Returns the time the average worker spent waiting to be scheduled, divided by the longest time + * spent by any worker in any segment. + */ + public double waitRatioAvg() { + return waitRatioAvg; + } + + /** + * Returns the time the slowest worker spent waiting to be scheduled, divided by the longest time + * spent by any worker in any segment. + */ + public double waitRatioMax() { + return waitRatioMax; + } + + /** + * Returns the time the average worker spent writing output data, divided by the longest time + * spent by any worker in any segment. + */ + public double writeRatioAvg() { + return writeRatioAvg; + } + + /** + * Returns the time the slowest worker spent writing output data, divided by the longest time + * spent by any worker in any segment. 
+ */ + public double writeRatioMax() { + return writeRatioMax; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("computeRatioAvg", computeRatioAvg) + .add("computeRatioMax", computeRatioMax) + .add("id", id) + .add("name", name) + .add("readRatioAvg", readRatioAvg) + .add("readRatioMax", readRatioMax) + .add("recordsRead", recordsRead) + .add("recordsWritten", recordsWritten) + .add("steps", steps) + .add("waitRatioAvg", waitRatioAvg) + .add("waitRatioMax", waitRatioMax) + .add("writeRatioAvg", writeRatioAvg) + .add("writeRatioMax", writeRatioMax) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(computeRatioAvg, computeRatioMax, id, name, readRatioAvg, readRatioMax, + recordsRead, recordsWritten, steps, waitRatioAvg, waitRatioMax, writeRatioAvg); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof QueryStage)) { + return false; + } + QueryStage other = (QueryStage) obj; + return id == other.id + && computeRatioAvg == other.computeRatioAvg + && computeRatioMax == other.computeRatioMax + && readRatioAvg == other.readRatioAvg + && readRatioMax == other.readRatioMax + && recordsRead == other.recordsRead + && recordsWritten == other.recordsWritten + && waitRatioAvg == other.waitRatioAvg + && waitRatioMax == other.waitRatioMax + && writeRatioAvg == other.writeRatioAvg + && writeRatioMax == other.writeRatioMax + && Objects.equals(steps, other.steps) + && Objects.equals(name, other.name); + } + + static Builder builder() { + return new Builder(); + } + + ExplainQueryStage toPb() { + ExplainQueryStage stagePb = new ExplainQueryStage() + .setComputeRatioAvg(computeRatioAvg) + .setComputeRatioMax(computeRatioMax) + .setId(id) + .setName(name) + .setReadRatioAvg(readRatioAvg) + .setReadRatioMax(readRatioMax) + .setRecordsRead(recordsRead) + .setRecordsWritten(recordsWritten) + .setWaitRatioAvg(waitRatioAvg) + .setWaitRatioMax(waitRatioMax) + 
.setWriteRatioAvg(writeRatioAvg) + .setWriteRatioMax(writeRatioMax); + if (steps != null) { + stagePb.setSteps(Lists.transform(steps, QueryStep.TO_PB_FUNCTION)); + } + return stagePb; + } + + static QueryStage fromPb(com.google.api.services.bigquery.model.ExplainQueryStage stagePb) { + Builder builder = new QueryStage.Builder(); + builder.computeRatioAvg(stagePb.getComputeRatioAvg()); + builder.computeRatioMax(stagePb.getComputeRatioMax()); + builder.id(stagePb.getId()); + builder.name(stagePb.getName()); + builder.readRatioAvg(stagePb.getReadRatioAvg()); + builder.readRatioMax(stagePb.getReadRatioMax()); + builder.recordsRead(stagePb.getRecordsRead()); + builder.recordsWritten(stagePb.getRecordsWritten()); + if (stagePb.getSteps() != null) { + builder.steps(Lists.transform(stagePb.getSteps(), QueryStep.FROM_PB_FUNCTION)); + } + builder.waitRatioAvg(stagePb.getWaitRatioAvg()); + builder.waitRatioMax(stagePb.getWaitRatioMax()); + builder.writeRatioAvg(stagePb.getWriteRatioAvg()); + builder.writeRatioMax(stagePb.getWriteRatioMax()); + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java new file mode 100644 index 000000000000..787bb0d7f35f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java @@ -0,0 +1,159 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * This class represents the schema for a Google BigQuery Table or data source. + */ +public class Schema implements Serializable { + + static final Function + FROM_PB_FUNCTION = new Function() { + @Override + public Schema apply(com.google.api.services.bigquery.model.TableSchema pb) { + return Schema.fromPb(pb); + } + }; + static final Function + TO_PB_FUNCTION = new Function() { + @Override + public com.google.api.services.bigquery.model.TableSchema apply(Schema schema) { + return schema.toPb(); + } + }; + + private static final long serialVersionUID = 2007400596384553696L; + + private final List fields; + + public static final class Builder { + + private List fields; + + private Builder() {} + + /** + * Adds a field's schema to the table's schema. + */ + public Builder addField(Field field) { + if (fields == null) { + fields = Lists.newArrayList(); + } + fields.add(checkNotNull(field)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Iterable fields) { + this.fields = Lists.newArrayList(checkNotNull(fields)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Field... fields) { + this.fields = Lists.newArrayList(fields); + return this; + } + + /** + * Creates an {@code Schema} object. + */ + public Schema build() { + return new Schema(this); + } + } + + private Schema(Builder builder) { + this.fields = builder.fields != null ? 
ImmutableList.copyOf(builder.fields) + : ImmutableList.of(); + } + + /** + * Returns the fields in the current table schema. + */ + public List fields() { + return fields; + } + + /** + * Returns a builder for the {@code Schema} object. + */ + public Builder toBuilder() { + return builder().fields(fields); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(fields); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Schema && Objects.equals(toPb(), ((Schema) obj).toPb()); + } + + com.google.api.services.bigquery.model.TableSchema toPb() { + com.google.api.services.bigquery.model.TableSchema tableSchemaPb = + new com.google.api.services.bigquery.model.TableSchema(); + if (fields != null) { + List fieldsPb = Lists.transform(fields, Field.TO_PB_FUNCTION); + tableSchemaPb.setFields(fieldsPb); + } + return tableSchemaPb; + } + + public static Builder builder() { + return new Builder(); + } + + public static Schema of(Iterable fields) { + return builder().fields(fields).build(); + } + + public static Schema of(Field... fields) { + return builder().fields(fields).build(); + } + + static Schema fromPb(com.google.api.services.bigquery.model.TableSchema tableSchemaPb) { + return Schema.of(Lists.transform(tableSchemaPb.getFields(), Field.FROM_PB_FUNCTION)); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/StandardTableDefinition.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/StandardTableDefinition.java new file mode 100644 index 000000000000..d0e49157a99c --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/StandardTableDefinition.java @@ -0,0 +1,282 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.Streamingbuffer; +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.io.Serializable; +import java.math.BigInteger; +import java.util.Objects; + +/** + * A Google BigQuery default table definition. This definition is used for standard, two-dimensional + * tables with individual records organized in rows, and a data type assigned to each column (also + * called a field). Individual fields within a record may contain nested and repeated children + * fields. Every table is described by a schema that describes field names, types, and other + * information. + * + * @see Managing Tables + */ +public class StandardTableDefinition extends TableDefinition { + + private static final long serialVersionUID = 2113445776046717900L; + + private final Long numBytes; + private final Long numRows; + private final String location; + private final StreamingBuffer streamingBuffer; + + /** + * Google BigQuery Table's Streaming Buffer information. This class contains information on a + * table's streaming buffer as the estimated size in number of rows/bytes. 
+ */ + public static class StreamingBuffer implements Serializable { + + private static final long serialVersionUID = 822027055549277843L; + private final long estimatedRows; + private final long estimatedBytes; + private final long oldestEntryTime; + + StreamingBuffer(long estimatedRows, long estimatedBytes, long oldestEntryTime) { + this.estimatedRows = estimatedRows; + this.estimatedBytes = estimatedBytes; + this.oldestEntryTime = oldestEntryTime; + } + + /** + * Returns a lower-bound estimate of the number of rows currently in the streaming buffer. + */ + public long estimatedRows() { + return estimatedRows; + } + + /** + * Returns a lower-bound estimate of the number of bytes currently in the streaming buffer. + */ + public long estimatedBytes() { + return estimatedBytes; + } + + /** + * Returns the timestamp of the oldest entry in the streaming buffer, in milliseconds since + * epoch. + */ + public long oldestEntryTime() { + return oldestEntryTime; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("estimatedRows", estimatedRows) + .add("estimatedBytes", estimatedBytes) + .add("oldestEntryTime", oldestEntryTime) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(estimatedRows, estimatedBytes, oldestEntryTime); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof StreamingBuffer + && Objects.equals(toPb(), ((StreamingBuffer) obj).toPb()); + } + + Streamingbuffer toPb() { + return new Streamingbuffer() + .setEstimatedBytes(BigInteger.valueOf(estimatedBytes)) + .setEstimatedRows(BigInteger.valueOf(estimatedRows)) + .setOldestEntryTime(BigInteger.valueOf(oldestEntryTime)); + } + + static StreamingBuffer fromPb(Streamingbuffer streamingBufferPb) { + return new StreamingBuffer(streamingBufferPb.getEstimatedRows().longValue(), + streamingBufferPb.getEstimatedBytes().longValue(), + streamingBufferPb.getOldestEntryTime().longValue()); + } + } + + public static final 
class Builder + extends TableDefinition.Builder { + + private Long numBytes; + private Long numRows; + private String location; + private StreamingBuffer streamingBuffer; + + private Builder() { + super(Type.TABLE); + } + + private Builder(StandardTableDefinition tableDefinition) { + super(tableDefinition); + this.numBytes = tableDefinition.numBytes; + this.numRows = tableDefinition.numRows; + this.location = tableDefinition.location; + this.streamingBuffer = tableDefinition.streamingBuffer; + } + + private Builder(Table tablePb) { + super(tablePb); + if (tablePb.getNumRows() != null) { + this.numRows(tablePb.getNumRows().longValue()); + } + this.numBytes = tablePb.getNumBytes(); + this.location = tablePb.getLocation(); + if (tablePb.getStreamingBuffer() != null) { + this.streamingBuffer = StreamingBuffer.fromPb(tablePb.getStreamingBuffer()); + } + } + + Builder numBytes(Long numBytes) { + this.numBytes = numBytes; + return self(); + } + + Builder numRows(Long numRows) { + this.numRows = numRows; + return self(); + } + + Builder location(String location) { + this.location = location; + return self(); + } + + Builder streamingBuffer(StreamingBuffer streamingBuffer) { + this.streamingBuffer = streamingBuffer; + return self(); + } + + /** + * Creates a {@code StandardTableDefinition} object. + */ + @Override + public StandardTableDefinition build() { + return new StandardTableDefinition(this); + } + } + + private StandardTableDefinition(Builder builder) { + super(builder); + this.numBytes = builder.numBytes; + this.numRows = builder.numRows; + this.location = builder.location; + this.streamingBuffer = builder.streamingBuffer; + } + + /** + * Returns the size of this table in bytes, excluding any data in the streaming buffer. + */ + public Long numBytes() { + return numBytes; + } + + /** + * Returns the number of rows in this table, excluding any data in the streaming buffer. 
+ */ + public Long numRows() { + return numRows; + } + + /** + * Returns the geographic location where the table should reside. This value is inherited from the + * dataset. + * + * @see + * Dataset Location + */ + public String location() { + return location; + } + + /** + * Returns information on the table's streaming buffer if any exists. Returns {@code null} if no + * streaming buffer exists. + */ + public StreamingBuffer streamingBuffer() { + return streamingBuffer; + } + + /** + * Returns a builder for a BigQuery standard table definition. + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Creates a BigQuery standard table definition given its schema. + * + * @param schema the schema of the table + */ + public static StandardTableDefinition of(Schema schema) { + return builder().schema(schema).build(); + } + + /** + * Returns a builder for the {@code StandardTableDefinition} object. + */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("numBytes", numBytes) + .add("numRows", numRows) + .add("location", location) + .add("streamingBuffer", streamingBuffer); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof StandardTableDefinition && baseEquals((StandardTableDefinition) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), numBytes, numRows, location, streamingBuffer); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + if (numRows != null) { + tablePb.setNumRows(BigInteger.valueOf(numRows)); + } + tablePb.setNumBytes(numBytes); + tablePb.setLocation(location); + if (streamingBuffer != null) { + tablePb.setStreamingBuffer(streamingBuffer.toPb()); + } + return tablePb; + } + + @SuppressWarnings("unchecked") + static StandardTableDefinition fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.collect.ImmutableList;
import com.google.gcloud.Page;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.List;
import java.util.Objects;

/**
 * A Google BigQuery Table.
 *
 * <p>Objects of this class are immutable. Operations that modify the table like {@link #update}
 * return a new object. To get a {@code Table} object with the most recent information use
 * {@link #reload}. {@code Table} adds a layer of service-related functionality over
 * {@link TableInfo}.
 * </p>
 */
public final class Table extends TableInfo {

  private static final long serialVersionUID = 5744556727066570096L;

  private final BigQueryOptions options;
  // Transient: the service stub is re-created from options on deserialization (see readObject).
  private transient BigQuery bigquery;

  /**
   * A builder for {@code Table} objects.
   */
  public static class Builder extends TableInfo.Builder {

    private final BigQuery bigquery;
    private final TableInfo.BuilderImpl infoBuilder;

    Builder(BigQuery bigquery, TableId tableId, TableDefinition definition) {
      this.bigquery = bigquery;
      this.infoBuilder = new TableInfo.BuilderImpl();
      this.infoBuilder.tableId(tableId).definition(definition);
    }

    Builder(Table table) {
      this.bigquery = table.bigquery;
      this.infoBuilder = new TableInfo.BuilderImpl(table);
    }

    @Override
    Builder creationTime(Long creationTime) {
      infoBuilder.creationTime(creationTime);
      return this;
    }

    @Override
    public Builder description(String description) {
      infoBuilder.description(description);
      return this;
    }

    @Override
    Builder etag(String etag) {
      infoBuilder.etag(etag);
      return this;
    }

    @Override
    public Builder expirationTime(Long expirationTime) {
      infoBuilder.expirationTime(expirationTime);
      return this;
    }

    @Override
    public Builder friendlyName(String friendlyName) {
      infoBuilder.friendlyName(friendlyName);
      return this;
    }

    @Override
    Builder id(String id) {
      infoBuilder.id(id);
      return this;
    }

    @Override
    Builder lastModifiedTime(Long lastModifiedTime) {
      infoBuilder.lastModifiedTime(lastModifiedTime);
      return this;
    }

    @Override
    Builder selfLink(String selfLink) {
      infoBuilder.selfLink(selfLink);
      return this;
    }

    @Override
    public Builder tableId(TableId tableId) {
      infoBuilder.tableId(tableId);
      return this;
    }

    @Override
    public Builder definition(TableDefinition definition) {
      infoBuilder.definition(definition);
      return this;
    }

    @Override
    public Table build() {
      return new Table(bigquery, infoBuilder);
    }
  }

  Table(BigQuery bigquery, TableInfo.BuilderImpl infoBuilder) {
    super(infoBuilder);
    this.bigquery = checkNotNull(bigquery);
    this.options = bigquery.options();
  }

  /**
   * Checks if this table exists.
   *
   * @return {@code true} if this table exists, {@code false} otherwise
   * @throws BigQueryException upon failure
   */
  public boolean exists() {
    // Request no fields: the cheapest possible existence probe.
    return bigquery.getTable(tableId(), BigQuery.TableOption.fields()) != null;
  }

  /**
   * Fetches current table's latest information. Returns {@code null} if the table does not exist.
   *
   * @param options table options
   * @return a {@code Table} object with latest information or {@code null} if not found
   * @throws BigQueryException upon failure
   */
  public Table reload(BigQuery.TableOption... options) {
    return bigquery.getTable(tableId(), options);
  }

  /**
   * Updates the table's information with this table's information. Dataset's and table's
   * user-defined ids cannot be changed. A new {@code Table} object is returned.
   *
   * @param options table options
   * @return a {@code Table} object with updated information
   * @throws BigQueryException upon failure
   */
  public Table update(BigQuery.TableOption... options) {
    return bigquery.update(this, options);
  }

  /**
   * Deletes this table.
   *
   * @return {@code true} if table was deleted, {@code false} if it was not found
   * @throws BigQueryException upon failure
   */
  public boolean delete() {
    return bigquery.delete(tableId());
  }

  /**
   * Insert rows into the table.
   *
   * @param rows rows to be inserted
   * @throws BigQueryException upon failure
   */
  public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
      throws BigQueryException {
    return bigquery.insertAll(InsertAllRequest.of(tableId(), rows));
  }

  /**
   * Insert rows into the table.
   *
   * @param rows rows to be inserted
   * @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set
   *     the entire insert operation will fail if rows to be inserted contain an invalid row
   * @param ignoreUnknownValues whether to accept rows that contain values that do not match the
   *     schema. The unknown values are ignored. If not set, rows with unknown values are considered
   *     to be invalid
   * @throws BigQueryException upon failure
   */
  public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows,
      boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException {
    InsertAllRequest request = InsertAllRequest.builder(tableId(), rows)
        .skipInvalidRows(skipInvalidRows)
        .ignoreUnknownValues(ignoreUnknownValues)
        .build();
    return bigquery.insertAll(request);
  }

  /**
   * Returns the paginated list of rows in this table.
   *
   * @param options table data list options
   * @throws BigQueryException upon failure
   */
  public Page<List<FieldValue>> list(BigQuery.TableDataListOption... options)
      throws BigQueryException {
    return bigquery.listTableData(tableId(), options);
  }

  /**
   * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
   * started {@link Job} object.
   *
   * @param destinationDataset the user-defined id of the destination dataset
   * @param destinationTable the user-defined id of the destination table
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job copy(String destinationDataset, String destinationTable, BigQuery.JobOption... options)
      throws BigQueryException {
    return copy(TableId.of(destinationDataset, destinationTable), options);
  }

  /**
   * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
   * started {@link Job} object.
   *
   * @param destinationTable the destination table of the copy job
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job copy(TableId destinationTable, BigQuery.JobOption... options)
      throws BigQueryException {
    CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, tableId());
    return bigquery.create(JobInfo.of(configuration), options);
  }

  /**
   * Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
   * started {@link Job} object.
   *
   * @param format the format of the extracted data
   * @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
   *     where the extracted table should be written
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job extract(String format, String destinationUri, BigQuery.JobOption... options)
      throws BigQueryException {
    return extract(format, ImmutableList.of(destinationUri), options);
  }

  /**
   * Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
   * the started {@link Job} object.
   *
   * @param format the format of the exported data
   * @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
   *     where the extracted table should be written
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job extract(String format, List<String> destinationUris, BigQuery.JobOption... options)
      throws BigQueryException {
    ExtractJobConfiguration extractConfiguration =
        ExtractJobConfiguration.of(tableId(), destinationUris, format);
    return bigquery.create(JobInfo.of(extractConfiguration), options);
  }

  /**
   * Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
   * the started {@link Job} object.
   *
   * @param format the format of the data to load
   * @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
   *     which to load the data
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job load(FormatOptions format, String sourceUri, BigQuery.JobOption... options)
      throws BigQueryException {
    return load(format, ImmutableList.of(sourceUri), options);
  }

  /**
   * Starts a BigQuery Job to load data into the current table from the provided source URIs.
   * Returns the started {@link Job} object.
   *
   * @param format the format of the exported data
   * @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
   *     which to load the data
   * @param options job options
   * @throws BigQueryException upon failure
   */
  public Job load(FormatOptions format, List<String> sourceUris, BigQuery.JobOption... options)
      throws BigQueryException {
    LoadJobConfiguration loadConfig = LoadJobConfiguration.of(tableId(), sourceUris, format);
    return bigquery.create(JobInfo.of(loadConfig), options);
  }

  /**
   * Returns the table's {@code BigQuery} object used to issue requests.
   */
  public BigQuery bigquery() {
    return bigquery;
  }

  @Override
  public Builder toBuilder() {
    return new Builder(this);
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof Table
        && Objects.equals(toPb(), ((Table) obj).toPb())
        && Objects.equals(options, ((Table) obj).options);
  }

  @Override
  public int hashCode() {
    return Objects.hash(super.hashCode(), options);
  }

  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    // Re-create the transient service stub from the serialized options.
    this.bigquery = options.service();
  }

  static Table fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Table tablePb) {
    return new Table(bigquery, new TableInfo.BuilderImpl(tablePb));
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; + +/** + * WriteChannel implementation to stream data into a BigQuery table. + */ +class TableDataWriteChannel extends BaseWriteChannel { + + TableDataWriteChannel(BigQueryOptions options, + WriteChannelConfiguration writeChannelConfiguration) { + this(options, writeChannelConfiguration, options.rpc().open(writeChannelConfiguration.toPb())); + } + + TableDataWriteChannel(BigQueryOptions options, WriteChannelConfiguration config, + String uploadId) { + super(options, config, uploadId); + } + + @Override + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), BigQueryImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl + extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -787362105981823738L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder + extends BaseWriteChannel.BaseState.Builder { + + private Builder(BigQueryOptions options, WriteChannelConfiguration configuration, + String uploadId) { + super(options, configuration, uploadId); + } + + public RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(BigQueryOptions options, WriteChannelConfiguration config, + String uploadId) { + return new Builder(options, 
config, uploadId); + } + + @Override + public WriteChannel restore() { + TableDataWriteChannel channel = new TableDataWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDefinition.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDefinition.java new file mode 100644 index 000000000000..26e7bcc76f55 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDefinition.java @@ -0,0 +1,182 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for a Google BigQuery table definition. + */ +public abstract class TableDefinition implements Serializable { + + private static final long serialVersionUID = -374760330662959529L; + + private final Type type; + private final Schema schema; + + /** + * The table type. + */ + public enum Type { + /** + * A normal BigQuery table. Instances of {@code TableDefinition} for this type are implemented + * by {@link StandardTableDefinition}. + */ + TABLE, + + /** + * A virtual table defined by a SQL query. 
Instances of {@code TableDefinition} for this type + * are implemented by {@link ViewDefinition}. + * + * @see Views + */ + VIEW, + + /** + * A BigQuery table backed by external data. Instances of {@code TableDefinition} for this type + * are implemented by {@link ExternalTableDefinition}. + * + * @see Federated Data + * Sources + */ + EXTERNAL + } + + /** + * Base builder for table definitions. + * + * @param the table definition class + * @param the table definition builder + */ + public abstract static class Builder> { + + private Type type; + private Schema schema; + + Builder(Type type) { + this.type = type; + } + + Builder(TableDefinition tableDefinition) { + this.type = tableDefinition.type; + this.schema = tableDefinition.schema; + } + + Builder(Table tablePb) { + this.type = Type.valueOf(tablePb.getType()); + if (tablePb.getSchema() != null) { + this.schema(Schema.fromPb(tablePb.getSchema())); + } + } + + @SuppressWarnings("unchecked") + B self() { + return (B) this; + } + + B type(Type type) { + this.type = type; + return self(); + } + + /** + * Sets the table schema. + */ + public B schema(Schema schema) { + this.schema = checkNotNull(schema); + return self(); + } + + /** + * Creates an object. + */ + public abstract T build(); + } + + TableDefinition(Builder builder) { + this.type = builder.type; + this.schema = builder.schema; + } + + /** + * Returns the table's type. If this table is simple table the method returns {@link Type#TABLE}. + * If this table is an external table this method returns {@link Type#EXTERNAL}. If this table is + * a view table this method returns {@link Type#VIEW}. + */ + public Type type() { + return type; + } + + /** + * Returns the table's schema. + */ + public Schema schema() { + return schema; + } + + /** + * Returns a builder for the object. 
+ */ + public abstract Builder toBuilder(); + + MoreObjects.ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this).add("type", type).add("schema", schema); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + final int baseHashCode() { + return Objects.hash(type); + } + + final boolean baseEquals(TableDefinition tableDefinition) { + return Objects.equals(toPb(), tableDefinition.toPb()); + } + + Table toPb() { + Table tablePb = new Table(); + if (schema != null) { + tablePb.setSchema(schema.toPb()); + } + tablePb.setType(type.name()); + return tablePb; + } + + @SuppressWarnings("unchecked") + static T fromPb(Table tablePb) { + switch (Type.valueOf(tablePb.getType())) { + case TABLE: + return (T) StandardTableDefinition.fromPb(tablePb); + case VIEW: + return (T) ViewDefinition.fromPb(tablePb); + case EXTERNAL: + return (T) ExternalTableDefinition.fromPb(tablePb); + default: + // never reached + throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported"); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java new file mode 100644 index 000000000000..20ed53cc1a5d --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java @@ -0,0 +1,122 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableReference; +import com.google.common.base.Function; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Table identity. + */ +public class TableId implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public TableId apply(TableReference pb) { + return TableId.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public TableReference apply(TableId tableId) { + return tableId.toPb(); + } + }; + private static final long serialVersionUID = -6186254820908152300L; + + private final String project; + private final String dataset; + private final String table; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns dataset's user-defined id. + */ + public String dataset() { + return dataset; + } + + /** + * Returns table's user-defined id. + */ + public String table() { + return table; + } + + private TableId(String project, String dataset, String table) { + this.project = project; + this.dataset = dataset; + this.table = table; + } + + /** + * Creates a table identity given project's, dataset's and table's user-defined ids. + */ + public static TableId of(String project, String dataset, String table) { + return new TableId(checkNotNull(project), checkNotNull(dataset), checkNotNull(table)); + } + + /** + * Creates a table identity given dataset's and table's user-defined ids. 
+ */ + public static TableId of(String dataset, String table) { + return new TableId(null, checkNotNull(dataset), checkNotNull(table)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TableId && Objects.equals(toPb(), ((TableId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, dataset, table); + } + + @Override + public String toString() { + return toPb().toString(); + } + + TableId setProjectId(String projectId) { + return project() != null ? this : TableId.of(projectId, dataset(), table()); + } + + TableReference toPb() { + return new TableReference().setProjectId(project).setDatasetId(dataset).setTableId(table); + } + + static TableId fromPb(TableReference tableRef) { + return new TableId( + tableRef.getProjectId(), + tableRef.getDatasetId(), + tableRef.getTableId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java new file mode 100644 index 000000000000..de331350e978 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java @@ -0,0 +1,390 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.math.BigInteger; +import java.util.Objects; + +/** + * Google BigQuery table information. Use {@link StandardTableDefinition} to create simple BigQuery + * table. Use {@link ViewDefinition} to create a BigQuery view. Use {@link ExternalTableDefinition} + * to create a BigQuery a table backed by external data. + * + * @see Managing Tables + */ +public class TableInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public TableInfo apply(Table pb) { + return TableInfo.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public Table apply(TableInfo tableInfo) { + return tableInfo.toPb(); + } + }; + + private static final long serialVersionUID = -7679032506430816205L; + + private final String etag; + private final String id; + private final String selfLink; + private final TableId tableId; + private final String friendlyName; + private final String description; + private final Long creationTime; + private final Long expirationTime; + private final Long lastModifiedTime; + private final TableDefinition definition; + + /** + * A builder for {@code TableInfo} objects. + */ + public abstract static class Builder { + + abstract Builder creationTime(Long creationTime); + + /** + * Sets a user-friendly description for the table. + */ + public abstract Builder description(String description); + + abstract Builder etag(String etag); + + /** + * Sets the time when this table expires, in milliseconds since the epoch. If not present, the + * table will persist indefinitely. 
Expired tables will be deleted and their storage reclaimed. + */ + public abstract Builder expirationTime(Long expirationTime); + + /** + * Sets a user-friendly name for the table. + */ + public abstract Builder friendlyName(String friendlyName); + + abstract Builder id(String id); + + abstract Builder lastModifiedTime(Long lastModifiedTime); + + abstract Builder selfLink(String selfLink); + + /** + * Sets the table identity. + */ + public abstract Builder tableId(TableId tableId); + + /** + * Sets the table definition. Use {@link StandardTableDefinition} to create simple BigQuery + * table. Use {@link ViewDefinition} to create a BigQuery view. Use + * {@link ExternalTableDefinition} to create a BigQuery a table backed by external data. + */ + public abstract Builder definition(TableDefinition definition); + + /** + * Creates a {@code TableInfo} object. + */ + public abstract TableInfo build(); + } + + static class BuilderImpl extends Builder { + + private String etag; + private String id; + private String selfLink; + private TableId tableId; + private String friendlyName; + private String description; + private Long creationTime; + private Long expirationTime; + private Long lastModifiedTime; + private TableDefinition definition; + + BuilderImpl() {} + + BuilderImpl(TableInfo tableInfo) { + this.etag = tableInfo.etag; + this.id = tableInfo.id; + this.selfLink = tableInfo.selfLink; + this.tableId = tableInfo.tableId; + this.friendlyName = tableInfo.friendlyName; + this.description = tableInfo.description; + this.creationTime = tableInfo.creationTime; + this.expirationTime = tableInfo.expirationTime; + this.lastModifiedTime = tableInfo.lastModifiedTime; + this.definition = tableInfo.definition; + } + + BuilderImpl(Table tablePb) { + this.tableId = TableId.fromPb(tablePb.getTableReference()); + if (tablePb.getLastModifiedTime() != null) { + this.lastModifiedTime(tablePb.getLastModifiedTime().longValue()); + } + this.description = tablePb.getDescription(); + 
this.expirationTime = tablePb.getExpirationTime(); + this.friendlyName = tablePb.getFriendlyName(); + this.creationTime = tablePb.getCreationTime(); + this.etag = tablePb.getEtag(); + this.id = tablePb.getId(); + this.selfLink = tablePb.getSelfLink(); + this.definition = TableDefinition.fromPb(tablePb); + } + + @Override + Builder creationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + @Override + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + @Override + Builder etag(String etag) { + this.etag = etag; + return this; + } + + @Override + public Builder expirationTime(Long expirationTime) { + this.expirationTime = firstNonNull(expirationTime, Data.nullOf(Long.class)); + return this; + } + + @Override + public Builder friendlyName(String friendlyName) { + this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); + return this; + } + + @Override + Builder id(String id) { + this.id = id; + return this; + } + + @Override + Builder lastModifiedTime(Long lastModifiedTime) { + this.lastModifiedTime = lastModifiedTime; + return this; + } + + @Override + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + @Override + public Builder tableId(TableId tableId) { + this.tableId = checkNotNull(tableId); + return this; + } + + @Override + public Builder definition(TableDefinition definition) { + this.definition = checkNotNull(definition); + return this; + } + + @Override + public TableInfo build() { + return new TableInfo(this); + } + } + + TableInfo(BuilderImpl builder) { + this.tableId = checkNotNull(builder.tableId); + this.etag = builder.etag; + this.id = builder.id; + this.selfLink = builder.selfLink; + this.friendlyName = builder.friendlyName; + this.description = builder.description; + this.creationTime = builder.creationTime; + this.expirationTime = builder.expirationTime; + 
this.lastModifiedTime = builder.lastModifiedTime; + this.definition = builder.definition; + } + + /** + * Returns the hash of the table resource. + */ + public String etag() { + return etag; + } + + /** + * Returns an opaque id for the table. + */ + public String id() { + return id; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * get or update requests. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns the table identity. + */ + public TableId tableId() { + return tableId; + } + + /** + * Returns a user-friendly name for the table. + */ + public String friendlyName() { + return Data.isNull(friendlyName) ? null : friendlyName; + } + + /** + * Returns a user-friendly description for the table. + */ + public String description() { + return Data.isNull(description) ? null : description; + } + + /** + * Returns the time when this table was created, in milliseconds since the epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the time when this table expires, in milliseconds since the epoch. If not present, the + * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. + */ + public Long expirationTime() { + return Data.isNull(expirationTime) ? null : expirationTime; + } + + /** + * Returns the time when this table was last modified, in milliseconds since the epoch. + */ + public Long lastModifiedTime() { + return lastModifiedTime; + } + + /** + * Returns the table definition. + */ + @SuppressWarnings("unchecked") + public T definition() { + return (T) definition; + } + + /** + * Returns a builder for the table object. 
+ */ + public Builder toBuilder() { + return new BuilderImpl(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("tableId", tableId) + .add("etag", etag) + .add("id", id) + .add("selfLink", selfLink) + .add("friendlyName", friendlyName) + .add("description", description) + .add("expirationTime", expirationTime) + .add("creationTime", creationTime) + .add("lastModifiedTime", lastModifiedTime) + .add("definition", definition) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(tableId); + } + + @Override + public boolean equals(Object obj) { + return obj != null + && obj.getClass().equals(TableInfo.class) + && Objects.equals(toPb(), ((TableInfo) obj).toPb()); + } + + /** + * Returns a builder for a {@code TableInfo} object given table identity and definition. Use + * {@link StandardTableDefinition} to create simple BigQuery table. Use {@link ViewDefinition} to + * create a BigQuery view. Use {@link ExternalTableDefinition} to create a BigQuery a table backed + * by external data. + */ + public static Builder builder(TableId tableId, TableDefinition definition) { + return new BuilderImpl().tableId(tableId).definition(definition); + } + + /** + * Returns a {@code TableInfo} object given table identity and definition. Use + * {@link StandardTableDefinition} to create simple BigQuery table. Use {@link ViewDefinition} to + * create a BigQuery view. Use {@link ExternalTableDefinition} to create a BigQuery a table backed + * by external data. 
+ */ + public static TableInfo of(TableId tableId, TableDefinition definition) { + return builder(tableId, definition).build(); + } + + TableInfo setProjectId(String projectId) { + return toBuilder().tableId(tableId().setProjectId(projectId)).build(); + } + + Table toPb() { + Table tablePb = definition.toPb(); + tablePb.setTableReference(tableId.toPb()); + if (lastModifiedTime != null) { + tablePb.setLastModifiedTime(BigInteger.valueOf(lastModifiedTime)); + } + tablePb.setCreationTime(creationTime); + tablePb.setDescription(description); + tablePb.setEtag(etag); + tablePb.setExpirationTime(expirationTime); + tablePb.setFriendlyName(friendlyName); + tablePb.setId(id); + tablePb.setSelfLink(selfLink); + return tablePb; + } + + static TableInfo fromPb(Table tablePb) { + return new BuilderImpl(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java new file mode 100644 index 000000000000..2135e0ddc941 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java @@ -0,0 +1,151 @@ +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.UserDefinedFunctionResource; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery User Defined Function. BigQuery supports user-defined functions (UDFs) written in + * JavaScript. A UDF is similar to the "Map" function in a MapReduce: it takes a single row as input + * and produces zero or more rows as output. The output can potentially have a different schema than + * the input. 
+ * + * @see User-Defined Functions + * + */ +public abstract class UserDefinedFunction implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunction apply(UserDefinedFunctionResource userDefinedFunctionPb) { + return UserDefinedFunction.fromPb(userDefinedFunctionPb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunctionResource apply(UserDefinedFunction userDefinedFunction) { + return userDefinedFunction.toPb(); + } + }; + + private static final long serialVersionUID = 8704260561787440287L; + + /** + * Type of user-defined function. User defined functions can be provided inline as code blobs + * ({@link #INLINE}) or as a Google Cloud Storage URI ({@link #FROM_URI}). + */ + public enum Type { + INLINE, + FROM_URI + } + + private final Type type; + private final String content; + + UserDefinedFunction(Type type, String content) { + this.type = type; + this.content = content; + } + + public Type type() { + return type; + } + + /** + * If {@link #type()} is {@link Type#INLINE} this method returns a code blob. If {@link #type()} + * is {@link Type#FROM_URI} the method returns a Google Cloud Storage URI (e.g. gs://bucket/path). + */ + public String content() { + return content; + } + + /** + * A Google Cloud BigQuery user-defined function, as a code blob. 
+ */ + static final class InlineFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 1083672109192091686L; + + InlineFunction(String inlineCode) { + super(Type.INLINE, inlineCode); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("inlineCode", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setInlineCode(content()); + } + } + + /** + * A Google Cloud BigQuery user-defined function, as an URI to Google Cloud Storage. + */ + static final class UriFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 4660331691852223839L; + + UriFunction(String functionUri) { + super(Type.FROM_URI, functionUri); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("functionUri", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setResourceUri(content()); + } + } + + @Override + public int hashCode() { + return Objects.hash(type, content); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof UserDefinedFunction + && Objects.equals(toPb(), ((UserDefinedFunction) obj).toPb()); + } + + public abstract com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb(); + + /** + * Creates a Google Cloud BigQuery user-defined function given a code blob. + */ + public static UserDefinedFunction inline(String functionDefinition) { + return new InlineFunction(functionDefinition); + } + + /** + * Creates a Google Cloud BigQuery user-defined function given a Google Cloud Storage URI (e.g. + * gs://bucket/path). 
+ */ + public static UserDefinedFunction fromUri(String functionDefinition) { + return new UriFunction(functionDefinition); + } + + static UserDefinedFunction fromPb( + com.google.api.services.bigquery.model.UserDefinedFunctionResource pb) { + if (pb.getInlineCode() != null) { + return new InlineFunction(pb.getInlineCode()); + } + if (pb.getResourceUri() != null) { + return new UriFunction(pb.getResourceUri()); + } + throw new IllegalArgumentException("Invalid user-defined function"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewDefinition.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewDefinition.java new file mode 100644 index 000000000000..796dd411b4a1 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewDefinition.java @@ -0,0 +1,233 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery view table definition. 
BigQuery's views are logical views, not materialized + * views, which means that the query that defines the view is re-executed every time the view is + * queried. + * + * @see Views + */ +public final class ViewDefinition extends TableDefinition { + + private static final long serialVersionUID = -8789311196910794545L; + + private final String query; + private final List userDefinedFunctions; + + public static final class Builder extends TableDefinition.Builder { + + private String query; + private List userDefinedFunctions; + + private Builder() { + super(Type.VIEW); + } + + private Builder(ViewDefinition viewDefinition) { + super(viewDefinition); + this.query = viewDefinition.query; + this.userDefinedFunctions = viewDefinition.userDefinedFunctions; + } + + private Builder(Table tablePb) { + super(tablePb); + com.google.api.services.bigquery.model.ViewDefinition viewPb = tablePb.getView(); + if (viewPb != null) { + this.query = viewPb.getQuery(); + if (viewPb.getUserDefinedFunctionResources() != null) { + this.userDefinedFunctions = Lists.transform(viewPb.getUserDefinedFunctionResources(), + UserDefinedFunction.FROM_PB_FUNCTION); + } + } + } + + /** + * Sets the query used to create the view. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. + * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(checkNotNull(userDefinedFunctions)); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. + * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(UserDefinedFunction... userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(userDefinedFunctions); + return self(); + } + + /** + * Creates a {@code ViewDefinition} object. 
+ */ + @Override + public ViewDefinition build() { + return new ViewDefinition(this); + } + } + + private ViewDefinition(Builder builder) { + super(builder); + this.query = builder.query; + this.userDefinedFunctions = builder.userDefinedFunctions; + } + + /** + * Returns the query used to create the view. + */ + public String query() { + return query; + } + + /** + * Returns user defined functions that can be used by {@link #query()}. Returns {@code null} if + * not set. + * + * @see User-Defined Functions + * + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns a builder for the {@code ViewInfo} object. + */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("userDefinedFunctions", userDefinedFunctions); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ViewDefinition && baseEquals((ViewDefinition) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), query, userDefinedFunctions); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + com.google.api.services.bigquery.model.ViewDefinition viewDefinition = + new com.google.api.services.bigquery.model.ViewDefinition().setQuery(query); + if (userDefinedFunctions != null) { + viewDefinition.setUserDefinedFunctionResources(Lists.transform(userDefinedFunctions, + UserDefinedFunction.TO_PB_FUNCTION)); + } + tablePb.setView(viewDefinition); + return tablePb; + } + + /** + * Returns a builder for a BigQuery view definition. + * + * @param query the query used to generate the view + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Returns a builder for a BigQuery view definition. 
+ * + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(String query, List functions) { + return new Builder().type(Type.VIEW).userDefinedFunctions(functions).query(query); + } + + /** + * Returns a builder for a BigQuery view definition. + * + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(String query, UserDefinedFunction... functions) { + return new Builder().type(Type.VIEW).userDefinedFunctions(functions).query(query); + } + + /** + * Creates a BigQuery view definition given the query used to generate the table. + * + * @param query the query used to generate the table + */ + public static ViewDefinition of(String query) { + return builder(query).build(); + } + + /** + * Creates a BigQuery view definition given a query and some user-defined functions. + * + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewDefinition of(String query, List functions) { + return builder(query, functions).build(); + } + + /** + * Creates a BigQuery view definition given a query and some user-defined functions. + * + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewDefinition of(String query, UserDefinedFunction... 
functions) { + return builder(query, functions).build(); + } + + @SuppressWarnings("unchecked") + static ViewDefinition fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java new file mode 100644 index 000000000000..6cc44ce7d5d6 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java @@ -0,0 +1,321 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Configuration for a load operation. A load configuration can be used to load data + * into a table with a {@link com.google.gcloud.WriteChannel} + * ({@link BigQuery#writer(WriteChannelConfiguration)}). 
+ */ +public class WriteChannelConfiguration implements LoadConfiguration, Serializable { + + private static final long serialVersionUID = 470267591917413578L; + + private final TableId destinationTable; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Schema schema; + private final Boolean ignoreUnknownValues; + private final List projectionFields; + + public static final class Builder implements LoadConfiguration.Builder { + + private TableId destinationTable; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Schema schema; + private Boolean ignoreUnknownValues; + private List projectionFields; + + private Builder() {} + + private Builder(WriteChannelConfiguration writeChannelConfiguration) { + this.destinationTable = writeChannelConfiguration.destinationTable; + this.createDisposition = writeChannelConfiguration.createDisposition; + this.writeDisposition = writeChannelConfiguration.writeDisposition; + this.formatOptions = writeChannelConfiguration.formatOptions; + this.maxBadRecords = writeChannelConfiguration.maxBadRecords; + this.schema = writeChannelConfiguration.schema; + this.ignoreUnknownValues = writeChannelConfiguration.ignoreUnknownValues; + this.projectionFields = writeChannelConfiguration.projectionFields; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + JobConfigurationLoad loadConfigurationPb = configurationPb.getLoad(); + this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); + } + if (loadConfigurationPb.getWriteDisposition() != null) { + 
this.writeDisposition = WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); + } + if (loadConfigurationPb.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); + } + if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getAllowQuotedNewlines() != null + || loadConfigurationPb.getEncoding() != null + || loadConfigurationPb.getFieldDelimiter() != null + || loadConfigurationPb.getQuote() != null + || loadConfigurationPb.getSkipLeadingRows() != null) { + CsvOptions.Builder builder = CsvOptions.builder() + .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) + .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) + .encoding(loadConfigurationPb.getEncoding()) + .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) + .quote(loadConfigurationPb.getQuote()) + .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); + this.formatOptions = builder.build(); + } + this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); + if (loadConfigurationPb.getSchema() != null) { + this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); + } + this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); + this.projectionFields = loadConfigurationPb.getProjectionFields(); + } + + @Override + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + @Override + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + @Override + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + @Override + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + @Override + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + 
return this; + } + + @Override + public Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + @Override + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + @Override + public Builder projectionFields(List projectionFields) { + this.projectionFields = + projectionFields != null ? ImmutableList.copyOf(projectionFields) : null; + return this; + } + + @Override + public WriteChannelConfiguration build() { + return new WriteChannelConfiguration(this); + } + } + + protected WriteChannelConfiguration(Builder builder) { + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + this.formatOptions = builder.formatOptions; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.projectionFields = builder.projectionFields; + } + + @Override + public TableId destinationTable() { + return destinationTable; + } + + @Override + public CreateDisposition createDisposition() { + return this.createDisposition; + } + + @Override + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public CsvOptions csvOptions() { + return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; + } + + @Override + public Integer maxBadRecords() { + return maxBadRecords; + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public String format() { + return formatOptions != null ? 
formatOptions.type() : null; + } + + @Override + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + @Override + public List projectionFields() { + return projectionFields; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + MoreObjects.ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("formatOptions", formatOptions) + .add("maxBadRecords", maxBadRecords) + .add("schema", schema) + .add("ignoreUnknownValue", ignoreUnknownValues) + .add("projectionFields", projectionFields); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof WriteChannelConfiguration + && Objects.equals(toPb(), ((WriteChannelConfiguration) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(destinationTable, createDisposition, writeDisposition, formatOptions, + maxBadRecords, schema, ignoreUnknownValues, projectionFields); + } + + WriteChannelConfiguration setProjectId(String projectId) { + return toBuilder().destinationTable(destinationTable().setProjectId(projectId)).build(); + } + + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + loadConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (createDisposition != null) { + loadConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + if (csvOptions() != null) { + CsvOptions csvOptions = csvOptions(); + loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) + .setAllowJaggedRows(csvOptions.allowJaggedRows()) + 
.setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) + .setEncoding(csvOptions.encoding()) + .setQuote(csvOptions.quote()) + .setSkipLeadingRows(csvOptions.skipLeadingRows()); + } + if (schema != null) { + loadConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + loadConfigurationPb.setSourceFormat(formatOptions.type()); + } + loadConfigurationPb.setMaxBadRecords(maxBadRecords); + loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + loadConfigurationPb.setProjectionFields(projectionFields); + return new com.google.api.services.bigquery.model.JobConfiguration() + .setLoad(loadConfigurationPb); + } + + static WriteChannelConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + return new Builder(configurationPb).build(); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table. + */ + public static Builder builder(TableId destinationTable) { + return new Builder().destinationTable(destinationTable); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table and format. + */ + public static Builder builder(TableId destinationTable, FormatOptions format) { + return builder(destinationTable).formatOptions(format); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table. + */ + public static WriteChannelConfiguration of(TableId destinationTable) { + return builder(destinationTable).build(); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table and format. 
+ */ + public static WriteChannelConfiguration of(TableId destinationTable, FormatOptions format) { + return builder(destinationTable).formatOptions(format).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java new file mode 100644 index 000000000000..db5e956e0a12 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java @@ -0,0 +1,47 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to Google Cloud BigQuery. + * + *

A simple usage example showing how to create a table if it does not exist and load data into
 * it. For the complete source code see
 * <a href="https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java">
 * CreateTableAndLoadData.java</a>.
 * <pre>
 {@code
+ * BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+ * TableId tableId = TableId.of("dataset", "table");
+ * Table table = bigquery.getTable(tableId);
+ * if (table == null) {
+ *   System.out.println("Creating table " + tableId);
+ *   Field integerField = Field.of("fieldName", Field.Type.integer());
+ *   Schema schema = Schema.of(integerField);
+ *   table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
+ * }
+ * System.out.println("Loading data into table " + tableId);
+ * Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
+ * while (!loadJob.isDone()) {
+ *   Thread.sleep(1000L);
+ * }
+ * if (loadJob.status().error() != null) {
+ *   System.out.println("Job completed with errors");
+ * } else {
+ *   System.out.println("Job succeeded");
+ * }}
+ * + * @see Google Cloud BigQuery + */ +package com.google.gcloud.bigquery; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java new file mode 100644 index 000000000000..d0b740e9e390 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/spi/BigQueryRpc.java @@ -0,0 +1,254 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery.spi;

import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.GetQueryResultsResponse;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.QueryRequest;
import com.google.api.services.bigquery.model.QueryResponse;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableRow;
import com.google.gcloud.bigquery.BigQueryException;

import java.util.Map;

/**
 * Service-provider interface for the Google Cloud BigQuery RPC layer. Implementations translate
 * the service's REST operations into typed Java calls; all methods report failures as
 * {@link BigQueryException}.
 */
public interface BigQueryRpc {

  // These options are part of the Google Cloud BigQuery query parameters.
  enum Option {
    FIELDS("fields"),
    DELETE_CONTENTS("deleteContents"),
    ALL_DATASETS("all"),
    ALL_USERS("allUsers"),
    MAX_RESULTS("maxResults"),
    PAGE_TOKEN("pageToken"),
    START_INDEX("startIndex"),
    STATE_FILTER("stateFilter"),
    TIMEOUT("timeoutMs");

    private final String value;

    Option(String value) {
      this.value = value;
    }

    /** Returns the query-parameter name used by the BigQuery REST API for this option. */
    public String value() {
      return value;
    }

    // Unchecked cast is safe by convention: callers store values of the type they later request.
    @SuppressWarnings("unchecked")
    <T> T get(Map<Option, ?> options) {
      return (T) options.get(this);
    }

    String getString(Map<Option, ?> options) {
      return get(options);
    }

    Long getLong(Map<Option, ?> options) {
      return get(options);
    }

    Boolean getBoolean(Map<Option, ?> options) {
      return get(options);
    }
  }

  /**
   * An immutable pair, used by list methods to return a page cursor together with the page's
   * results.
   */
  class Tuple<X, Y> {

    private final X x;
    private final Y y;

    private Tuple(X x, Y y) {
      this.x = x;
      this.y = y;
    }

    public static <X, Y> Tuple<X, Y> of(X x, Y y) {
      return new Tuple<>(x, y);
    }

    public X x() {
      return x;
    }

    public Y y() {
      return y;
    }
  }

  /**
   * Returns the requested dataset or {@code null} if not found.
   *
   * @throws BigQueryException upon failure
   */
  Dataset getDataset(String datasetId, Map<Option, ?> options);

  /**
   * Lists the project's datasets. Partial information is returned on a dataset (datasetReference,
   * friendlyName and id). To get full information use {@link #getDataset(String, Map)}.
   *
   * @return a pair of the next page token and the current page of datasets
   * @throws BigQueryException upon failure
   */
  Tuple<String, Iterable<Dataset>> listDatasets(Map<Option, ?> options);

  /**
   * Creates a new dataset.
   *
   * @throws BigQueryException upon failure
   */
  Dataset create(Dataset dataset, Map<Option, ?> options);

  /**
   * Creates a new table.
   *
   * @throws BigQueryException upon failure
   */
  Table create(Table table, Map<Option, ?> options);

  /**
   * Creates a new job.
   *
   * @throws BigQueryException upon failure
   */
  Job create(Job job, Map<Option, ?> options);

  /**
   * Deletes the requested dataset.
   *
   * @return {@code true} if dataset was deleted, {@code false} if it was not found
   * @throws BigQueryException upon failure
   */
  boolean deleteDataset(String datasetId, Map<Option, ?> options);

  /**
   * Updates dataset information.
   *
   * @throws BigQueryException upon failure
   */
  Dataset patch(Dataset dataset, Map<Option, ?> options);

  /**
   * Updates table information.
   *
   * @throws BigQueryException upon failure
   */
  Table patch(Table table, Map<Option, ?> options);

  /**
   * Returns the requested table or {@code null} if not found.
   *
   * @throws BigQueryException upon failure
   */
  Table getTable(String datasetId, String tableId, Map<Option, ?> options);

  /**
   * Lists the dataset's tables. Partial information is returned on a table (tableReference,
   * friendlyName, id and type). To get full information use {@link #getTable(String, String, Map)}.
   *
   * @return a pair of the next page token and the current page of tables
   * @throws BigQueryException upon failure
   */
  Tuple<String, Iterable<Table>> listTables(String dataset, Map<Option, ?> options);

  /**
   * Deletes the requested table.
   *
   * @return {@code true} if table was deleted, {@code false} if it was not found
   * @throws BigQueryException upon failure
   */
  boolean deleteTable(String datasetId, String tableId);

  /**
   * Sends an insert all request.
   *
   * @throws BigQueryException upon failure
   */
  TableDataInsertAllResponse insertAll(String datasetId, String tableId,
      TableDataInsertAllRequest request);

  /**
   * Lists the table's rows.
   *
   * @return a pair of the next page token and the current page of rows
   * @throws BigQueryException upon failure
   */
  Tuple<String, Iterable<TableRow>> listTableData(String datasetId, String tableId,
      Map<Option, ?> options);

  /**
   * Returns the requested job or {@code null} if not found.
   *
   * @throws BigQueryException upon failure
   */
  Job getJob(String jobId, Map<Option, ?> options);

  /**
   * Lists the project's jobs.
   *
   * @return a pair of the next page token and the current page of jobs
   * @throws BigQueryException upon failure
   */
  Tuple<String, Iterable<Job>> listJobs(Map<Option, ?> options);

  /**
   * Sends a job cancel request. This call will return immediately, and the client will need to poll
   * for the job status to see if the cancel completed successfully.
   *
   * @return {@code true} if cancel was requested successfully, {@code false} if the job was not
   *     found
   * @throws BigQueryException upon failure
   */
  boolean cancel(String jobId);

  /**
   * Returns results of the query associated with the provided job.
   *
   * @throws BigQueryException upon failure
   */
  GetQueryResultsResponse getQueryResults(String jobId, Map<Option, ?> options);

  /**
   * Runs the query associated with the request.
   *
   * @throws BigQueryException upon failure
   */
  QueryResponse query(QueryRequest request);

  /**
   * Opens a resumable upload session to load data into a BigQuery table and returns an upload URI.
   *
   * @param configuration load configuration
   * @throws BigQueryException upon failure
   */
  String open(JobConfiguration configuration);

  /**
   * Uploads the provided data to the resumable upload session at the specified position.
   *
   * @param uploadId the resumable upload session URI
   * @param toWrite a byte array of data to upload
   * @param toWriteOffset offset in the {@code toWrite} param to start writing from
   * @param destOffset offset in the destination where to upload data to
   * @param length the number of bytes to upload
   * @param last {@code true} indicates that the last chunk is being uploaded
   * @throws BigQueryException upon failure
   */
  void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length,
      boolean last);
}

// ---------------------------------------------------------------------------
// File: com/google/gcloud/bigquery/spi/BigQueryRpcFactory.java (separate file
// in the original patch)
// ---------------------------------------------------------------------------

/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery.spi;

import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.spi.ServiceRpcFactory;

/**
 * An interface for BigQuery RPC factory.
 * Implementation will be loaded via {@link java.util.ServiceLoader}.
 */
public interface BigQueryRpcFactory extends ServiceRpcFactory<BigQueryRpc, BigQueryOptions> {
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.gcloud.bigquery.spi;

import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.ALL_DATASETS;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.ALL_USERS;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.DELETE_CONTENTS;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.FIELDS;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.MAX_RESULTS;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.PAGE_TOKEN;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.START_INDEX;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.STATE_FILTER;
import static com.google.gcloud.bigquery.spi.BigQueryRpc.Option.TIMEOUT;
import static java.net.HttpURLConnection.HTTP_CREATED;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static java.net.HttpURLConnection.HTTP_OK;

import com.google.api.client.http.ByteArrayContent;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpResponseException;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.json.JsonHttpContent;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson.JacksonFactory;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Dataset;
import com.google.api.services.bigquery.model.DatasetList;
import com.google.api.services.bigquery.model.DatasetReference;
import com.google.api.services.bigquery.model.GetQueryResultsResponse;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobList;
import com.google.api.services.bigquery.model.JobStatus;
import com.google.api.services.bigquery.model.QueryRequest;
import com.google.api.services.bigquery.model.QueryResponse;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataList;
import com.google.api.services.bigquery.model.TableList;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableRow;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

import com.google.gcloud.bigquery.BigQueryException;
import com.google.gcloud.bigquery.BigQueryOptions;

import java.io.IOException;
import java.math.BigInteger;
import java.util.List;
import java.util.Map;

/**
 * Default {@link BigQueryRpc} implementation backed by the generated
 * {@code google-api-services-bigquery} REST client. {@code IOException}s from the underlying
 * client are translated to {@link BigQueryException}; "get"-style methods map HTTP 404 to
 * {@code null} and "delete"/"cancel" methods map it to {@code false}.
 */
public class DefaultBigQueryRpc implements BigQueryRpc {

  public static final String DEFAULT_PROJECTION = "full";
  private static final String BASE_RESUMABLE_URI =
      "https://www.googleapis.com/upload/bigquery/v2/projects/";
  // see: https://cloud.google.com/bigquery/loading-data-post-request#resume-upload
  private static final int HTTP_RESUME_INCOMPLETE = 308;
  private final BigQueryOptions options;
  private final Bigquery bigquery;

  public DefaultBigQueryRpc(BigQueryOptions options) {
    HttpTransport transport = options.httpTransportFactory().create();
    HttpRequestInitializer initializer = options.httpRequestInitializer();
    this.options = options;
    bigquery = new Bigquery.Builder(transport, new JacksonFactory(), initializer)
        .setRootUrl(options.host())
        .setApplicationName(options.applicationName())
        .build();
  }

  private static BigQueryException translate(IOException exception) {
    return new BigQueryException(exception);
  }

  @Override
  public Dataset getDataset(String datasetId, Map<Option, ?> options) {
    try {
      return bigquery.datasets()
          .get(this.options.projectId(), datasetId)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return null;
      }
      throw serviceException;
    }
  }

  @Override
  public Tuple<String, Iterable<Dataset>> listDatasets(Map<Option, ?> options) {
    try {
      DatasetList datasetsList = bigquery.datasets()
          .list(this.options.projectId())
          .setAll(ALL_DATASETS.getBoolean(options))
          .setMaxResults(MAX_RESULTS.getLong(options))
          .setPageToken(PAGE_TOKEN.getString(options))
          .execute();
      Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
      // The list call returns only partial Dataset data; copy it into full Dataset objects so
      // callers get a uniform type.
      return Tuple.<String, Iterable<Dataset>>of(datasetsList.getNextPageToken(),
          Iterables.transform(
              datasets != null ? datasets : ImmutableList.<DatasetList.Datasets>of(),
              new Function<DatasetList.Datasets, Dataset>() {
                @Override
                public Dataset apply(DatasetList.Datasets datasetPb) {
                  return new Dataset()
                      .setDatasetReference(datasetPb.getDatasetReference())
                      .setFriendlyName(datasetPb.getFriendlyName())
                      .setId(datasetPb.getId())
                      .setKind(datasetPb.getKind());
                }
              }));
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Dataset create(Dataset dataset, Map<Option, ?> options) {
    try {
      return bigquery.datasets().insert(this.options.projectId(), dataset)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Table create(Table table, Map<Option, ?> options) {
    try {
      // unset the type, as it is output only
      table.setType(null);
      return bigquery.tables()
          .insert(this.options.projectId(), table.getTableReference().getDatasetId(), table)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Job create(Job job, Map<Option, ?> options) {
    try {
      return bigquery.jobs()
          .insert(this.options.projectId(), job)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public boolean deleteDataset(String datasetId, Map<Option, ?> options) {
    try {
      bigquery.datasets().delete(this.options.projectId(), datasetId)
          .setDeleteContents(DELETE_CONTENTS.getBoolean(options))
          .execute();
      return true;
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return false;
      }
      throw serviceException;
    }
  }

  @Override
  public Dataset patch(Dataset dataset, Map<Option, ?> options) {
    try {
      DatasetReference reference = dataset.getDatasetReference();
      return bigquery.datasets()
          .patch(this.options.projectId(), reference.getDatasetId(), dataset)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Table patch(Table table, Map<Option, ?> options) {
    try {
      // unset the type, as it is output only
      table.setType(null);
      TableReference reference = table.getTableReference();
      return bigquery.tables()
          .patch(this.options.projectId(), reference.getDatasetId(), reference.getTableId(), table)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Table getTable(String datasetId, String tableId, Map<Option, ?> options) {
    try {
      return bigquery.tables()
          .get(this.options.projectId(), datasetId, tableId)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return null;
      }
      throw serviceException;
    }
  }

  @Override
  public Tuple<String, Iterable<Table>> listTables(String datasetId, Map<Option, ?> options) {
    try {
      TableList tableList = bigquery.tables()
          .list(this.options.projectId(), datasetId)
          .setMaxResults(MAX_RESULTS.getLong(options))
          .setPageToken(PAGE_TOKEN.getString(options))
          .execute();
      Iterable<TableList.Tables> tables = tableList.getTables();
      return Tuple.<String, Iterable<Table>>of(tableList.getNextPageToken(),
          Iterables.transform(tables != null ? tables : ImmutableList.<TableList.Tables>of(),
              new Function<TableList.Tables, Table>() {
                @Override
                public Table apply(TableList.Tables tablePb) {
                  return new Table()
                      .setFriendlyName(tablePb.getFriendlyName())
                      .setId(tablePb.getId())
                      .setKind(tablePb.getKind())
                      .setTableReference(tablePb.getTableReference())
                      .setType(tablePb.getType());
                }
              }));
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public boolean deleteTable(String datasetId, String tableId) {
    try {
      bigquery.tables().delete(this.options.projectId(), datasetId, tableId).execute();
      return true;
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return false;
      }
      throw serviceException;
    }
  }

  @Override
  public TableDataInsertAllResponse insertAll(String datasetId, String tableId,
      TableDataInsertAllRequest request) {
    try {
      return bigquery.tabledata()
          .insertAll(this.options.projectId(), datasetId, tableId, request)
          .execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Tuple<String, Iterable<TableRow>> listTableData(String datasetId, String tableId,
      Map<Option, ?> options) {
    try {
      Long startIndex = START_INDEX.getLong(options);
      TableDataList tableDataList = bigquery.tabledata()
          .list(this.options.projectId(), datasetId, tableId)
          .setMaxResults(MAX_RESULTS.getLong(options))
          .setPageToken(PAGE_TOKEN.getString(options))
          .setStartIndex(startIndex != null ? BigInteger.valueOf(startIndex) : null)
          .execute();
      return Tuple.<String, Iterable<TableRow>>of(tableDataList.getPageToken(),
          tableDataList.getRows());
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public Job getJob(String jobId, Map<Option, ?> options) {
    try {
      return bigquery.jobs()
          .get(this.options.projectId(), jobId)
          .setFields(FIELDS.getString(options))
          .execute();
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return null;
      }
      throw serviceException;
    }
  }

  @Override
  public Tuple<String, Iterable<Job>> listJobs(Map<Option, ?> options) {
    try {
      JobList jobsList = bigquery.jobs()
          .list(this.options.projectId())
          .setAllUsers(ALL_USERS.getBoolean(options))
          .setFields(FIELDS.getString(options))
          .setStateFilter(STATE_FILTER.<List<String>>get(options))
          .setMaxResults(MAX_RESULTS.getLong(options))
          .setPageToken(PAGE_TOKEN.getString(options))
          .setProjection(DEFAULT_PROJECTION)
          .execute();
      Iterable<JobList.Jobs> jobs = jobsList.getJobs();
      return Tuple.<String, Iterable<Job>>of(jobsList.getNextPageToken(),
          Iterables.transform(jobs != null ? jobs : ImmutableList.<JobList.Jobs>of(),
              new Function<JobList.Jobs, Job>() {
                @Override
                public Job apply(JobList.Jobs jobPb) {
                  // The list response may carry state/errorResult at the top level instead of
                  // inside the status object; merge them so callers see a consistent JobStatus.
                  JobStatus statusPb = jobPb.getStatus() != null
                      ? jobPb.getStatus() : new JobStatus();
                  if (statusPb.getState() == null) {
                    statusPb.setState(jobPb.getState());
                  }
                  if (statusPb.getErrorResult() == null) {
                    statusPb.setErrorResult(jobPb.getErrorResult());
                  }
                  return new Job()
                      .setConfiguration(jobPb.getConfiguration())
                      .setId(jobPb.getId())
                      .setJobReference(jobPb.getJobReference())
                      .setKind(jobPb.getKind())
                      .setStatistics(jobPb.getStatistics())
                      .setStatus(statusPb)
                      .setUserEmail(jobPb.getUserEmail());
                }
              }));
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public boolean cancel(String jobId) {
    try {
      bigquery.jobs().cancel(this.options.projectId(), jobId).execute();
      return true;
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return false;
      }
      throw serviceException;
    }
  }

  @Override
  public GetQueryResultsResponse getQueryResults(String jobId, Map<Option, ?> options) {
    try {
      Long startIndex = START_INDEX.getLong(options);
      return bigquery.jobs().getQueryResults(this.options.projectId(), jobId)
          .setMaxResults(MAX_RESULTS.getLong(options))
          .setPageToken(PAGE_TOKEN.getString(options))
          .setStartIndex(startIndex != null ? BigInteger.valueOf(startIndex) : null)
          .setTimeoutMs(TIMEOUT.getLong(options))
          .execute();
    } catch (IOException ex) {
      BigQueryException serviceException = translate(ex);
      if (serviceException.code() == HTTP_NOT_FOUND) {
        return null;
      }
      throw serviceException;
    }
  }

  @Override
  public QueryResponse query(QueryRequest request) {
    try {
      return bigquery.jobs().query(this.options.projectId(), request).execute();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public String open(JobConfiguration configuration) {
    try {
      Job loadJob = new Job().setConfiguration(configuration);
      StringBuilder builder = new StringBuilder()
          .append(BASE_RESUMABLE_URI)
          .append(options.projectId())
          .append("/jobs");
      GenericUrl url = new GenericUrl(builder.toString());
      url.set("uploadType", "resumable");
      JsonFactory jsonFactory = bigquery.getJsonFactory();
      HttpRequestFactory requestFactory = bigquery.getRequestFactory();
      HttpRequest httpRequest =
          requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob));
      // The resumable-upload protocol header is "X-Upload-Content-Type"; the original
      // "X-Upload-Content-Value" is not a recognized header.
      httpRequest.getHeaders().set("X-Upload-Content-Type", "application/octet-stream");
      HttpResponse response = httpRequest.execute();
      // The session URI for subsequent write() calls is returned in the Location header.
      return response.getHeaders().getLocation();
    } catch (IOException ex) {
      throw translate(ex);
    }
  }

  @Override
  public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length,
      boolean last) {
    try {
      GenericUrl url = new GenericUrl(uploadId);
      HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url,
          new ByteArrayContent(null, toWrite, toWriteOffset, length));
      long limit = destOffset + length;
      // Content-Range is "bytes first-last/total"; '*' for total while the upload is in progress.
      StringBuilder range = new StringBuilder("bytes ");
      range.append(destOffset).append('-').append(limit - 1).append('/');
      if (last) {
        range.append(limit);
      } else {
        range.append('*');
      }
      httpRequest.getHeaders().setContentRange(range.toString());
      int code;
      String message;
      IOException exception = null;
      try {
        HttpResponse response = httpRequest.execute();
        code = response.getStatusCode();
        message = response.getStatusMessage();
      } catch (HttpResponseException ex) {
        exception = ex;
        code = ex.getStatusCode();
        message = ex.getStatusMessage();
      }
      // Intermediate chunks must answer 308 (Resume Incomplete); the final chunk 200 or 201.
      if (!last && code != HTTP_RESUME_INCOMPLETE
          || last && !(code == HTTP_OK || code == HTTP_CREATED)) {
        if (exception != null) {
          throw exception;
        }
        throw new BigQueryException(code, message);
      }
    } catch (IOException ex) {
      throw translate(ex);
    }
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery.testing;

import com.google.gcloud.AuthCredentials;
import com.google.gcloud.RetryParams;
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryException;
import com.google.gcloud.bigquery.BigQueryOptions;

import java.io.IOException;
import java.io.InputStream;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Utility to create a remote BigQuery configuration for testing. BigQuery options can be obtained
 * via the {@link #options()} method. Returned options have custom
 * {@link BigQueryOptions#retryParams()}: {@link RetryParams#retryMaxAttempts()} is {@code 10},
 * {@link RetryParams#retryMinAttempts()} is {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is
 * {@code 30000}, {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and
 * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}.
 * {@link BigQueryOptions#connectTimeout()} and {@link BigQueryOptions#readTimeout()} are both set
 * to {@code 60000}.
 */
public class RemoteBigQueryHelper {

  private static final Logger log = Logger.getLogger(RemoteBigQueryHelper.class.getName());
  private static final String DATASET_NAME_PREFIX = "gcloud_test_dataset_temp_";
  private final BigQueryOptions options;

  private RemoteBigQueryHelper(BigQueryOptions options) {
    this.options = options;
  }

  /**
   * Returns a {@link BigQueryOptions} object to be used for testing.
   */
  public BigQueryOptions options() {
    return options;
  }

  /**
   * Deletes a dataset, even if non-empty.
   *
   * @param bigquery the BigQuery service to be used to issue the delete request
   * @param dataset the dataset to be deleted
   * @return {@code true} if deletion succeeded, {@code false} if the dataset was not found
   * @throws BigQueryException upon failure
   */
  public static boolean forceDelete(BigQuery bigquery, String dataset) {
    return bigquery.delete(dataset, BigQuery.DatasetDeleteOption.deleteContents());
  }

  /**
   * Returns a dataset name generated using a random UUID.
   */
  public static String generateDatasetName() {
    // Hyphens are not legal in BigQuery dataset names; replace them with underscores.
    return DATASET_NAME_PREFIX + UUID.randomUUID().toString().replace('-', '_');
  }

  /**
   * Creates a {@code RemoteBigQueryHelper} object for the given project id and JSON key input
   * stream.
   *
   * @param projectId id of the project to be used for running the tests
   * @param keyStream input stream for a JSON key
   * @return A {@code RemoteBigQueryHelper} object for the provided options
   * @throws BigQueryHelperException if {@code keyStream} is not a valid JSON key stream
   */
  public static RemoteBigQueryHelper create(String projectId, InputStream keyStream)
      throws BigQueryHelperException {
    try {
      BigQueryOptions bigqueryOptions = BigQueryOptions.builder()
          .authCredentials(AuthCredentials.createForJson(keyStream))
          .projectId(projectId)
          .retryParams(retryParams())
          .connectTimeout(60000)
          .readTimeout(60000)
          .build();
      return new RemoteBigQueryHelper(bigqueryOptions);
    } catch (IOException ex) {
      if (log.isLoggable(Level.WARNING)) {
        // Log the throwable as well, so the stack trace is not lost.
        log.log(Level.WARNING, ex.getMessage(), ex);
      }
      throw BigQueryHelperException.translate(ex);
    }
  }

  /**
   * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication
   * credentials.
   */
  public static RemoteBigQueryHelper create() {
    BigQueryOptions bigqueryOptions = BigQueryOptions.builder()
        .retryParams(retryParams())
        .connectTimeout(60000)
        .readTimeout(60000)
        .build();
    return new RemoteBigQueryHelper(bigqueryOptions);
  }

  /** Returns the aggressive retry configuration used for integration testing. */
  private static RetryParams retryParams() {
    return RetryParams.builder()
        .retryMaxAttempts(10)
        .retryMinAttempts(6)
        .maxRetryDelayMillis(30000)
        .totalRetryPeriodMillis(120000)
        .initialRetryDelayMillis(250)
        .build();
  }

  /** Wraps failures of {@link RemoteBigQueryHelper#create(String, InputStream)}. */
  public static class BigQueryHelperException extends RuntimeException {

    private static final long serialVersionUID = 3984993496060055562L;

    public BigQueryHelperException(String message, Throwable cause) {
      super(message, cause);
    }

    public static BigQueryHelperException translate(Exception ex) {
      return new BigQueryHelperException(ex.getMessage(), ex);
    }
  }
}

/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * A testing helper for Google BigQuery.
 *
 * <p>A simple usage example:
 *
 * <p>Before the test:
 * <pre> {@code
 * RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
 * BigQuery bigquery = bigqueryHelper.options().service();
 * String dataset = RemoteBigQueryHelper.generateDatasetName();
 * bigquery.create(DatasetInfo.builder(dataset).build());
 * } </pre>
 *
 * <p>After the test:
 * <pre> {@code
 * RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
 * }</pre>
 *
 * @see <a href="https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md">
 *     gcloud-java tools for testing</a>
 */
package com.google.gcloud.bigquery.testing;
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import com.google.api.services.bigquery.model.Dataset;
import com.google.gcloud.bigquery.Acl.Domain;
import com.google.gcloud.bigquery.Acl.Entity;
import com.google.gcloud.bigquery.Acl.Entity.Type;
import com.google.gcloud.bigquery.Acl.Group;
import com.google.gcloud.bigquery.Acl.Role;
import com.google.gcloud.bigquery.Acl.User;
import com.google.gcloud.bigquery.Acl.View;

import org.junit.Test;

/** Unit tests for {@link Acl} entities and their proto round-trips. */
public class AclTest {

  @Test
  public void testDomainEntity() {
    Domain entity = new Domain("d1");
    assertEquals("d1", entity.domain());
    assertEquals(Type.DOMAIN, entity.type());
    Dataset.Access pb = entity.toPb();
    assertEquals(entity, Entity.fromPb(pb));
  }

  @Test
  public void testGroupEntity() {
    Group entity = new Group("g1");
    assertEquals("g1", entity.identifier());
    assertEquals(Type.GROUP, entity.type());
    Dataset.Access pb = entity.toPb();
    assertEquals(entity, Entity.fromPb(pb));
  }

  @Test
  public void testSpecialGroupEntity() {
    Group entity = Group.ofAllAuthenticatedUsers();
    assertEquals("allAuthenticatedUsers", entity.identifier());
    entity = Group.ofProjectWriters();
    assertEquals("projectWriters", entity.identifier());
    entity = Group.ofProjectReaders();
    assertEquals("projectReaders", entity.identifier());
    entity = Group.ofProjectOwners();
    assertEquals("projectOwners", entity.identifier());
  }

  @Test
  public void testUserEntity() {
    User entity = new User("u1");
    assertEquals("u1", entity.email());
    assertEquals(Type.USER, entity.type());
    Dataset.Access pb = entity.toPb();
    assertEquals(entity, Entity.fromPb(pb));
  }

  @Test
  public void testViewEntity() {
    TableId viewId = TableId.of("project", "dataset", "view");
    View entity = new View(viewId);
    assertEquals(viewId, entity.id());
    assertEquals(Type.VIEW, entity.type());
    Dataset.Access pb = entity.toPb();
    assertEquals(entity, Entity.fromPb(pb));
  }

  @Test
  public void testOf() {
    Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER);
    assertEquals(Group.ofAllAuthenticatedUsers(), acl.entity());
    assertEquals(Role.READER, acl.role());
    Dataset.Access pb = acl.toPb();
    assertEquals(acl, Acl.fromPb(pb));
    View view = new View(TableId.of("project", "dataset", "view"));
    acl = Acl.of(view);
    assertEquals(view, acl.entity());
    // A view ACL carries no explicit role; use assertNull rather than assertEquals(null, ...).
    assertNull(acl.role());
  }
}

// ---------------------------------------------------------------------------
// File: com/google/gcloud/bigquery/BigQueryErrorTest.java (separate file in
// the original patch)
// ---------------------------------------------------------------------------

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import org.junit.Test;

/** Unit tests for {@link BigQueryError} construction and proto round-trips. */
public class BigQueryErrorTest {

  private static final String REASON = "reason";
  private static final String LOCATION = "location";
  private static final String DEBUG_INFO = "debugInfo";
  private static final String MESSAGE = "message";
  private static final BigQueryError ERROR =
      new BigQueryError(REASON, LOCATION, MESSAGE, DEBUG_INFO);
  private static final BigQueryError ERROR_INCOMPLETE =
      new BigQueryError(REASON, LOCATION, MESSAGE);

  @Test
  public void testConstructor() {
    assertEquals(REASON, ERROR.reason());
    assertEquals(LOCATION, ERROR.location());
    assertEquals(DEBUG_INFO, ERROR.debugInfo());
    assertEquals(MESSAGE, ERROR.message());
    assertEquals(REASON, ERROR_INCOMPLETE.reason());
    assertEquals(LOCATION, ERROR_INCOMPLETE.location());
    // The three-argument constructor leaves debugInfo unset.
    assertNull(ERROR_INCOMPLETE.debugInfo());
    assertEquals(MESSAGE, ERROR_INCOMPLETE.message());
  }

  @Test
  public void testToAndFromPb() {
    compareBigQueryError(ERROR, BigQueryError.fromPb(ERROR.toPb()));
    compareBigQueryError(ERROR_INCOMPLETE, BigQueryError.fromPb(ERROR_INCOMPLETE.toPb()));
  }

  // Static: the helper uses no instance state.
  private static void compareBigQueryError(BigQueryError expected, BigQueryError value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.reason(), value.reason());
    assertEquals(expected.location(), value.location());
    assertEquals(expected.debugInfo(), value.debugInfo());
    assertEquals(expected.message(), value.message());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class BigQueryExceptionTest { + + @Test + public void testBigqueryException() { + BigQueryException exception = new BigQueryException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(502, "message"); + assertEquals(502, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(504, "message"); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(400, "message"); + assertEquals(400, exception.code()); + assertEquals("message", 
exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + BigQueryError error = new BigQueryError("reason", null, null); + exception = new BigQueryException(504, "message", error); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertEquals("reason", exception.reason()); + assertEquals(error, exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new BigQueryException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BigQueryException cause = new BigQueryException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BigQueryException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java new file mode 100644 index 000000000000..b398f238386a --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java @@ -0,0 +1,1080 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.eq; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.TableCell; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.gcloud.Page; +import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpc.Tuple; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; + +import org.easymock.Capture; +import org.easymock.EasyMock; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import 
java.math.BigInteger; +import java.util.List; +import java.util.Map; + +public class BigQueryImplTest { + + private static final String PROJECT = "project"; + private static final String DATASET = "dataset"; + private static final String TABLE = "table"; + private static final String JOB = "job"; + private static final String OTHER_TABLE = "otherTable"; + private static final String OTHER_DATASET = "otherDataset"; + private static final List ACCESS_RULES = ImmutableList.of( + Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), + Acl.of(new Acl.View(TableId.of("dataset", "table")), Acl.Role.WRITER)); + private static final List ACCESS_RULES_WITH_PROJECT = ImmutableList.of( + Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), + Acl.of(new Acl.View(TableId.of(PROJECT, "dataset", "table")))); + private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET) + .acl(ACCESS_RULES) + .description("description") + .build(); + private static final DatasetInfo DATASET_INFO_WITH_PROJECT = DatasetInfo.builder(PROJECT, DATASET) + .acl(ACCESS_RULES_WITH_PROJECT) + .description("description") + .build(); + private static final DatasetInfo OTHER_DATASET_INFO = DatasetInfo.builder(PROJECT, OTHER_DATASET) + .acl(ACCESS_RULES) + .description("other description") + .build(); + private static final TableId TABLE_ID = TableId.of(DATASET, TABLE); + private static final TableId OTHER_TABLE_ID = TableId.of(PROJECT, DATASET, OTHER_TABLE); + private static final TableId TABLE_ID_WITH_PROJECT = TableId.of(PROJECT, DATASET, TABLE); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", 
Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final StandardTableDefinition TABLE_DEFINITION = + StandardTableDefinition.of(TABLE_SCHEMA); + private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_DEFINITION); + private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_DEFINITION); + private static final TableInfo TABLE_INFO_WITH_PROJECT = + TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_DEFINITION); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION = + LoadJobConfiguration.of(TABLE_ID, "URI"); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION_WITH_PROJECT = + LoadJobConfiguration.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final JobInfo LOAD_JOB = + JobInfo.of(LOAD_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_LOAD_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), LOAD_JOB_CONFIGURATION_WITH_PROJECT); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID, ImmutableList.of(TABLE_ID, TABLE_ID)); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_WITH_PROJECT = + CopyJobConfiguration.of(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT, + TABLE_ID_WITH_PROJECT)); + private static final JobInfo COPY_JOB = JobInfo.of(COPY_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_COPY_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), COPY_JOB_CONFIGURATION_WITH_PROJECT); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = + QueryJobConfiguration.builder("SQL") + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(TABLE_ID) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_WITH_PROJECT = + QueryJobConfiguration.builder("SQL") + 
.defaultDataset(DatasetId.of(PROJECT, DATASET)) + .destinationTable(TABLE_ID_WITH_PROJECT) + .build(); + private static final JobInfo QUERY_JOB = JobInfo.of(QUERY_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_QUERY_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), QUERY_JOB_CONFIGURATION_WITH_PROJECT); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION = + ExtractJobConfiguration.of(TABLE_ID, "URI"); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION_WITH_PROJECT = + ExtractJobConfiguration.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final JobInfo EXTRACT_JOB = JobInfo.of(EXTRACT_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_EXTRACT_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), EXTRACT_JOB_CONFIGURATION_WITH_PROJECT); + private static final TableCell BOOLEAN_FIELD = new TableCell().setV("false"); + private static final TableCell INTEGER_FIELD = new TableCell().setV("1"); + private static final TableRow TABLE_ROW = + new TableRow().setF(ImmutableList.of(BOOLEAN_FIELD, INTEGER_FIELD)); + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("SQL") + .pageSize(42L) + .useQueryCache(false) + .defaultDataset(DatasetId.of(DATASET)) + .build(); + private static final QueryRequest QUERY_REQUEST_WITH_PROJECT = QueryRequest.builder("SQL") + .pageSize(42L) + .useQueryCache(false) + .defaultDataset(DatasetId.of(PROJECT, DATASET)) + .build(); + + // Empty BigQueryRpc options + private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + + // Dataset options + private static final BigQuery.DatasetOption DATASET_OPTION_FIELDS = + BigQuery.DatasetOption.fields(BigQuery.DatasetField.ACCESS, BigQuery.DatasetField.ETAG); + + // Dataset list options + private static final BigQuery.DatasetListOption DATASET_LIST_ALL = + BigQuery.DatasetListOption.all(); + private static final BigQuery.DatasetListOption DATASET_LIST_PAGE_TOKEN = + BigQuery.DatasetListOption.startPageToken("cursor"); + private 
static final BigQuery.DatasetListOption DATASET_LIST_PAGE_SIZE = + BigQuery.DatasetListOption.pageSize(42L); + private static final Map DATASET_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.ALL_DATASETS, true, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 42L); + + // Dataset delete options + private static final BigQuery.DatasetDeleteOption DATASET_DELETE_CONTENTS = + BigQuery.DatasetDeleteOption.deleteContents(); + private static final Map DATASET_DELETE_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.DELETE_CONTENTS, true); + + // Table options + private static final BigQuery.TableOption TABLE_OPTION_FIELDS = + BigQuery.TableOption.fields(BigQuery.TableField.SCHEMA, BigQuery.TableField.ETAG); + + // Table list options + private static final BigQuery.TableListOption TABLE_LIST_PAGE_SIZE = + BigQuery.TableListOption.pageSize(42L); + private static final BigQuery.TableListOption TABLE_LIST_PAGE_TOKEN = + BigQuery.TableListOption.startPageToken("cursor"); + private static final Map TABLE_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.MAX_RESULTS, 42L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor"); + + // TableData list options + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_SIZE = + BigQuery.TableDataListOption.pageSize(42L); + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_TOKEN = + BigQuery.TableDataListOption.startPageToken("cursor"); + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_START_INDEX = + BigQuery.TableDataListOption.startIndex(0L); + private static final Map TABLE_DATA_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.MAX_RESULTS, 42L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.START_INDEX, 0L); + + // Job options + private static final BigQuery.JobOption JOB_OPTION_FIELDS = + BigQuery.JobOption.fields(BigQuery.JobField.USER_EMAIL); + + // Job list options + private static final BigQuery.JobListOption 
JOB_LIST_OPTION_FIELD = + BigQuery.JobListOption.fields(BigQuery.JobField.STATISTICS); + private static final BigQuery.JobListOption JOB_LIST_ALL_USERS = + BigQuery.JobListOption.allUsers(); + private static final BigQuery.JobListOption JOB_LIST_STATE_FILTER = + BigQuery.JobListOption.stateFilter(JobStatus.State.DONE, JobStatus.State.PENDING); + private static final BigQuery.JobListOption JOB_LIST_PAGE_TOKEN = + BigQuery.JobListOption.startPageToken("cursor"); + private static final BigQuery.JobListOption JOB_LIST_PAGE_SIZE = + BigQuery.JobListOption.pageSize(42L); + private static final Map JOB_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.ALL_USERS, true, + BigQueryRpc.Option.STATE_FILTER, ImmutableList.of("done", "pending"), + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 42L); + + // Query Results options + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_TIME = + BigQuery.QueryResultsOption.maxWaitTime(42L); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_INDEX = + BigQuery.QueryResultsOption.startIndex(1024L); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_TOKEN = + BigQuery.QueryResultsOption.startPageToken("cursor"); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_SIZE = + BigQuery.QueryResultsOption.pageSize(0L); + private static final Map QUERY_RESULTS_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.TIMEOUT, 42L, + BigQueryRpc.Option.START_INDEX, 1024L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 0L); + + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private BigQueryRpc bigqueryRpcMock; + private BigQuery bigquery; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setUp() { + rpcFactoryMock = EasyMock.createMock(BigQueryRpcFactory.class); + bigqueryRpcMock = EasyMock.createMock(BigQueryRpc.class); + 
EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(BigQueryOptions.class))) + .andReturn(bigqueryRpcMock); + EasyMock.replay(rpcFactoryMock); + options = BigQueryOptions.builder() + .projectId(PROJECT) + .serviceRpcFactory(rpcFactoryMock) + .retryParams(RetryParams.noRetries()) + .build(); + } + + @After + public void tearDown() { + EasyMock.verify(rpcFactoryMock, bigqueryRpcMock); + } + + @Test + public void testGetOptions() { + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertSame(options, bigquery.options()); + } + + @Test + public void testCreateDataset() { + EasyMock.expect(bigqueryRpcMock.create(DATASET_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.create(DATASET_INFO); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testCreateDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testGetDataset() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = 
options.service(); + Dataset dataset = bigquery.getDataset(DATASET); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testGetDatasetFromDatasetId() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.getDataset(DatasetId.of(PROJECT, DATASET)); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testGetDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.getDataset(eq(DATASET), capture(capturedOptions))) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testListDatasets() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList datasetList = ImmutableList.of( + new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page page = bigquery.listDatasets(); + assertEquals(cursor, page.nextPageCursor()); + 
assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class)); + } + + @Test + public void testListEmptyDatasets() { + ImmutableList datasets = ImmutableList.of(); + Tuple> result = + Tuple.>of(null, datasets); + EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listDatasets(); + assertNull(page.nextPageCursor()); + assertArrayEquals(ImmutableList.of().toArray(), + Iterables.toArray(page.values(), Dataset.class)); + } + + @Test + public void testListDatasetsWithOptions() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList datasetList = ImmutableList.of( + new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listDatasets(DATASET_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN, + DATASET_LIST_PAGE_SIZE); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class)); + } + + @Test + public void testDeleteDataset() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET)); + } + + @Test + public void testDeleteDatasetFromDatasetId() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DatasetId.of(PROJECT, DATASET))); + } + + @Test + public void testDeleteDatasetWithOptions() { + 
EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, DATASET_DELETE_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, DATASET_DELETE_CONTENTS)); + } + + @Test + public void testUpdateDataset() { + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedDatasetInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.update(updatedDatasetInfo); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)), + dataset); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect( + bigqueryRpcMock.patch(eq(updatedDatasetInfoWithProject.toPb()), capture(capturedOptions))) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Dataset dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)), + dataset); + } + + @Test + public void testCreateTable() { + 
EasyMock.expect(bigqueryRpcMock.create(TABLE_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.create(TABLE_INFO); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); + } + + @Test + public void testCreateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); + } + + @Test + public void testGetTable() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.getTable(DATASET, TABLE); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); + } + + @Test + public void testGetTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.getTable(TABLE_ID); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); + } + + @Test + public void testGetTableWithSelectedFields() { + Capture> capturedOptions = 
Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.getTable(eq(DATASET), eq(TABLE), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); + } + + @Test + public void testListTables() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page
page = bigquery.listTables(DATASET); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); + } + + @Test + public void testListTablesFromDatasetId() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page
page = bigquery.listTables(DatasetId.of(PROJECT, DATASET)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); + } + + @Test + public void testListTablesWithOptions() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList
tableList = ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), Table.class)); + } + + @Test + public void testDeleteTable() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, TABLE)); + } + + @Test + public void testDeleteTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(TABLE_ID)); + } + + @Test + public void testUpdateTable() { + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedTableInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.update(updatedTableInfo); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), + table); + } + + @Test + public void testUpdateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), + capture(capturedOptions))).andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Table table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); + String selector = (String) 
capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), + table); + } + + @Test + public void testInsertAll() { + Map row1 = ImmutableMap.of("field", "value1"); + Map row2 = ImmutableMap.of("field", "value2"); + List rows = ImmutableList.of( + new RowToInsert("row1", row1), + new RowToInsert("row2", row2) + ); + InsertAllRequest request = InsertAllRequest.builder(TABLE_ID) + .rows(rows) + .skipInvalidRows(false) + .ignoreUnknownValues(true) + .templateSuffix("suffix") + .build(); + TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest().setRows( + Lists.transform(rows, new Function() { + @Override + public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { + return new TableDataInsertAllRequest.Rows().setInsertId(rowToInsert.id()) + .setJson(rowToInsert.content()); + } + })).setSkipInvalidRows(false).setIgnoreUnknownValues(true).setTemplateSuffix("suffix"); + TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse().setInsertErrors( + ImmutableList.of(new TableDataInsertAllResponse.InsertErrors().setIndex(0L).setErrors( + ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); + EasyMock.expect(bigqueryRpcMock.insertAll(DATASET, TABLE, requestPb)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + InsertAllResponse response = bigquery.insertAll(request); + assertNotNull(response.errorsFor(0L)); + assertNull(response.errorsFor(1L)); + assertEquals(1, response.errorsFor(0L).size()); + assertEquals("ErrorMessage", response.errorsFor(0L).get(0).message()); + } + + @Test + public void testListTableData() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new 
com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataFromTableId() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + 
EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(TableId.of(DATASET, TABLE)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataWithOptions() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE, + TABLE_DATA_LIST_PAGE_SIZE, TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testCreateQueryJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = 
bigquery.create(QUERY_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), job); + } + + @Test + public void testCreateLoadJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(LOAD_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_LOAD_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.create(LOAD_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB)), job); + } + + @Test + public void testCreateCopyJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(COPY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.create(COPY_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); + } + + @Test + public void testCreateExtractJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(EXTRACT_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_EXTRACT_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.create(EXTRACT_JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_EXTRACT_JOB)), job); + } + + @Test + public void testCreateJobWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.create( + eq(JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb()), capture(capturedOptions))) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), job); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("jobReference")); + 
assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("user_email")); + assertEquals(37, selector.length()); + } + + @Test + public void testGetJob() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.getJob(JOB); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); + } + + @Test + public void testGetJobFromJobId() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Job job = bigquery.getJob(JobId.of(PROJECT, JOB)); + assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); + } + + @Test + public void testListJobs() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page page = bigquery.listJobs(); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); + } + + @Test + public void testListJobsWithOptions() { + String cursor = "cursor"; + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public 
com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(JOB_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page page = bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, + JOB_LIST_PAGE_TOKEN, JOB_LIST_PAGE_SIZE); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); + } + + @Test + public void testListJobsWithSelectedFields() { + String cursor = "cursor"; + Capture> capturedOptions = Capture.newInstance(); + bigquery = options.service(); + ImmutableList jobList = ImmutableList.of( + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_QUERY_JOB)), + new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_LOAD_JOB))); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, + new Function() { + @Override + public com.google.api.services.bigquery.model.Job apply(Job job) { + return job.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(capture(capturedOptions))).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), Job.class)); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("etag,jobs(")); + assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("jobReference")); + assertTrue(selector.contains("statistics")); + assertTrue(selector.contains("state,errorResult),nextPageToken")); + assertEquals(80, selector.length()); + } + + @Test + public void testCancelJob() { + EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.cancel(JOB)); + } + + @Test + public void testCancelJobFromJobId() { + 
EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); + } + + @Test + public void testQueryRequest() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setJobComplete(false); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertNull(response.result()); + assertEquals(queryJob, response.jobId()); + assertEquals(false, response.jobCompleted()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result()); + } + + @Test + public void testQueryRequestCompleted() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + 
assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResults() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setEtag("etag") + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, EMPTY_RPC_OPTIONS)).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob); + assertEquals("etag", response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResultsWithOptions() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setJobReference(queryJob.toPb()) + 
.setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, QUERY_RESULTS_OPTIONS)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob, QUERY_RESULTS_OPTION_TIME, + QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_PAGE_SIZE, + QUERY_RESULTS_OPTION_PAGE_TOKEN); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testWriter() { + WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.of(TABLE_ID); + EasyMock.expect( + bigqueryRpcMock.open(WriteChannelConfiguration.of(TABLE_ID_WITH_PROJECT).toPb())) + .andReturn("upload-id"); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + WriteChannel channel = bigquery.writer(writeChannelConfiguration); + assertNotNull(channel); + assertTrue(channel.isOpen()); + } + + @Test + public void testRetryableException() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(500, "InternalError")) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + Dataset 
dataset = bigquery.getDataset(DATASET); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), + dataset); + } + + @Test + public void testNonRetryableException() { + String exceptionMessage = "Not Implemented"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(501, exceptionMessage)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DatasetId.of(DATASET)); + } + + @Test + public void testRuntimeException() { + String exceptionMessage = "Artificial runtime exception"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new RuntimeException(exceptionMessage)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DATASET); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java new file mode 100644 index 000000000000..3f3f6f0fd15c --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java @@ -0,0 +1,130 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.util.List; + +public class CopyJobConfigurationTest { + + private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); + private static final List SOURCE_TABLES = ImmutableList.of( + TableId.of("dataset", "sourceTable1"), + TableId.of("dataset", "sourceTable2")); + private static final TableId DESTINATION_TABLE = TableId.of("dataset", "destinationTable"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLE) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .build(); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_MULTIPLE_TABLES = + CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLES) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .build(); + + @Test + public void testToBuilder() { + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, COPY_JOB_CONFIGURATION.toBuilder().build()); 
+ compareCopyJobConfiguration(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, + COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder().build()); + CopyJobConfiguration jobConfiguration = COPY_JOB_CONFIGURATION.toBuilder() + .destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", jobConfiguration.destinationTable().table()); + jobConfiguration = jobConfiguration.toBuilder().destinationTable(DESTINATION_TABLE).build(); + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, jobConfiguration); + } + + @Test + public void testOf() { + CopyJobConfiguration job = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(SOURCE_TABLES, job.sourceTables()); + job = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLE); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(ImmutableList.of(SOURCE_TABLE), job.sourceTables()); + } + + @Test + public void testToBuilderIncomplete() { + CopyJobConfiguration jobConfiguration = + CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); + compareCopyJobConfiguration(jobConfiguration, jobConfiguration.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.destinationTable()); + assertEquals(SOURCE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.sourceTables()); + assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.createDisposition()); + assertEquals(WRITE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.writeDisposition()); + assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION.destinationTable()); + assertEquals(ImmutableList.of(SOURCE_TABLE), COPY_JOB_CONFIGURATION.sourceTables()); + assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, COPY_JOB_CONFIGURATION.writeDisposition()); + } + + @Test + public void testToPbAndFromPb() { + 
assertNotNull(COPY_JOB_CONFIGURATION.toPb().getCopy()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getExtract()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getLoad()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getQuery()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getCopy().getSourceTables()); + assertNull(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toPb().getCopy().getSourceTable()); + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, + CopyJobConfiguration.fromPb(COPY_JOB_CONFIGURATION.toPb())); + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, + CopyJobConfiguration.fromPb(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toPb())); + CopyJobConfiguration jobConfiguration = + CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); + compareCopyJobConfiguration( + jobConfiguration, CopyJobConfiguration.fromPb(jobConfiguration.toPb())); + } + + @Test + public void testSetProjectId() { + CopyJobConfiguration configuration = COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.setProjectId("p"); + assertEquals("p", configuration.destinationTable().project()); + for (TableId sourceTable : configuration.sourceTables()) { + assertEquals("p", sourceTable.project()); + } + } + + private void compareCopyJobConfiguration(CopyJobConfiguration expected, + CopyJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.sourceTables(), value.sourceTables()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java new file mode 100644 index 000000000000..371202174431 --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java @@ -0,0 +1,87 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; + +public class CsvOptionsTest { + + private static final Boolean ALLOW_JAGGED_ROWS = true; + private static final Boolean ALLOW_QUOTED_NEWLINE = true; + private static final Charset ENCODING = StandardCharsets.UTF_8; + private static final String FIELD_DELIMITER = ","; + private static final String QUOTE = "\""; + private static final Integer SKIP_LEADING_ROWS = 42; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(ALLOW_JAGGED_ROWS) + .allowQuotedNewLines(ALLOW_QUOTED_NEWLINE) + .encoding(ENCODING) + .fieldDelimiter(FIELD_DELIMITER) + .quote(QUOTE) + .skipLeadingRows(SKIP_LEADING_ROWS) + .build(); + + @Test + public void testToBuilder() { + compareCsvOptions(CSV_OPTIONS, CSV_OPTIONS.toBuilder().build()); + CsvOptions csvOptions = CSV_OPTIONS.toBuilder() + .fieldDelimiter(";") + .build(); + assertEquals(";", csvOptions.fieldDelimiter()); + csvOptions = csvOptions.toBuilder().fieldDelimiter(",").build(); + compareCsvOptions(CSV_OPTIONS, csvOptions); + } + + @Test + public void testToBuilderIncomplete() { + CsvOptions 
csvOptions = CsvOptions.builder().fieldDelimiter("|").build(); + assertEquals(csvOptions, csvOptions.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FormatOptions.CSV, CSV_OPTIONS.type()); + assertEquals(ALLOW_JAGGED_ROWS, CSV_OPTIONS.allowJaggedRows()); + assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines()); + assertEquals(ENCODING.name(), CSV_OPTIONS.encoding()); + assertEquals(FIELD_DELIMITER, CSV_OPTIONS.fieldDelimiter()); + assertEquals(QUOTE, CSV_OPTIONS.quote()); + assertEquals(SKIP_LEADING_ROWS, CSV_OPTIONS.skipLeadingRows()); + } + + @Test + public void testToAndFromPb() { + compareCsvOptions(CSV_OPTIONS, CsvOptions.fromPb(CSV_OPTIONS.toPb())); + CsvOptions csvOptions = CsvOptions.builder().allowJaggedRows(ALLOW_JAGGED_ROWS).build(); + compareCsvOptions(csvOptions, CsvOptions.fromPb(csvOptions.toPb())); + } + + private void compareCsvOptions(CsvOptions expected, CsvOptions value) { + assertEquals(expected, value); + assertEquals(expected.allowJaggedRows(), value.allowJaggedRows()); + assertEquals(expected.allowQuotedNewLines(), value.allowQuotedNewLines()); + assertEquals(expected.encoding(), value.encoding()); + assertEquals(expected.fieldDelimiter(), value.fieldDelimiter()); + assertEquals(expected.quote(), value.quote()); + assertEquals(expected.skipLeadingRows(), value.skipLeadingRows()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java new file mode 100644 index 000000000000..ec645d71c96f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class DatasetIdTest { + + private static final DatasetId DATASET = DatasetId.of("dataset"); + private static final DatasetId DATASET_COMPLETE = DatasetId.of("project", "dataset"); + + @Test + public void testOf() { + assertEquals(null, DATASET.project()); + assertEquals("dataset", DATASET.dataset()); + assertEquals("project", DATASET_COMPLETE.project()); + assertEquals("dataset", DATASET_COMPLETE.dataset()); + } + + @Test + public void testEquals() { + compareDatasetIds(DATASET, DatasetId.of("dataset")); + compareDatasetIds(DATASET_COMPLETE, DatasetId.of("project", "dataset")); + } + + @Test + public void testToPbAndFromPb() { + compareDatasetIds(DATASET, DatasetId.fromPb(DATASET.toPb())); + compareDatasetIds(DATASET_COMPLETE, DatasetId.fromPb(DATASET_COMPLETE.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals(DATASET_COMPLETE, DATASET.setProjectId("project")); + } + + private void compareDatasetIds(DatasetId expected, DatasetId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java new file mode 100644 index 000000000000..20875c0fc853 --- /dev/null +++ 
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link DatasetInfo}: builder, toBuilder round trips, protobuf
 * serialization, and project-id substitution.
 */
public class DatasetInfoTest {

  // Raw List replaced with List<Acl>: raw generic types defeat compile-time checking.
  private static final List<Acl> ACCESS_RULES = ImmutableList.of(
      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
      Acl.of(new Acl.View(TableId.of("dataset", "table"))));
  private static final List<Acl> ACCESS_RULES_COMPLETE = ImmutableList.of(
      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
      Acl.of(new Acl.View(TableId.of("project", "dataset", "table"))));
  private static final Long CREATION_TIME = System.currentTimeMillis();
  private static final Long DEFAULT_TABLE_EXPIRATION = CREATION_TIME + 100;
  private static final String DESCRIPTION = "description";
  private static final String ETAG = "0xFF00";
  private static final String FRIENDLY_NAME = "friendlyDataset";
  private static final String ID = "P/D:1";
  private static final Long LAST_MODIFIED = CREATION_TIME + 50;
  private static final String LOCATION = "";
  private static final String SELF_LINK = "http://bigquery/p/d";
  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
  private static final DatasetId DATASET_ID_COMPLETE = DatasetId.of("project", "dataset");
  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID)
      .acl(ACCESS_RULES)
      .creationTime(CREATION_TIME)
      .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION)
      .description(DESCRIPTION)
      .etag(ETAG)
      .friendlyName(FRIENDLY_NAME)
      .id(ID)
      .lastModified(LAST_MODIFIED)
      .location(LOCATION)
      .selfLink(SELF_LINK)
      .build();
  // Same info but with a fully-qualified dataset id and project-qualified ACLs.
  private static final DatasetInfo DATASET_INFO_COMPLETE = DATASET_INFO.toBuilder()
      .datasetId(DATASET_ID_COMPLETE)
      .acl(ACCESS_RULES_COMPLETE)
      .build();

  @Test
  public void testToBuilder() {
    compareDatasets(DATASET_INFO, DATASET_INFO.toBuilder().build());
    // Mutate two fields, check them, then restore and verify full equality.
    DatasetInfo datasetInfo = DATASET_INFO.toBuilder()
        .datasetId(DatasetId.of("dataset2"))
        .description("description2")
        .build();
    assertEquals(DatasetId.of("dataset2"), datasetInfo.datasetId());
    assertEquals("description2", datasetInfo.description());
    datasetInfo = datasetInfo.toBuilder().datasetId(DATASET_ID).description("description").build();
    compareDatasets(DATASET_INFO, datasetInfo);
  }

  @Test
  public void testToBuilderIncomplete() {
    // toBuilder().build() must be an identity even when optional fields are unset.
    DatasetInfo datasetInfo = DatasetInfo.builder(DATASET_ID).build();
    assertEquals(datasetInfo, datasetInfo.toBuilder().build());
  }

  @Test
  public void testBuilder() {
    assertNull(DATASET_INFO.datasetId().project());
    assertEquals(DATASET_ID, DATASET_INFO.datasetId());
    assertEquals(ACCESS_RULES, DATASET_INFO.acl());
    assertEquals(CREATION_TIME, DATASET_INFO.creationTime());
    assertEquals(DEFAULT_TABLE_EXPIRATION, DATASET_INFO.defaultTableLifetime());
    assertEquals(DESCRIPTION, DATASET_INFO.description());
    assertEquals(ETAG, DATASET_INFO.etag());
    assertEquals(FRIENDLY_NAME, DATASET_INFO.friendlyName());
    assertEquals(ID, DATASET_INFO.id());
    assertEquals(LAST_MODIFIED, DATASET_INFO.lastModified());
    assertEquals(LOCATION, DATASET_INFO.location());
    assertEquals(SELF_LINK, DATASET_INFO.selfLink());
    assertEquals(DATASET_ID_COMPLETE, DATASET_INFO_COMPLETE.datasetId());
    assertEquals(ACCESS_RULES_COMPLETE, DATASET_INFO_COMPLETE.acl());
    assertEquals(CREATION_TIME, DATASET_INFO_COMPLETE.creationTime());
    assertEquals(DEFAULT_TABLE_EXPIRATION, DATASET_INFO_COMPLETE.defaultTableLifetime());
    assertEquals(DESCRIPTION, DATASET_INFO_COMPLETE.description());
    assertEquals(ETAG, DATASET_INFO_COMPLETE.etag());
    assertEquals(FRIENDLY_NAME, DATASET_INFO_COMPLETE.friendlyName());
    assertEquals(ID, DATASET_INFO_COMPLETE.id());
    assertEquals(LAST_MODIFIED, DATASET_INFO_COMPLETE.lastModified());
    assertEquals(LOCATION, DATASET_INFO_COMPLETE.location());
    assertEquals(SELF_LINK, DATASET_INFO_COMPLETE.selfLink());
  }

  @Test
  public void testToPbAndFromPb() {
    compareDatasets(DATASET_INFO_COMPLETE, DatasetInfo.fromPb(DATASET_INFO_COMPLETE.toPb()));
    DatasetInfo datasetInfo = DatasetInfo.builder("project", "dataset").build();
    compareDatasets(datasetInfo, DatasetInfo.fromPb(datasetInfo.toPb()));
  }

  @Test
  public void testSetProjectId() {
    // setProjectId must qualify both the dataset id and the view ACLs.
    assertEquals(DATASET_INFO_COMPLETE, DATASET_INFO.setProjectId("project"));
  }

  // Asserts field-by-field equality of two DatasetInfo instances.
  private void compareDatasets(DatasetInfo expected, DatasetInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.datasetId(), value.datasetId());
    assertEquals(expected.description(), value.description());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.friendlyName(), value.friendlyName());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.location(), value.location());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.acl(), value.acl());
    assertEquals(expected.creationTime(), value.creationTime());
    assertEquals(expected.defaultTableLifetime(), value.defaultTableLifetime());
    assertEquals(expected.lastModified(), value.lastModified());
  }
}
package com.google.gcloud.bigquery;

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.gcloud.Page;
import com.google.gcloud.PageImpl;

import org.junit.After;
import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link Dataset}. The BigQuery service is mocked with EasyMock;
 * {@code serviceMockReturnsOptions} answers {@code options()} calls made while
 * functional objects are constructed, while {@code bigquery} records the RPC
 * expectations of the method under test.
 */
public class DatasetTest {

  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
  // Raw List replaced with List<Acl>: raw generic types defeat compile-time checking.
  private static final List<Acl> ACCESS_RULES = ImmutableList.of(
      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
      Acl.of(new Acl.View(TableId.of("dataset", "table"))));
  private static final Long CREATION_TIME = System.currentTimeMillis();
  private static final Long DEFAULT_TABLE_EXPIRATION = CREATION_TIME + 100;
  private static final String DESCRIPTION = "description";
  private static final String ETAG = "0xFF00";
  private static final String FRIENDLY_NAME = "friendlyDataset";
  private static final String ID = "P/D:1";
  private static final Long LAST_MODIFIED = CREATION_TIME + 50;
  private static final String LOCATION = "";
  private static final String SELF_LINK = "http://bigquery/p/d";
  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID).build();
  private static final Field FIELD = Field.of("FieldName", Field.Type.integer());
  private static final StandardTableDefinition TABLE_DEFINITION =
      StandardTableDefinition.of(Schema.of(FIELD));
  private static final ViewDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY");
  private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION =
      ExternalTableDefinition.of(ImmutableList.of("URI"), Schema.of(), FormatOptions.csv());
  private static final TableInfo TABLE_INFO1 =
      TableInfo.builder(TableId.of("dataset", "table1"), TABLE_DEFINITION).build();
  private static final TableInfo TABLE_INFO2 =
      TableInfo.builder(TableId.of("dataset", "table2"), VIEW_DEFINITION).build();
  private static final TableInfo TABLE_INFO3 =
      TableInfo.builder(TableId.of("dataset", "table3"), EXTERNAL_TABLE_DEFINITION).build();

  private BigQuery serviceMockReturnsOptions = createStrictMock(BigQuery.class);
  private BigQueryOptions mockOptions = createMock(BigQueryOptions.class);
  private BigQuery bigquery;
  private Dataset expectedDataset;
  private Dataset dataset;

  // optionsCalls is the number of options() calls expected on the construction-time mock.
  private void initializeExpectedDataset(int optionsCalls) {
    expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls);
    replay(serviceMockReturnsOptions);
    bigquery = createStrictMock(BigQuery.class);
    expectedDataset = new Dataset(serviceMockReturnsOptions, new Dataset.BuilderImpl(DATASET_INFO));
  }

  private void initializeDataset() {
    dataset = new Dataset(bigquery, new Dataset.BuilderImpl(DATASET_INFO));
  }

  @After
  public void tearDown() throws Exception {
    verify(bigquery, serviceMockReturnsOptions);
  }

  @Test
  public void testBuilder() {
    initializeExpectedDataset(2);
    replay(bigquery);
    Dataset builtDataset = new Dataset.Builder(serviceMockReturnsOptions, DATASET_ID)
        .acl(ACCESS_RULES)
        .creationTime(CREATION_TIME)
        .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION)
        .description(DESCRIPTION)
        .etag(ETAG)
        .friendlyName(FRIENDLY_NAME)
        .id(ID)
        .lastModified(LAST_MODIFIED)
        .location(LOCATION)
        .selfLink(SELF_LINK)
        .build();
    assertEquals(DATASET_ID, builtDataset.datasetId());
    assertEquals(ACCESS_RULES, builtDataset.acl());
    assertEquals(CREATION_TIME, builtDataset.creationTime());
    assertEquals(DEFAULT_TABLE_EXPIRATION, builtDataset.defaultTableLifetime());
    assertEquals(DESCRIPTION, builtDataset.description());
    assertEquals(ETAG, builtDataset.etag());
    assertEquals(FRIENDLY_NAME, builtDataset.friendlyName());
    assertEquals(ID, builtDataset.id());
    assertEquals(LAST_MODIFIED, builtDataset.lastModified());
    assertEquals(LOCATION, builtDataset.location());
    assertEquals(SELF_LINK, builtDataset.selfLink());
  }

  @Test
  public void testToBuilder() {
    initializeExpectedDataset(4);
    replay(bigquery);
    compareDataset(expectedDataset, expectedDataset.toBuilder().build());
  }

  @Test
  public void testExists_True() throws Exception {
    initializeExpectedDataset(1);
    // exists() asks for no fields at all: the cheapest possible existence probe.
    BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()};
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getDataset(DATASET_INFO.datasetId(), expectedOptions))
        .andReturn(expectedDataset);
    replay(bigquery);
    initializeDataset();
    assertTrue(dataset.exists());
  }

  @Test
  public void testExists_False() throws Exception {
    initializeExpectedDataset(1);
    BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()};
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getDataset(DATASET_INFO.datasetId(), expectedOptions)).andReturn(null);
    replay(bigquery);
    initializeDataset();
    assertFalse(dataset.exists());
  }

  @Test
  public void testReload() throws Exception {
    initializeExpectedDataset(4);
    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
    Dataset expectedDataset =
        new Dataset(serviceMockReturnsOptions, new DatasetInfo.BuilderImpl(updatedInfo));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(expectedDataset);
    replay(bigquery);
    initializeDataset();
    Dataset updatedDataset = dataset.reload();
    compareDataset(expectedDataset, updatedDataset);
  }

  @Test
  public void testReloadNull() throws Exception {
    initializeExpectedDataset(1);
    // reload() returns null when the dataset no longer exists server-side.
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(null);
    replay(bigquery);
    initializeDataset();
    assertNull(dataset.reload());
  }

  @Test
  public void testReloadWithOptions() throws Exception {
    initializeExpectedDataset(4);
    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
    Dataset expectedDataset =
        new Dataset(serviceMockReturnsOptions, new DatasetInfo.BuilderImpl(updatedInfo));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset(), BigQuery.DatasetOption.fields()))
        .andReturn(expectedDataset);
    replay(bigquery);
    initializeDataset();
    Dataset updatedDataset = dataset.reload(BigQuery.DatasetOption.fields());
    compareDataset(expectedDataset, updatedDataset);
  }

  @Test
  public void testUpdate() {
    initializeExpectedDataset(4);
    Dataset expectedUpdatedDataset = expectedDataset.toBuilder().description("Description").build();
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.update(eq(expectedDataset))).andReturn(expectedUpdatedDataset);
    replay(bigquery);
    initializeDataset();
    Dataset actualUpdatedDataset = dataset.update();
    compareDataset(expectedUpdatedDataset, actualUpdatedDataset);
  }

  @Test
  public void testUpdateWithOptions() {
    initializeExpectedDataset(4);
    Dataset expectedUpdatedDataset = expectedDataset.toBuilder().description("Description").build();
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.update(eq(expectedDataset), eq(BigQuery.DatasetOption.fields())))
        .andReturn(expectedUpdatedDataset);
    replay(bigquery);
    initializeDataset();
    Dataset actualUpdatedDataset = dataset.update(BigQuery.DatasetOption.fields());
    compareDataset(expectedUpdatedDataset, actualUpdatedDataset);
  }

  @Test
  public void testDeleteTrue() {
    initializeExpectedDataset(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(true);
    replay(bigquery);
    initializeDataset();
    assertTrue(dataset.delete());
  }

  @Test
  public void testDeleteFalse() {
    initializeExpectedDataset(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(false);
    replay(bigquery);
    initializeDataset();
    assertFalse(dataset.delete());
  }

  @Test
  public void testList() throws Exception {
    initializeExpectedDataset(4);
    // Raw collections replaced with List<Table>/PageImpl<Table>/Page<Table>.
    List<Table> tableResults = ImmutableList.of(
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO1)),
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO2)),
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO3)));
    PageImpl<Table> expectedPage = new PageImpl<>(null, "c", tableResults);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.listTables(DATASET_INFO.datasetId())).andReturn(expectedPage);
    replay(bigquery);
    initializeDataset();
    Page<Table> tablePage = dataset.list();
    assertArrayEquals(tableResults.toArray(), Iterables.toArray(tablePage.values(), Table.class));
    assertEquals(expectedPage.nextPageCursor(), tablePage.nextPageCursor());
  }

  @Test
  public void testListWithOptions() throws Exception {
    initializeExpectedDataset(4);
    List<Table> tableResults = ImmutableList.of(
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO1)),
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO2)),
        new Table(serviceMockReturnsOptions, new Table.BuilderImpl(TABLE_INFO3)));
    PageImpl<Table> expectedPage = new PageImpl<>(null, "c", tableResults);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.listTables(DATASET_INFO.datasetId(), BigQuery.TableListOption.pageSize(10L)))
        .andReturn(expectedPage);
    replay(bigquery);
    initializeDataset();
    Page<Table> tablePage = dataset.list(BigQuery.TableListOption.pageSize(10L));
    assertArrayEquals(tableResults.toArray(), Iterables.toArray(tablePage.values(), Table.class));
    assertEquals(expectedPage.nextPageCursor(), tablePage.nextPageCursor());
  }

  @Test
  public void testGet() throws Exception {
    initializeExpectedDataset(2);
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO1.tableId())).andReturn(expectedTable);
    replay(bigquery);
    initializeDataset();
    Table table = dataset.get(TABLE_INFO1.tableId().table());
    assertNotNull(table);
    assertEquals(expectedTable, table);
  }

  @Test
  public void testGetNull() throws Exception {
    initializeExpectedDataset(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO1.tableId())).andReturn(null);
    replay(bigquery);
    initializeDataset();
    assertNull(dataset.get(TABLE_INFO1.tableId().table()));
  }

  @Test
  public void testGetWithOptions() throws Exception {
    initializeExpectedDataset(2);
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO1.tableId(), BigQuery.TableOption.fields()))
        .andReturn(expectedTable);
    replay(bigquery);
    initializeDataset();
    Table table = dataset.get(TABLE_INFO1.tableId().table(), BigQuery.TableOption.fields());
    assertNotNull(table);
    assertEquals(expectedTable, table);
  }

  @Test
  public void testCreateTable() throws Exception {
    initializeExpectedDataset(2);
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.create(TABLE_INFO1)).andReturn(expectedTable);
    replay(bigquery);
    initializeDataset();
    Table table = dataset.create(TABLE_INFO1.tableId().table(), TABLE_DEFINITION);
    assertEquals(expectedTable, table);
  }

  @Test
  public void testCreateTableWithOptions() throws Exception {
    initializeExpectedDataset(2);
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO1));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.create(TABLE_INFO1, BigQuery.TableOption.fields())).andReturn(expectedTable);
    replay(bigquery);
    initializeDataset();
    Table table = dataset.create(TABLE_INFO1.tableId().table(), TABLE_DEFINITION,
        BigQuery.TableOption.fields());
    assertEquals(expectedTable, table);
  }

  @Test
  public void testBigquery() {
    initializeExpectedDataset(1);
    replay(bigquery);
    assertSame(serviceMockReturnsOptions, expectedDataset.bigquery());
  }

  @Test
  public void testToAndFromPb() {
    initializeExpectedDataset(4);
    replay(bigquery);
    compareDataset(expectedDataset,
        Dataset.fromPb(serviceMockReturnsOptions, expectedDataset.toPb()));
  }

  // Equality including the attached service's options.
  private void compareDataset(Dataset expected, Dataset value) {
    assertEquals(expected, value);
    compareDatasetInfo(expected, value);
    assertEquals(expected.bigquery().options(), value.bigquery().options());
  }

  // Field-by-field comparison of the DatasetInfo portion.
  private void compareDatasetInfo(DatasetInfo expected, DatasetInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.datasetId(), value.datasetId());
    assertEquals(expected.description(), value.description());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.friendlyName(), value.friendlyName());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.location(), value.location());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.acl(), value.acl());
    assertEquals(expected.creationTime(), value.creationTime());
    assertEquals(expected.defaultTableLifetime(), value.defaultTableLifetime());
    assertEquals(expected.lastModified(), value.lastModified());
  }
}
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link ExternalTableDefinition}: builder, toBuilder round
 * trips, and protobuf serialization.
 */
public class ExternalTableDefinitionTest {

  // Raw List replaced with List<String>: raw generic types defeat compile-time checking.
  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
  private static final Field FIELD_SCHEMA1 =
      Field.builder("StringField", Field.Type.string())
          .mode(Field.Mode.NULLABLE)
          .description("FieldDescription1")
          .build();
  private static final Field FIELD_SCHEMA2 =
      Field.builder("IntegerField", Field.Type.integer())
          .mode(Field.Mode.REPEATED)
          .description("FieldDescription2")
          .build();
  private static final Field FIELD_SCHEMA3 =
      Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2))
          .mode(Field.Mode.REQUIRED)
          .description("FieldDescription3")
          .build();
  private static final Schema TABLE_SCHEMA =
      Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3);
  private static final Integer MAX_BAD_RECORDS = 42;
  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
  private static final String COMPRESSION = "GZIP";
  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build();
  private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION =
      ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS)
          .compression(COMPRESSION)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .maxBadRecords(MAX_BAD_RECORDS)
          .build();

  @Test
  public void testToBuilder() {
    compareExternalTableDefinition(EXTERNAL_TABLE_DEFINITION,
        EXTERNAL_TABLE_DEFINITION.toBuilder().build());
    // Mutate one field, check it, then restore and verify full equality.
    ExternalTableDefinition externalTableDefinition =
        EXTERNAL_TABLE_DEFINITION.toBuilder().compression("NONE").build();
    assertEquals("NONE", externalTableDefinition.compression());
    externalTableDefinition = externalTableDefinition.toBuilder()
        .compression(COMPRESSION)
        .build();
    compareExternalTableDefinition(EXTERNAL_TABLE_DEFINITION, externalTableDefinition);
  }

  @Test
  public void testToBuilderIncomplete() {
    ExternalTableDefinition externalTableDefinition =
        ExternalTableDefinition.of(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.json());
    assertEquals(externalTableDefinition, externalTableDefinition.toBuilder().build());
  }

  @Test
  public void testBuilder() {
    assertEquals(TableDefinition.Type.EXTERNAL, EXTERNAL_TABLE_DEFINITION.type());
    assertEquals(COMPRESSION, EXTERNAL_TABLE_DEFINITION.compression());
    assertEquals(CSV_OPTIONS, EXTERNAL_TABLE_DEFINITION.formatOptions());
    assertEquals(IGNORE_UNKNOWN_VALUES, EXTERNAL_TABLE_DEFINITION.ignoreUnknownValues());
    assertEquals(MAX_BAD_RECORDS, EXTERNAL_TABLE_DEFINITION.maxBadRecords());
    assertEquals(TABLE_SCHEMA, EXTERNAL_TABLE_DEFINITION.schema());
    assertEquals(SOURCE_URIS, EXTERNAL_TABLE_DEFINITION.sourceUris());
  }

  @Test
  public void testToAndFromPb() {
    compareExternalTableDefinition(EXTERNAL_TABLE_DEFINITION,
        ExternalTableDefinition.fromPb(EXTERNAL_TABLE_DEFINITION.toPb()));
    ExternalTableDefinition externalTableDefinition =
        ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS).build();
    compareExternalTableDefinition(externalTableDefinition,
        ExternalTableDefinition.fromPb(externalTableDefinition.toPb()));
  }

  // Asserts field-by-field equality plus matching hash codes.
  private void compareExternalTableDefinition(ExternalTableDefinition expected,
      ExternalTableDefinition value) {
    assertEquals(expected, value);
    assertEquals(expected.compression(), value.compression());
    assertEquals(expected.formatOptions(), value.formatOptions());
    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
    assertEquals(expected.maxBadRecords(), value.maxBadRecords());
    assertEquals(expected.schema(), value.schema());
    assertEquals(expected.sourceUris(), value.sourceUris());
    assertEquals(expected.hashCode(), value.hashCode());
  }
}
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link ExtractJobConfiguration}: builders, single- and
 * multi-URI factories, protobuf serialization, and project-id substitution.
 */
public class ExtractJobConfigurationTest {

  // Raw List replaced with List<String>: raw generic types defeat compile-time checking.
  private static final List<String> DESTINATION_URIS = ImmutableList.of("uri1", "uri2");
  private static final String DESTINATION_URI = "uri1";
  private static final TableId TABLE_ID = TableId.of("dataset", "table");
  private static final String FIELD_DELIMITER = ",";
  private static final String FORMAT = "CSV";
  private static final String JSON_FORMAT = "NEWLINE_DELIMITED_JSON";
  private static final Boolean PRINT_HEADER = true;
  private static final String COMPRESSION = "GZIP";
  private static final ExtractJobConfiguration EXTRACT_CONFIGURATION =
      ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URIS)
          .printHeader(PRINT_HEADER)
          .fieldDelimiter(FIELD_DELIMITER)
          .compression(COMPRESSION)
          .format(FORMAT)
          .build();
  // Same configuration built from the single-URI convenience overload.
  private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_ONE_URI =
      ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URI)
          .printHeader(PRINT_HEADER)
          .fieldDelimiter(FIELD_DELIMITER)
          .compression(COMPRESSION)
          .format(FORMAT)
          .build();

  @Test
  public void testToBuilder() {
    compareExtractJobConfiguration(
        EXTRACT_CONFIGURATION, EXTRACT_CONFIGURATION.toBuilder().build());
    ExtractJobConfiguration job = EXTRACT_CONFIGURATION.toBuilder()
        .sourceTable(TableId.of("dataset", "newTable"))
        .build();
    assertEquals("newTable", job.sourceTable().table());
    job = job.toBuilder().sourceTable(TABLE_ID).build();
    compareExtractJobConfiguration(EXTRACT_CONFIGURATION, job);
  }

  @Test
  public void testOf() {
    ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(DESTINATION_URIS, job.destinationUris());
    // The single-URI factory must wrap its argument in a one-element list.
    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URI);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS, JSON_FORMAT);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(DESTINATION_URIS, job.destinationUris());
    assertEquals(JSON_FORMAT, job.format());
    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URI, JSON_FORMAT);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
    assertEquals(JSON_FORMAT, job.format());
  }

  @Test
  public void testToBuilderIncomplete() {
    ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS);
    compareExtractJobConfiguration(job, job.toBuilder().build());
  }

  @Test
  public void testBuilder() {
    assertEquals(TABLE_ID, EXTRACT_CONFIGURATION.sourceTable());
    assertEquals(DESTINATION_URIS, EXTRACT_CONFIGURATION.destinationUris());
    assertEquals(FIELD_DELIMITER, EXTRACT_CONFIGURATION.fieldDelimiter());
    assertEquals(COMPRESSION, EXTRACT_CONFIGURATION.compression());
    assertEquals(PRINT_HEADER, EXTRACT_CONFIGURATION.printHeader());
    assertEquals(FORMAT, EXTRACT_CONFIGURATION.format());
    assertEquals(TABLE_ID, EXTRACT_CONFIGURATION_ONE_URI.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI),
        EXTRACT_CONFIGURATION_ONE_URI.destinationUris());
    assertEquals(FIELD_DELIMITER, EXTRACT_CONFIGURATION_ONE_URI.fieldDelimiter());
    assertEquals(COMPRESSION, EXTRACT_CONFIGURATION_ONE_URI.compression());
    assertEquals(PRINT_HEADER, EXTRACT_CONFIGURATION_ONE_URI.printHeader());
    assertEquals(FORMAT, EXTRACT_CONFIGURATION_ONE_URI.format());
  }

  @Test
  public void testToPbAndFromPb() {
    // Only the extract section of the job protobuf may be populated.
    assertNotNull(EXTRACT_CONFIGURATION.toPb().getExtract());
    assertNull(EXTRACT_CONFIGURATION.toPb().getCopy());
    assertNull(EXTRACT_CONFIGURATION.toPb().getLoad());
    assertNull(EXTRACT_CONFIGURATION.toPb().getQuery());
    compareExtractJobConfiguration(EXTRACT_CONFIGURATION,
        ExtractJobConfiguration.fromPb(EXTRACT_CONFIGURATION.toPb()));
    compareExtractJobConfiguration(EXTRACT_CONFIGURATION_ONE_URI,
        ExtractJobConfiguration.fromPb(EXTRACT_CONFIGURATION_ONE_URI.toPb()));
    ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS);
    compareExtractJobConfiguration(job, ExtractJobConfiguration.fromPb(job.toPb()));
  }

  @Test
  public void testSetProjectId() {
    ExtractJobConfiguration configuration = EXTRACT_CONFIGURATION.setProjectId("p");
    assertEquals("p", configuration.sourceTable().project());
  }

  // Asserts field-by-field equality plus matching hashCode/toString.
  private void compareExtractJobConfiguration(ExtractJobConfiguration expected,
      ExtractJobConfiguration value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.sourceTable(), value.sourceTable());
    assertEquals(expected.destinationUris(), value.destinationUris());
    assertEquals(expected.compression(), value.compression());
    assertEquals(expected.printHeader(), value.printHeader());
    assertEquals(expected.fieldDelimiter(), value.fieldDelimiter());
    assertEquals(expected.format(), value.format());
  }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +public class FieldTest { + + private static final String FIELD_NAME1 = "StringField"; + private static final String FIELD_NAME2 = "IntegerField"; + private static final String FIELD_NAME3 = "RecordField"; + private static final Field.Type FIELD_TYPE1 = Field.Type.string(); + private static final Field.Type FIELD_TYPE2 = Field.Type.integer(); + private static final Field.Mode FIELD_MODE1 = Field.Mode.NULLABLE; + private static final Field.Mode FIELD_MODE2 = Field.Mode.REPEATED; + private static final Field.Mode FIELD_MODE3 = Field.Mode.REQUIRED; + private static final String FIELD_DESCRIPTION1 = "FieldDescription1"; + private static final String FIELD_DESCRIPTION2 = "FieldDescription2"; + private static final String FIELD_DESCRIPTION3 = "FieldDescription3"; + private static final Field FIELD_SCHEMA1 = Field.builder(FIELD_NAME1, FIELD_TYPE1) + .mode(FIELD_MODE1) + .description(FIELD_DESCRIPTION1) + .build(); + private static final Field FIELD_SCHEMA2 = Field.builder(FIELD_NAME2, FIELD_TYPE2) + .mode(FIELD_MODE2) + .description(FIELD_DESCRIPTION2) + .build(); + private static final Field.Type FIELD_TYPE3 = + Field.Type.record(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2)); + private static final Field FIELD_SCHEMA3 = Field + .builder(FIELD_NAME3, FIELD_TYPE3) + .mode(FIELD_MODE3) + .description(FIELD_DESCRIPTION3) + .build(); + + @Test + public void testToBuilder() { + 
compareFieldSchemas(FIELD_SCHEMA1, FIELD_SCHEMA1.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA2, FIELD_SCHEMA2.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA3, FIELD_SCHEMA3.toBuilder().build()); + Field field = FIELD_SCHEMA1.toBuilder() + .description("New Description") + .build(); + assertEquals("New Description", field.description()); + field = field.toBuilder().description(FIELD_DESCRIPTION1).build(); + compareFieldSchemas(FIELD_SCHEMA1, field); + } + + @Test + public void testToBuilderIncomplete() { + Field field = Field.of(FIELD_NAME1, FIELD_TYPE1); + compareFieldSchemas(field, field.toBuilder().build()); + field = Field.of(FIELD_NAME2, FIELD_TYPE3); + compareFieldSchemas(field, field.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELD_NAME1, FIELD_SCHEMA1.name()); + assertEquals(FIELD_TYPE1, FIELD_SCHEMA1.type()); + assertEquals(FIELD_MODE1, FIELD_SCHEMA1.mode()); + assertEquals(FIELD_DESCRIPTION1, FIELD_SCHEMA1.description()); + assertEquals(null, FIELD_SCHEMA1.fields()); + assertEquals(FIELD_NAME3, FIELD_SCHEMA3.name()); + assertEquals(FIELD_TYPE3, FIELD_SCHEMA3.type()); + assertEquals(FIELD_MODE3, FIELD_SCHEMA3.mode()); + assertEquals(FIELD_DESCRIPTION3, FIELD_SCHEMA3.description()); + assertEquals(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2), FIELD_SCHEMA3.fields()); + } + + @Test + public void testToAndFromPb() { + compareFieldSchemas(FIELD_SCHEMA1, Field.fromPb(FIELD_SCHEMA1.toPb())); + compareFieldSchemas(FIELD_SCHEMA2, Field.fromPb(FIELD_SCHEMA2.toPb())); + compareFieldSchemas(FIELD_SCHEMA3, Field.fromPb(FIELD_SCHEMA3.toPb())); + Field field = Field.builder(FIELD_NAME1, FIELD_TYPE1).build(); + compareFieldSchemas(field, Field.fromPb(field.toPb())); + } + + private void compareFieldSchemas(Field expected, Field value) { + assertEquals(expected, value); + assertEquals(expected.name(), value.name()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.mode(), value.mode()); + 
assertEquals(expected.description(), value.description()); + assertEquals(expected.fields(), value.fields()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java new file mode 100644 index 000000000000..d6d879dbd58f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java @@ -0,0 +1,111 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.TableCell; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.Map; + +public class FieldValueTest { + + private static final TableCell BOOLEAN_FIELD = new TableCell().setV("false"); + private static final Map INTEGER_FIELD = ImmutableMap.of("v", "1"); + private static final Map FLOAT_FIELD = ImmutableMap.of("v", "1.5"); + private static final Map STRING_FIELD = ImmutableMap.of("v", "string"); + private static final Map TIMESTAMP_FIELD = ImmutableMap.of("v", "42"); + private static final Map NULL_FIELD = + ImmutableMap.of("v", Data.nullOf(String.class)); + private static final Map REPEATED_FIELD = + ImmutableMap.of("v", ImmutableList.of(INTEGER_FIELD, INTEGER_FIELD)); + private static final Map RECORD_FIELD = + ImmutableMap.of("f", ImmutableList.of(FLOAT_FIELD, TIMESTAMP_FIELD)); + + @Test + public void testFromPb() { + FieldValue value = FieldValue.fromPb(BOOLEAN_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute()); + assertFalse(value.booleanValue()); + value = FieldValue.fromPb(INTEGER_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute()); + assertEquals(1, value.longValue()); + value = FieldValue.fromPb(FLOAT_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute()); + assertEquals(1.5, value.doubleValue(), 0); + value = FieldValue.fromPb(STRING_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute()); + assertEquals("string", value.stringValue()); + value = FieldValue.fromPb(TIMESTAMP_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute()); + assertEquals(42000000, value.timestampValue()); + value = 
FieldValue.fromPb(NULL_FIELD); + assertNull(value.value()); + value = FieldValue.fromPb(REPEATED_FIELD); + assertEquals(FieldValue.Attribute.REPEATED, value.attribute()); + assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(0)); + assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(1)); + value = FieldValue.fromPb(RECORD_FIELD); + assertEquals(FieldValue.Attribute.RECORD, value.attribute()); + assertEquals(FieldValue.fromPb(FLOAT_FIELD), value.repeatedValue().get(0)); + assertEquals(FieldValue.fromPb(TIMESTAMP_FIELD), value.repeatedValue().get(1)); + } + + @Test + public void testEquals() { + FieldValue booleanValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "false"); + assertEquals(booleanValue, FieldValue.fromPb(BOOLEAN_FIELD)); + assertEquals(booleanValue.hashCode(), FieldValue.fromPb(BOOLEAN_FIELD).hashCode()); + + FieldValue integerValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1"); + assertEquals(integerValue, FieldValue.fromPb(INTEGER_FIELD)); + assertEquals(integerValue.hashCode(), FieldValue.fromPb(INTEGER_FIELD).hashCode()); + + FieldValue floatValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1.5"); + assertEquals(floatValue, FieldValue.fromPb(FLOAT_FIELD)); + assertEquals(floatValue.hashCode(), FieldValue.fromPb(FLOAT_FIELD).hashCode()); + + FieldValue stringValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "string"); + assertEquals(stringValue, FieldValue.fromPb(STRING_FIELD)); + assertEquals(stringValue.hashCode(), FieldValue.fromPb(STRING_FIELD).hashCode()); + + FieldValue timestampValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "42"); + assertEquals(timestampValue, FieldValue.fromPb(TIMESTAMP_FIELD)); + assertEquals(timestampValue.hashCode(), FieldValue.fromPb(TIMESTAMP_FIELD).hashCode()); + + FieldValue nullValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, null); + assertEquals(nullValue, FieldValue.fromPb(NULL_FIELD)); + assertEquals(nullValue.hashCode(), 
FieldValue.fromPb(NULL_FIELD).hashCode()); + + FieldValue repeatedValue = new FieldValue(FieldValue.Attribute.REPEATED, + ImmutableList.of(integerValue, integerValue)); + assertEquals(repeatedValue, FieldValue.fromPb(REPEATED_FIELD)); + assertEquals(repeatedValue.hashCode(), FieldValue.fromPb(REPEATED_FIELD).hashCode()); + + FieldValue recordValue = new FieldValue(FieldValue.Attribute.RECORD, + ImmutableList.of(floatValue, timestampValue)); + assertEquals(recordValue, FieldValue.fromPb(RECORD_FIELD)); + assertEquals(recordValue.hashCode(), FieldValue.fromPb(RECORD_FIELD).hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java new file mode 100644 index 000000000000..df939143156b --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class FormatOptionsTest { + + @Test + public void testConstructor() { + FormatOptions options = new FormatOptions(FormatOptions.CSV); + assertEquals(FormatOptions.CSV, options.type()); + options = new FormatOptions(FormatOptions.JSON); + assertEquals(FormatOptions.JSON, options.type()); + options = new FormatOptions(FormatOptions.DATASTORE_BACKUP); + assertEquals(FormatOptions.DATASTORE_BACKUP, options.type()); + } + + @Test + public void testFactoryMethods() { + assertEquals(FormatOptions.CSV, FormatOptions.csv().type()); + assertEquals(FormatOptions.JSON, FormatOptions.json().type()); + assertEquals(FormatOptions.DATASTORE_BACKUP, FormatOptions.datastoreBackup().type()); + } + + @Test + public void testEquals() { + assertEquals(FormatOptions.csv(), FormatOptions.csv()); + assertEquals(FormatOptions.csv().hashCode(), FormatOptions.csv().hashCode()); + assertEquals(FormatOptions.json(), FormatOptions.json()); + assertEquals(FormatOptions.json().hashCode(), FormatOptions.json().hashCode()); + assertEquals(FormatOptions.datastoreBackup(), FormatOptions.datastoreBackup()); + assertEquals(FormatOptions.datastoreBackup().hashCode(), + FormatOptions.datastoreBackup().hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java new file mode 100644 index 000000000000..0866f0b9349e --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java @@ -0,0 +1,223 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class InsertAllRequestTest { + + private static final Map CONTENT1 = + ImmutableMap.of("key", "val1"); + private static final Map CONTENT2 = + ImmutableMap.of("key", "val2"); + private static final List ROWS = + ImmutableList.of(InsertAllRequest.RowToInsert.of(CONTENT1), + InsertAllRequest.RowToInsert.of(CONTENT2)); + private static final List ROWS_WITH_ID = + ImmutableList.of(InsertAllRequest.RowToInsert.of("id1", CONTENT1), + InsertAllRequest.RowToInsert.of("id2", CONTENT2)); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final Schema TABLE_SCHEMA = Schema.of(); + private static final TableDefinition TABLE_DEFINITION = StandardTableDefinition.of(TABLE_SCHEMA); + private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_DEFINITION); + private static final boolean SKIP_INVALID_ROWS = true; + private static final boolean IGNORE_UNKNOWN_VALUES = false; + private static final String TEMPLATE_SUFFIX = "templateSuffix"; + private static final InsertAllRequest INSERT_ALL_REQUEST1 = InsertAllRequest.builder(TABLE_ID) + .addRow(CONTENT1) + .addRow(CONTENT2) + 
.ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST2 = InsertAllRequest.builder(TABLE_ID) + .rows(ROWS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST3 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table()) + .rows(ROWS_WITH_ID) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST4 = + InsertAllRequest.builder(TABLE_ID, ROWS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST5 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST6 = + InsertAllRequest.builder(TABLE_ID, ROWS.get(0), ROWS.get(1)) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST7 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID.get(0), + ROWS_WITH_ID.get(1)) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST8 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table()) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST9 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + 
private static final InsertAllRequest INSERT_ALL_REQUEST10 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST11 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .templateSuffix(TEMPLATE_SUFFIX) + .build(); + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, INSERT_ALL_REQUEST1.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST2.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST3.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST4.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST5.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST6.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST7.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST8.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST9.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST10.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST11.table()); + assertEquals(ROWS, INSERT_ALL_REQUEST1.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST2.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST4.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST6.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST3.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST5.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST7.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST8.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST9.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST10.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST11.rows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST1.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST2.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST3.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, 
INSERT_ALL_REQUEST4.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST5.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST6.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST7.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST8.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST9.skipInvalidRows()); + assertFalse(INSERT_ALL_REQUEST10.skipInvalidRows()); + assertFalse(INSERT_ALL_REQUEST11.skipInvalidRows()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST1.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST2.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST3.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST4.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST5.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST6.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST7.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST8.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST9.ignoreUnknownValues()); + assertTrue(INSERT_ALL_REQUEST10.ignoreUnknownValues()); + assertTrue(INSERT_ALL_REQUEST11.ignoreUnknownValues()); + assertNull(INSERT_ALL_REQUEST1.templateSuffix()); + assertNull(INSERT_ALL_REQUEST2.templateSuffix()); + assertNull(INSERT_ALL_REQUEST3.templateSuffix()); + assertNull(INSERT_ALL_REQUEST4.templateSuffix()); + assertNull(INSERT_ALL_REQUEST5.templateSuffix()); + assertNull(INSERT_ALL_REQUEST6.templateSuffix()); + assertNull(INSERT_ALL_REQUEST7.templateSuffix()); + assertNull(INSERT_ALL_REQUEST8.templateSuffix()); + assertNull(INSERT_ALL_REQUEST9.templateSuffix()); + assertNull(INSERT_ALL_REQUEST10.templateSuffix()); + assertEquals(TEMPLATE_SUFFIX, INSERT_ALL_REQUEST11.templateSuffix()); + } + + @Test + public void 
testOf() { + InsertAllRequest request = InsertAllRequest.of(TABLE_ID, ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_INFO, ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID, ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_INFO, ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + } + + @Test + public void testEquals() { + compareInsertAllRequest(INSERT_ALL_REQUEST1, INSERT_ALL_REQUEST2); + compareInsertAllRequest(INSERT_ALL_REQUEST2, INSERT_ALL_REQUEST4); + compareInsertAllRequest(INSERT_ALL_REQUEST3, INSERT_ALL_REQUEST5); + compareInsertAllRequest(INSERT_ALL_REQUEST4, INSERT_ALL_REQUEST6); + compareInsertAllRequest(INSERT_ALL_REQUEST5, INSERT_ALL_REQUEST7); + compareInsertAllRequest(INSERT_ALL_REQUEST7, INSERT_ALL_REQUEST8); + compareInsertAllRequest(INSERT_ALL_REQUEST8, INSERT_ALL_REQUEST9); + compareInsertAllRequest(INSERT_ALL_REQUEST10, INSERT_ALL_REQUEST10); + compareInsertAllRequest(INSERT_ALL_REQUEST11, INSERT_ALL_REQUEST11); + } + + private void compareInsertAllRequest(InsertAllRequest expected, InsertAllRequest value) { + assertEquals(expected, value); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.hashCode(), value.hashCode()); + 
assertEquals(expected.table(), value.table()); + assertEquals(expected.rows(), value.rows()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.skipInvalidRows(), value.skipInvalidRows()); + assertEquals(expected.templateSuffix(), value.templateSuffix()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java new file mode 100644 index 000000000000..b2eb0458f27f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java @@ -0,0 +1,77 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class InsertAllResponseTest { + + private static final List ERRORS1 = ImmutableList.of( + new BigQueryError("reason1", "location1", "message1"), + new BigQueryError("reason2", "location2", "message2")); + private static final List ERRORS2 = ImmutableList.of( + new BigQueryError("reason3", "location3", "message3"), + new BigQueryError("reason4", "location4", "message4")); + private static final Map> ERRORS_MAP = ImmutableMap.of( + 0L, ERRORS1, 1L, ERRORS2); + private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP); + private static final InsertAllResponse EMPTY_INSERT_ALL_RESPONSE = new InsertAllResponse(null); + + @Test + public void testConstructor() { + assertEquals(INSERT_ALL_RESPONSE, INSERT_ALL_RESPONSE); + } + + @Test + public void testErrorsFor() { + assertEquals(ERRORS1, INSERT_ALL_RESPONSE.errorsFor(0L)); + assertEquals(ERRORS2, INSERT_ALL_RESPONSE.errorsFor(1L)); + assertNull(INSERT_ALL_RESPONSE.errorsFor(2L)); + } + + @Test + public void testHasErrors() { + assertTrue(INSERT_ALL_RESPONSE.hasErrors()); + assertFalse(EMPTY_INSERT_ALL_RESPONSE.hasErrors()); + } + + @Test + public void testToPbAndFromPb() { + compareInsertAllResponse(INSERT_ALL_RESPONSE, + InsertAllResponse.fromPb(INSERT_ALL_RESPONSE.toPb())); + compareInsertAllResponse(EMPTY_INSERT_ALL_RESPONSE, + InsertAllResponse.fromPb(EMPTY_INSERT_ALL_RESPONSE.toPb())); + } + + private void compareInsertAllResponse(InsertAllResponse expected, InsertAllResponse value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + 
assertEquals(expected.toString(), value.toString()); + assertEquals(expected.insertErrors(), value.insertErrors()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java new file mode 100644 index 000000000000..740830f07544 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class JobIdTest { + + private static final JobId JOB = JobId.of("job"); + private static final JobId JOB_COMPLETE = JobId.of("project", "job"); + + @Test + public void testOf() { + assertEquals(null, JOB.project()); + assertEquals("job", JOB.job()); + assertEquals("project", JOB_COMPLETE.project()); + assertEquals("job", JOB_COMPLETE.job()); + } + + @Test + public void testEquals() { + compareJobs(JOB, JobId.of("job")); + compareJobs(JOB_COMPLETE, JobId.of("project", "job")); + } + + @Test + public void testToPbAndFromPb() { + compareJobs(JOB, JobId.fromPb(JOB.toPb())); + compareJobs(JOB_COMPLETE, JobId.fromPb(JOB_COMPLETE.toPb())); + } + + private void compareJobs(JobId expected, JobId value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.project(), value.project()); + assertEquals(expected.job(), value.job()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java new file mode 100644 index 000000000000..260088470aff --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java @@ -0,0 +1,370 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; +import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics; +import com.google.gcloud.bigquery.JobStatistics.LoadStatistics; +import com.google.gcloud.bigquery.JobStatistics.QueryStatistics; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class JobInfoTest { + + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final JobId JOB_ID = JobId.of("job"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE); + private static final JobStatistics COPY_JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final ExtractStatistics EXTRACT_JOB_STATISTICS = + ExtractStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .destinationUriFileCounts(ImmutableList.of(42L)) + .build(); + private static final LoadStatistics LOAD_JOB_STATISTICS = + LoadStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .inputFiles(42L) + .outputBytes(1024L) + .inputBytes(2048L) + .outputRows(24L) + .build(); + private static final QueryStatistics QUERY_JOB_STATISTICS = + QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + 
.billingTier(42) + .build(); + private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); + private static final TableId DESTINATION_TABLE = TableId.of("dataset", "destinationTable"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final CopyJobConfiguration COPY_CONFIGURATION = + CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLE) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .build(); + private static final List DESTINATION_URIS = ImmutableList.of("uri1", "uri2"); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final String FIELD_DELIMITER = ","; + private static final String FORMAT = "CSV"; + private static final Boolean PRINT_HEADER = true; + private static final String COMPRESSION = "GZIP"; + private static final ExtractJobConfiguration EXTRACT_CONFIGURATION = + ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URIS) + .printHeader(PRINT_HEADER) + .fieldDelimiter(FIELD_DELIMITER) + .compression(COMPRESSION) 
+ .format(FORMAT) + .build(); + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build(); + private static final ExternalTableDefinition TABLE_CONFIGURATION = + ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final LoadJobConfiguration LOAD_CONFIGURATION = + LoadJobConfiguration.builder(TABLE_ID, SOURCE_URIS) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + private static final String QUERY = "BigQuery SQL"; + private static final Map TABLE_DEFINITIONS = + ImmutableMap.of("tableName", TABLE_CONFIGURATION); + private static final QueryJobConfiguration.Priority PRIORITY = + QueryJobConfiguration.Priority.BATCH; + private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean USE_QUERY_CACHE = false; + private static final boolean FLATTEN_RESULTS = true; + private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of( + UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final QueryJobConfiguration QUERY_CONFIGURATION = + QueryJobConfiguration.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .tableDefinitions(TABLE_DEFINITIONS) + .allowLargeResults(ALLOW_LARGE_RESULTS) + .createDisposition(CREATE_DISPOSITION) + .defaultDataset(DATASET_ID) + .destinationTable(TABLE_ID) + .writeDisposition(WRITE_DISPOSITION) + .priority(PRIORITY) + .flattenResults(FLATTEN_RESULTS) + .userDefinedFunctions(USER_DEFINED_FUNCTIONS) 
+ .dryRun(true) + .build(); + private static final JobInfo COPY_JOB = JobInfo.builder(COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo EXTRACT_JOB = JobInfo.builder(EXTRACT_CONFIGURATION) + .jobId(JOB_ID) + .statistics(EXTRACT_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo LOAD_JOB = JobInfo.builder(LOAD_CONFIGURATION) + .jobId(JOB_ID) + .statistics(LOAD_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo QUERY_JOB = JobInfo.builder(QUERY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(QUERY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + + + @Test + public void testToBuilder() { + compareJobInfo(COPY_JOB, COPY_JOB.toBuilder().build()); + compareJobInfo(EXTRACT_JOB, EXTRACT_JOB.toBuilder().build()); + compareJobInfo(LOAD_JOB, LOAD_JOB.toBuilder().build()); + compareJobInfo(QUERY_JOB, QUERY_JOB.toBuilder().build()); + JobInfo job = COPY_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(COPY_JOB, job); + job = EXTRACT_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(EXTRACT_JOB, job); + job = LOAD_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(LOAD_JOB, job); + job = QUERY_JOB.toBuilder() + .userEmail("newEmail") + .build(); + 
assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(QUERY_JOB, job); + } + + @Test + public void testOf() { + JobInfo job = JobInfo.of(COPY_CONFIGURATION); + assertEquals(COPY_CONFIGURATION, job.configuration()); + job = JobInfo.of(EXTRACT_CONFIGURATION); + assertEquals(EXTRACT_CONFIGURATION, job.configuration()); + job = JobInfo.of(LOAD_CONFIGURATION); + assertEquals(LOAD_CONFIGURATION, job.configuration()); + job = JobInfo.of(QUERY_CONFIGURATION); + assertEquals(QUERY_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, COPY_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(COPY_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, EXTRACT_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(EXTRACT_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, LOAD_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(LOAD_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, QUERY_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(QUERY_CONFIGURATION, job.configuration()); + + } + + @Test + public void testToBuilderIncomplete() { + JobInfo job = JobInfo.of(COPY_CONFIGURATION); + compareJobInfo(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(ETAG, COPY_JOB.etag()); + assertEquals(ID, COPY_JOB.id()); + assertEquals(SELF_LINK, COPY_JOB.selfLink()); + assertEquals(EMAIL, COPY_JOB.userEmail()); + assertEquals(JOB_ID, COPY_JOB.jobId()); + assertEquals(JOB_STATUS, COPY_JOB.status()); + assertEquals(COPY_CONFIGURATION, COPY_JOB.configuration()); + assertEquals(COPY_JOB_STATISTICS, COPY_JOB.statistics()); + + assertEquals(ETAG, EXTRACT_JOB.etag()); + assertEquals(ID, EXTRACT_JOB.id()); + assertEquals(SELF_LINK, EXTRACT_JOB.selfLink()); + assertEquals(EMAIL, EXTRACT_JOB.userEmail()); + assertEquals(JOB_ID, EXTRACT_JOB.jobId()); + assertEquals(JOB_STATUS, EXTRACT_JOB.status()); + 
assertEquals(EXTRACT_CONFIGURATION, EXTRACT_JOB.configuration()); + assertEquals(EXTRACT_JOB_STATISTICS, EXTRACT_JOB.statistics()); + + assertEquals(ETAG, LOAD_JOB.etag()); + assertEquals(ID, LOAD_JOB.id()); + assertEquals(SELF_LINK, LOAD_JOB.selfLink()); + assertEquals(EMAIL, LOAD_JOB.userEmail()); + assertEquals(JOB_ID, LOAD_JOB.jobId()); + assertEquals(JOB_STATUS, LOAD_JOB.status()); + assertEquals(LOAD_CONFIGURATION, LOAD_JOB.configuration()); + assertEquals(LOAD_JOB_STATISTICS, LOAD_JOB.statistics()); + + assertEquals(ETAG, QUERY_JOB.etag()); + assertEquals(ID, QUERY_JOB.id()); + assertEquals(SELF_LINK, QUERY_JOB.selfLink()); + assertEquals(EMAIL, QUERY_JOB.userEmail()); + assertEquals(JOB_ID, QUERY_JOB.jobId()); + assertEquals(JOB_STATUS, QUERY_JOB.status()); + assertEquals(QUERY_CONFIGURATION, QUERY_JOB.configuration()); + assertEquals(QUERY_JOB_STATISTICS, QUERY_JOB.statistics()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(COPY_JOB.toPb().getConfiguration().getCopy()); + assertNull(COPY_JOB.toPb().getConfiguration().getExtract()); + assertNull(COPY_JOB.toPb().getConfiguration().getLoad()); + assertNull(COPY_JOB.toPb().getConfiguration().getQuery()); + assertEquals(COPY_JOB_STATISTICS, JobStatistics.fromPb(COPY_JOB.statistics().toPb())); + compareJobInfo(COPY_JOB, JobInfo.fromPb(COPY_JOB.toPb())); + assertTrue(JobInfo.fromPb(COPY_JOB.toPb()).configuration() instanceof CopyJobConfiguration); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getCopy()); + assertNotNull(EXTRACT_JOB.toPb().getConfiguration().getExtract()); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getLoad()); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getQuery()); + assertEquals(EXTRACT_JOB_STATISTICS, JobStatistics.fromPb(EXTRACT_JOB.statistics().toPb())); + compareJobInfo(EXTRACT_JOB, JobInfo.fromPb(EXTRACT_JOB.toPb())); + assertTrue( + JobInfo.fromPb(EXTRACT_JOB.toPb()).configuration() instanceof ExtractJobConfiguration); + 
assertTrue(JobInfo.fromPb(EXTRACT_JOB.toPb()).statistics() instanceof ExtractStatistics); + assertNull(LOAD_JOB.toPb().getConfiguration().getCopy()); + assertNull(LOAD_JOB.toPb().getConfiguration().getExtract()); + assertNotNull(LOAD_JOB.toPb().getConfiguration().getLoad()); + assertNull(LOAD_JOB.toPb().getConfiguration().getQuery()); + assertEquals(LOAD_JOB_STATISTICS, JobStatistics.fromPb(LOAD_JOB.statistics().toPb())); + compareJobInfo(LOAD_JOB, JobInfo.fromPb(LOAD_JOB.toPb())); + assertTrue(JobInfo.fromPb(LOAD_JOB.toPb()).configuration() instanceof LoadJobConfiguration); + assertTrue(JobInfo.fromPb(LOAD_JOB.toPb()).statistics() instanceof LoadStatistics); + assertNull(QUERY_JOB.toPb().getConfiguration().getCopy()); + assertNull(QUERY_JOB.toPb().getConfiguration().getExtract()); + assertNull(QUERY_JOB.toPb().getConfiguration().getLoad()); + assertNotNull(QUERY_JOB.toPb().getConfiguration().getQuery()); + assertEquals(QUERY_JOB_STATISTICS, JobStatistics.fromPb(QUERY_JOB.statistics().toPb())); + compareJobInfo(QUERY_JOB, JobInfo.fromPb(QUERY_JOB.toPb())); + assertTrue(JobInfo.fromPb(QUERY_JOB.toPb()).configuration() instanceof QueryJobConfiguration); + assertTrue(JobInfo.fromPb(QUERY_JOB.toPb()).statistics() instanceof QueryStatistics); + } + + @Test + public void testSetProjectId() { + CopyJobConfiguration copyConfiguration = COPY_JOB.setProjectId("p").configuration(); + assertEquals("p", copyConfiguration.destinationTable().project()); + for (TableId sourceTable : copyConfiguration.sourceTables()) { + assertEquals("p", sourceTable.project()); + } + ExtractJobConfiguration extractConfiguration = EXTRACT_JOB.setProjectId("p").configuration(); + assertEquals("p", extractConfiguration.sourceTable().project()); + LoadJobConfiguration loadConfiguration = LOAD_JOB.setProjectId("p").configuration(); + assertEquals("p", loadConfiguration.destinationTable().project()); + QueryJobConfiguration queryConfiguration = QUERY_JOB.setProjectId("p").configuration(); + 
assertEquals("p", queryConfiguration.defaultDataset().project()); + assertEquals("p", queryConfiguration.destinationTable().project()); + } + + private void compareJobInfo(JobInfo expected, JobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.status(), value.status()); + assertEquals(expected.statistics(), value.statistics()); + assertEquals(expected.userEmail(), value.userEmail()); + assertEquals(expected.configuration(), value.configuration()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java new file mode 100644 index 000000000000..1ec67d034754 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java @@ -0,0 +1,204 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics; +import com.google.gcloud.bigquery.JobStatistics.LoadStatistics; +import com.google.gcloud.bigquery.JobStatistics.QueryStatistics; +import com.google.gcloud.bigquery.QueryStage.QueryStep; + +import org.junit.Test; + +import java.util.List; + +public class JobStatisticsTest { + + private static final Integer BILLING_TIER = 42; + private static final Boolean CACHE_HIT = true; + private static final Long TOTAL_BYTES_BILLED = 24L; + private static final Long TOTAL_BYTES_PROCESSED = 42L; + private static final Long INPUT_BYTES = 1L; + private static final Long INPUT_FILES = 2L; + private static final Long OUTPUT_BYTES = 3L; + private static final Long OUTPUT_ROWS = 4L; + private static final List FILE_COUNT = ImmutableList.of(1L, 2L, 3L); + private static final Long CREATION_TIME = 10L; + private static final Long END_TIME = 20L; + private static final Long START_TIME = 15L; + private static final ExtractStatistics EXTRACT_STATISTICS = ExtractStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .destinationUriFileCounts(FILE_COUNT) + .build(); + private static final LoadStatistics LOAD_STATISTICS = LoadStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .inputBytes(INPUT_BYTES) + .inputFiles(INPUT_FILES) + .outputBytes(OUTPUT_BYTES) + .outputRows(OUTPUT_ROWS) + .build(); + private static final LoadStatistics LOAD_STATISTICS_INCOMPLETE = LoadStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .inputBytes(INPUT_BYTES) + .inputFiles(INPUT_FILES) + .build(); + private static final List SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); + private static final List SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); + 
private static final QueryStep QUERY_STEP1 = new QueryStep("KIND", SUBSTEPS1); + private static final QueryStep QUERY_STEP2 = new QueryStep("KIND", SUBSTEPS2); + private static final QueryStage QUERY_STAGE = QueryStage.builder() + .computeRatioAvg(1.1) + .computeRatioMax(2.2) + .id(42L) + .name("stage") + .readRatioAvg(3.3) + .readRatioMax(4.4) + .recordsRead(5L) + .recordsWritten(6L) + .steps(ImmutableList.of(QUERY_STEP1, QUERY_STEP2)) + .waitRatioAvg(7.7) + .waitRatioMax(8.8) + .writeRatioAvg(9.9) + .writeRatioMax(10.10) + .build(); + private static final List QUERY_PLAN = ImmutableList.of(QUERY_STAGE); + private static final QueryStatistics QUERY_STATISTICS = QueryStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .billingTier(BILLING_TIER) + .cacheHit(CACHE_HIT) + .totalBytesBilled(TOTAL_BYTES_BILLED) + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .queryPlan(QUERY_PLAN) + .build(); + private static final QueryStatistics QUERY_STATISTICS_INCOMPLETE = QueryStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .billingTier(BILLING_TIER) + .cacheHit(CACHE_HIT) + .build(); + private static final JobStatistics STATISTICS = JobStatistics.builder() + .creationTime(CREATION_TIME) + .endTime(END_TIME) + .startTime(START_TIME) + .build(); + + @Test + public void testBuilder() { + assertEquals(CREATION_TIME, STATISTICS.creationTime()); + assertEquals(START_TIME, STATISTICS.startTime()); + assertEquals(END_TIME, STATISTICS.endTime()); + + assertEquals(CREATION_TIME, EXTRACT_STATISTICS.creationTime()); + assertEquals(START_TIME, EXTRACT_STATISTICS.startTime()); + assertEquals(END_TIME, EXTRACT_STATISTICS.endTime()); + assertEquals(FILE_COUNT, EXTRACT_STATISTICS.destinationUriFileCounts()); + + assertEquals(CREATION_TIME, LOAD_STATISTICS.creationTime()); + assertEquals(START_TIME, LOAD_STATISTICS.startTime()); + assertEquals(END_TIME, LOAD_STATISTICS.endTime()); + 
assertEquals(INPUT_BYTES, LOAD_STATISTICS.inputBytes()); + assertEquals(INPUT_FILES, LOAD_STATISTICS.inputFiles()); + assertEquals(OUTPUT_BYTES, LOAD_STATISTICS.outputBytes()); + assertEquals(OUTPUT_ROWS, LOAD_STATISTICS.outputRows()); + + assertEquals(CREATION_TIME, QUERY_STATISTICS.creationTime()); + assertEquals(START_TIME, QUERY_STATISTICS.startTime()); + assertEquals(END_TIME, QUERY_STATISTICS.endTime()); + assertEquals(BILLING_TIER, QUERY_STATISTICS.billingTier()); + assertEquals(CACHE_HIT, QUERY_STATISTICS.cacheHit()); + assertEquals(TOTAL_BYTES_BILLED, QUERY_STATISTICS.totalBytesBilled()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.totalBytesProcessed()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.totalBytesProcessed()); + assertEquals(QUERY_PLAN, QUERY_STATISTICS.queryPlan()); + + assertEquals(CREATION_TIME, LOAD_STATISTICS_INCOMPLETE.creationTime()); + assertEquals(START_TIME, LOAD_STATISTICS_INCOMPLETE.startTime()); + assertEquals(END_TIME, LOAD_STATISTICS_INCOMPLETE.endTime()); + assertEquals(INPUT_BYTES, LOAD_STATISTICS_INCOMPLETE.inputBytes()); + assertEquals(INPUT_FILES, LOAD_STATISTICS_INCOMPLETE.inputFiles()); + assertEquals(null, LOAD_STATISTICS_INCOMPLETE.outputBytes()); + assertEquals(null, LOAD_STATISTICS_INCOMPLETE.outputRows()); + + assertEquals(CREATION_TIME, QUERY_STATISTICS_INCOMPLETE.creationTime()); + assertEquals(START_TIME, QUERY_STATISTICS_INCOMPLETE.startTime()); + assertEquals(END_TIME, QUERY_STATISTICS_INCOMPLETE.endTime()); + assertEquals(BILLING_TIER, QUERY_STATISTICS_INCOMPLETE.billingTier()); + assertEquals(CACHE_HIT, QUERY_STATISTICS_INCOMPLETE.cacheHit()); + assertEquals(null, QUERY_STATISTICS_INCOMPLETE.totalBytesBilled()); + assertEquals(null, QUERY_STATISTICS_INCOMPLETE.totalBytesProcessed()); + assertEquals(null, QUERY_STATISTICS_INCOMPLETE.queryPlan()); + } + + @Test + public void testToPbAndFromPb() { + compareExtractStatistics(EXTRACT_STATISTICS, + 
ExtractStatistics.fromPb(EXTRACT_STATISTICS.toPb())); + compareLoadStatistics(LOAD_STATISTICS, LoadStatistics.fromPb(LOAD_STATISTICS.toPb())); + compareQueryStatistics(QUERY_STATISTICS, QueryStatistics.fromPb(QUERY_STATISTICS.toPb())); + compareStatistics(STATISTICS, JobStatistics.fromPb(STATISTICS.toPb())); + + compareLoadStatistics(LOAD_STATISTICS_INCOMPLETE, + LoadStatistics.fromPb(LOAD_STATISTICS_INCOMPLETE.toPb())); + compareQueryStatistics(QUERY_STATISTICS_INCOMPLETE, + QueryStatistics.fromPb(QUERY_STATISTICS_INCOMPLETE.toPb())); + } + + private void compareExtractStatistics(ExtractStatistics expected, ExtractStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.destinationUriFileCounts(), value.destinationUriFileCounts()); + } + + private void compareLoadStatistics(LoadStatistics expected, LoadStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.inputBytes(), value.inputBytes()); + assertEquals(expected.inputFiles(), value.inputFiles()); + assertEquals(expected.outputBytes(), value.outputBytes()); + assertEquals(expected.outputRows(), value.outputRows()); + } + + private void compareQueryStatistics(QueryStatistics expected, QueryStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.billingTier(), value.billingTier()); + assertEquals(expected.cacheHit(), value.cacheHit()); + assertEquals(expected.totalBytesBilled(), value.totalBytesBilled()); + assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed()); + assertEquals(expected.queryPlan(), value.queryPlan()); + } + + private void compareStatistics(JobStatistics expected, JobStatistics value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.creationTime(), value.creationTime()); + 
assertEquals(expected.endTime(), value.endTime()); + assertEquals(expected.startTime(), value.startTime()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java new file mode 100644 index 000000000000..c44386a3e72c --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class JobStatusTest { + + private static final JobStatus.State STATE = JobStatus.State.DONE; + private static final BigQueryError ERROR = + new BigQueryError("reason", "location", "message", "debugInfo"); + private static final List ALL_ERRORS = ImmutableList.of( + new BigQueryError("reason1", "location1", "message1", "debugInfo1"), + new BigQueryError("reason2", "location2", "message2", "debugInfo2")); + private static final JobStatus JOB_STATUS = new JobStatus(STATE, ERROR, ALL_ERRORS); + private static final JobStatus JOB_STATUS_INCOMPLETE1 = new JobStatus(STATE, ERROR, null); + private static final JobStatus JOB_STATUS_INCOMPLETE2 = new JobStatus(STATE, null, null); + + @Test + public void testConstructor() { + assertEquals(STATE, JOB_STATUS.state()); + assertEquals(ERROR, JOB_STATUS.error()); + assertEquals(ALL_ERRORS, JOB_STATUS.executionErrors()); + + assertEquals(STATE, JOB_STATUS_INCOMPLETE1.state()); + assertEquals(ERROR, JOB_STATUS_INCOMPLETE1.error()); + assertEquals(null, JOB_STATUS_INCOMPLETE1.executionErrors()); + + assertEquals(STATE, JOB_STATUS_INCOMPLETE2.state()); + assertEquals(null, JOB_STATUS_INCOMPLETE2.error()); + assertEquals(null, JOB_STATUS_INCOMPLETE2.executionErrors()); + } + + @Test + public void testToPbAndFromPb() { + compareStatus(JOB_STATUS, JobStatus.fromPb(JOB_STATUS.toPb())); + compareStatus(JOB_STATUS_INCOMPLETE1, JobStatus.fromPb(JOB_STATUS_INCOMPLETE1.toPb())); + compareStatus(JOB_STATUS_INCOMPLETE2, JobStatus.fromPb(JOB_STATUS_INCOMPLETE2.toPb())); + } + + private void compareStatus(JobStatus expected, JobStatus value) { + assertEquals(expected, value); + assertEquals(expected.state(), value.state()); + assertEquals(expected.error(), value.error()); + assertEquals(expected.executionErrors(), value.executionErrors()); + } +} 
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java new file mode 100644 index 000000000000..db51706fff5a --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java @@ -0,0 +1,258 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import org.junit.After; +import org.junit.Test; + +public class JobTest { + + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); + private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final JobStatus JOB_STATUS = new 
JobStatus(JobStatus.State.DONE); + private static final JobStatistics COPY_JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final CopyJobConfiguration COPY_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2); + private static final JobInfo JOB_INFO = JobInfo.builder(COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + + private BigQuery serviceMockReturnsOptions = createStrictMock(BigQuery.class); + private BigQueryOptions mockOptions = createMock(BigQueryOptions.class); + private BigQuery bigquery; + private Job expectedJob; + private Job job; + + private void initializeExpectedJob(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); + bigquery = createStrictMock(BigQuery.class); + expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(JOB_INFO)); + } + + private void initializeJob() { + job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); + } + + @After + public void tearDown() throws Exception { + verify(bigquery, serviceMockReturnsOptions); + } + + @Test + public void testBuilder() { + initializeExpectedJob(2); + replay(bigquery); + Job builtJob = new Job.Builder(serviceMockReturnsOptions, COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + assertEquals(ETAG, builtJob.etag()); + assertEquals(ID, builtJob.id()); + assertEquals(SELF_LINK, builtJob.selfLink()); + assertEquals(EMAIL, builtJob.userEmail()); + assertEquals(JOB_ID, builtJob.jobId()); + assertEquals(JOB_STATUS, builtJob.status()); + assertEquals(COPY_CONFIGURATION, builtJob.configuration()); + assertEquals(COPY_JOB_STATISTICS, 
builtJob.statistics()); + assertSame(serviceMockReturnsOptions, builtJob.bigquery()); + } + + @Test + public void testToBuilder() { + initializeExpectedJob(4); + replay(bigquery); + compareJob(expectedJob, expectedJob.toBuilder().build()); + } + + @Test + public void testExists_True() throws Exception { + initializeExpectedJob(1); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(expectedJob); + replay(bigquery); + initializeJob(); + assertTrue(job.exists()); + } + + @Test + public void testExists_False() throws Exception { + initializeExpectedJob(1); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); + replay(bigquery); + initializeJob(); + assertFalse(job.exists()); + } + + @Test + public void testIsDone_True() throws Exception { + initializeExpectedJob(2); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + JobStatus status = createStrictMock(JobStatus.class); + expect(status.state()).andReturn(JobStatus.State.DONE); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) + .andReturn(expectedJob.toBuilder().status(status).build()); + replay(status, bigquery); + initializeJob(); + assertTrue(job.isDone()); + verify(status); + } + + @Test + public void testIsDone_False() throws Exception { + initializeExpectedJob(2); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + JobStatus status = createStrictMock(JobStatus.class); + expect(status.state()).andReturn(JobStatus.State.RUNNING); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) + 
.andReturn(expectedJob.toBuilder().status(status).build()); + replay(status, bigquery); + initializeJob(); + assertFalse(job.isDone()); + verify(status); + } + + @Test + public void testIsDone_NotExists() throws Exception { + initializeExpectedJob(1); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); + replay(bigquery); + initializeJob(); + assertFalse(job.isDone()); + } + + @Test + public void testReload() throws Exception { + initializeExpectedJob(4); + JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(expectedJob); + replay(bigquery); + initializeJob(); + Job updatedJob = job.reload(); + compareJob(expectedJob, updatedJob); + } + + @Test + public void testReloadNull() throws Exception { + initializeExpectedJob(1); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(null); + replay(bigquery); + initializeJob(); + assertNull(job.reload()); + } + + @Test + public void testReloadWithOptions() throws Exception { + initializeExpectedJob(4); + JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); + Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(updatedInfo)); + expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.getJob(JOB_INFO.jobId().job(), BigQuery.JobOption.fields())) + .andReturn(expectedJob); + replay(bigquery); + initializeJob(); + Job updatedJob = job.reload(BigQuery.JobOption.fields()); + compareJob(expectedJob, updatedJob); + } + + @Test + public void testCancel() throws Exception { + initializeExpectedJob(1); + 
expect(bigquery.options()).andReturn(mockOptions); + expect(bigquery.cancel(JOB_INFO.jobId())).andReturn(true); + replay(bigquery); + initializeJob(); + assertTrue(job.cancel()); + } + + @Test + public void testBigquery() { + initializeExpectedJob(1); + replay(bigquery); + assertSame(serviceMockReturnsOptions, expectedJob.bigquery()); + } + + @Test + public void testToAndFromPb() { + initializeExpectedJob(4); + replay(bigquery); + compareJob(expectedJob, Job.fromPb(serviceMockReturnsOptions, expectedJob.toPb())); + } + + private void compareJob(Job expected, Job value) { + assertEquals(expected, value); + compareJobInfo(expected, value); + assertEquals(expected.bigquery().options(), value.bigquery().options()); + } + + private void compareJobInfo(JobInfo expected, JobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.status(), value.status()); + assertEquals(expected.statistics(), value.statistics()); + assertEquals(expected.userEmail(), value.userEmail()); + assertEquals(expected.configuration(), value.configuration()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java new file mode 100644 index 000000000000..88ae6a4fc1b8 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java @@ -0,0 +1,140 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +public class LoadJobConfigurationTest { + + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.UTF_8) + .build(); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Integer MAX_BAD_RECORDS = 42; + private static final String FORMAT = "CSV"; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription") + .build(); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA); + private static final LoadJobConfiguration LOAD_CONFIGURATION = + LoadJobConfiguration.builder(TABLE_ID, SOURCE_URIS) + .createDisposition(CREATE_DISPOSITION) + 
.writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + + @Test + public void testToBuilder() { + compareLoadJobConfiguration(LOAD_CONFIGURATION, LOAD_CONFIGURATION.toBuilder().build()); + LoadJobConfiguration configuration = LOAD_CONFIGURATION.toBuilder() + .destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", configuration.destinationTable().table()); + configuration = configuration.toBuilder().destinationTable(TABLE_ID).build(); + compareLoadJobConfiguration(LOAD_CONFIGURATION, configuration); + } + + @Test + public void testOf() { + LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(SOURCE_URIS, configuration.sourceUris()); + configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS, CSV_OPTIONS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(FORMAT, configuration.format()); + assertEquals(CSV_OPTIONS, configuration.csvOptions()); + assertEquals(SOURCE_URIS, configuration.sourceUris()); + configuration = LoadJobConfiguration.of(TABLE_ID, "uri1"); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(ImmutableList.of("uri1"), configuration.sourceUris()); + configuration = LoadJobConfiguration.of(TABLE_ID, "uri1", CSV_OPTIONS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(FORMAT, configuration.format()); + assertEquals(CSV_OPTIONS, configuration.csvOptions()); + assertEquals(ImmutableList.of("uri1"), configuration.sourceUris()); + } + + @Test + public void testToBuilderIncomplete() { + LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + compareLoadJobConfiguration(configuration, configuration.toBuilder().build()); + } + + @Test + public 
void testBuilder() { + assertEquals(TABLE_ID, LOAD_CONFIGURATION.destinationTable()); + assertEquals(CREATE_DISPOSITION, LOAD_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, LOAD_CONFIGURATION.writeDisposition()); + assertEquals(CSV_OPTIONS, LOAD_CONFIGURATION.csvOptions()); + assertEquals(FORMAT, LOAD_CONFIGURATION.format()); + assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_CONFIGURATION.ignoreUnknownValues()); + assertEquals(MAX_BAD_RECORDS, LOAD_CONFIGURATION.maxBadRecords()); + assertEquals(PROJECTION_FIELDS, LOAD_CONFIGURATION.projectionFields()); + assertEquals(TABLE_SCHEMA, LOAD_CONFIGURATION.schema()); + } + + @Test + public void testToPbAndFromPb() { + compareLoadJobConfiguration(LOAD_CONFIGURATION, + LoadJobConfiguration.fromPb(LOAD_CONFIGURATION.toPb())); + LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + compareLoadJobConfiguration(configuration, LoadJobConfiguration.fromPb(configuration.toPb())); + } + + @Test + public void testSetProjectId() { + LoadConfiguration configuration = LOAD_CONFIGURATION.setProjectId("p"); + assertEquals("p", configuration.destinationTable().project()); + } + + private void compareLoadJobConfiguration(LoadJobConfiguration expected, + LoadJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + assertEquals(expected.csvOptions(), value.csvOptions()); + assertEquals(expected.format(), value.format()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.maxBadRecords(), value.maxBadRecords()); + assertEquals(expected.projectionFields(), value.projectionFields()); + assertEquals(expected.schema(), 
value.schema()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java new file mode 100644 index 000000000000..2c89ececedb8 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java @@ -0,0 +1,38 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.gcloud.bigquery.spi.BigQueryRpc; + +import org.junit.Test; + +public class OptionTest { + + @Test + public void testOption() { + Option option = new Option(BigQueryRpc.Option.PAGE_TOKEN, "token"); + assertEquals(BigQueryRpc.Option.PAGE_TOKEN, option.rpcOption()); + assertEquals("token", option.value()); + } + + @Test(expected = NullPointerException.class) + public void testNullRpcOption() { + new Option(null, "token"); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java new file mode 100644 index 000000000000..1ef270ee69cf --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java @@ -0,0 +1,169 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; +import com.google.gcloud.bigquery.QueryJobConfiguration.Priority; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class QueryJobConfigurationTest { + + private static final String QUERY = "BigQuery SQL"; + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + 
.description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build(); + private static final ExternalTableDefinition TABLE_CONFIGURATION = + ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final Map TABLE_DEFINITIONS = + ImmutableMap.of("tableName", TABLE_CONFIGURATION); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Priority PRIORITY = Priority.BATCH; + private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean USE_QUERY_CACHE = false; + private static final boolean FLATTEN_RESULTS = true; + private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of( + UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = + QueryJobConfiguration.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .tableDefinitions(TABLE_DEFINITIONS) + .allowLargeResults(ALLOW_LARGE_RESULTS) + .createDisposition(CREATE_DISPOSITION) + .defaultDataset(DATASET_ID) + .destinationTable(TABLE_ID) + .writeDisposition(WRITE_DISPOSITION) + .priority(PRIORITY) + .flattenResults(FLATTEN_RESULTS) + .userDefinedFunctions(USER_DEFINED_FUNCTIONS) + .dryRun(true) + .build(); + + @Test + public void testToBuilder() { + compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION, + QUERY_JOB_CONFIGURATION.toBuilder().build()); + QueryJobConfiguration job = 
QUERY_JOB_CONFIGURATION.toBuilder() + .query("New BigQuery SQL") + .build(); + assertEquals("New BigQuery SQL", job.query()); + job = job.toBuilder().query(QUERY).build(); + compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION, job); + } + + @Test + public void testOf() { + QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); + assertEquals(QUERY, job.query()); + } + + @Test + public void testToBuilderIncomplete() { + QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); + compareQueryJobConfiguration(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(ALLOW_LARGE_RESULTS, QUERY_JOB_CONFIGURATION.allowLargeResults()); + assertEquals(CREATE_DISPOSITION, QUERY_JOB_CONFIGURATION.createDisposition()); + assertEquals(DATASET_ID, QUERY_JOB_CONFIGURATION.defaultDataset()); + assertEquals(TABLE_ID, QUERY_JOB_CONFIGURATION.destinationTable()); + assertEquals(FLATTEN_RESULTS, QUERY_JOB_CONFIGURATION.flattenResults()); + assertEquals(PRIORITY, QUERY_JOB_CONFIGURATION.priority()); + assertEquals(QUERY, QUERY_JOB_CONFIGURATION.query()); + assertEquals(TABLE_DEFINITIONS, QUERY_JOB_CONFIGURATION.tableDefinitions()); + assertEquals(USE_QUERY_CACHE, QUERY_JOB_CONFIGURATION.useQueryCache()); + assertEquals(USER_DEFINED_FUNCTIONS, QUERY_JOB_CONFIGURATION.userDefinedFunctions()); + assertEquals(WRITE_DISPOSITION, QUERY_JOB_CONFIGURATION.writeDisposition()); + assertTrue(QUERY_JOB_CONFIGURATION.dryRun()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(QUERY_JOB_CONFIGURATION.toPb().getQuery()); + assertNull(QUERY_JOB_CONFIGURATION.toPb().getExtract()); + assertNull(QUERY_JOB_CONFIGURATION.toPb().getCopy()); + assertNull(QUERY_JOB_CONFIGURATION.toPb().getLoad()); + compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION, + QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb())); + QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); + compareQueryJobConfiguration(job, 
QueryJobConfiguration.fromPb(job.toPb())); + } + + @Test + public void testSetProjectId() { + QueryJobConfiguration configuration = QUERY_JOB_CONFIGURATION.setProjectId("p"); + assertEquals("p", configuration.defaultDataset().project()); + assertEquals("p", configuration.destinationTable().project()); + } + + private void compareQueryJobConfiguration(QueryJobConfiguration expected, + QueryJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.allowLargeResults(), value.allowLargeResults()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.flattenResults(), value.flattenResults()); + assertEquals(expected.priority(), value.priority()); + assertEquals(expected.query(), value.query()); + assertEquals(expected.tableDefinitions(), value.tableDefinitions()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java new file mode 100644 index 000000000000..7875dee9e315 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java @@ -0,0 +1,108 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class QueryRequestTest { + + private static final String QUERY = "BigQuery SQL"; + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final Boolean USE_QUERY_CACHE = true; + private static final Boolean DRY_RUN = false; + private static final Long PAGE_SIZE = 42L; + private static final Long MAX_WAIT_TIME = 42000L; + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .defaultDataset(DATASET_ID) + .dryRun(DRY_RUN) + .pageSize(PAGE_SIZE) + .maxWaitTime(MAX_WAIT_TIME) + .build(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testToBuilder() { + compareQueryRequest(QUERY_REQUEST, QUERY_REQUEST.toBuilder().build()); + QueryRequest queryRequest = QUERY_REQUEST.toBuilder() + .query("New BigQuery SQL") + .build(); + assertEquals("New BigQuery SQL", queryRequest.query()); + queryRequest = queryRequest.toBuilder().query(QUERY).build(); + compareQueryRequest(QUERY_REQUEST, queryRequest); + } + + @Test + public void testToBuilderIncomplete() { + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, queryRequest.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(QUERY, QUERY_REQUEST.query()); + 
assertEquals(USE_QUERY_CACHE, QUERY_REQUEST.useQueryCache()); + assertEquals(DATASET_ID, QUERY_REQUEST.defaultDataset()); + assertEquals(DRY_RUN, QUERY_REQUEST.dryRun()); + assertEquals(PAGE_SIZE, QUERY_REQUEST.pageSize()); + assertEquals(MAX_WAIT_TIME, QUERY_REQUEST.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.builder(null); + } + + @Test + public void testOf() { + QueryRequest request = QueryRequest.of(QUERY); + assertEquals(QUERY, request.query()); + assertNull(request.useQueryCache()); + assertNull(request.defaultDataset()); + assertNull(request.dryRun()); + assertNull(request.pageSize()); + assertNull(request.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.of(null); + } + + @Test + public void testToPbAndFromPb() { + compareQueryRequest(QUERY_REQUEST, QueryRequest.fromPb(QUERY_REQUEST.toPb())); + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, QueryRequest.fromPb(queryRequest.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals("p", QUERY_REQUEST.setProjectId("p").defaultDataset().project()); + } + + private void compareQueryRequest(QueryRequest expected, QueryRequest value) { + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.pageSize(), value.pageSize()); + assertEquals(expected.maxWaitTime(), value.maxWaitTime()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java new file mode 100644 index 000000000000..08e885c8b3aa --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 2015 Google 
Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class QueryResponseTest { + + private static final String ETAG = "etag"; + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1); + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final Long TOTAL_ROWS = 42L; + private static final QueryResult.QueryResultsPageFetcher FETCHER = + new QueryResult.QueryResultsPageFetcher() { + @Override + public QueryResult nextPage() { + return null; + } + }; + private static final Long TOTAL_BYTES_PROCESSED = 4200L; + private static final Boolean JOB_COMPLETE = true; + private static final List ERRORS = ImmutableList.of( + new BigQueryError("reason1", "location1", "message1", "debugInfo1"), + new BigQueryError("reason2", "location2", "message2", "debugInfo2") + ); + private static final Boolean CACHE_HIT = false; + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(SCHEMA) + 
.totalRows(TOTAL_ROWS) + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .cursor("cursor") + .pageFetcher(FETCHER) + .results(ImmutableList.>of()) + .cacheHit(CACHE_HIT) + .build(); + private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder() + .etag(ETAG) + .jobId(JOB_ID) + .jobCompleted(JOB_COMPLETE) + .executionErrors(ERRORS) + .result(QUERY_RESULT) + .build(); + + @Test + public void testBuilder() { + assertEquals(ETAG, QUERY_RESPONSE.etag()); + assertEquals(QUERY_RESULT, QUERY_RESPONSE.result()); + assertEquals(JOB_ID, QUERY_RESPONSE.jobId()); + assertEquals(JOB_COMPLETE, QUERY_RESPONSE.jobCompleted()); + assertEquals(ERRORS, QUERY_RESPONSE.executionErrors()); + assertTrue(QUERY_RESPONSE.hasErrors()); + } + + @Test + public void testBuilderIncomplete() { + QueryResponse queryResponse = QueryResponse.builder().jobCompleted(false).build(); + assertNull(queryResponse.etag()); + assertNull(queryResponse.result()); + assertNull(queryResponse.jobId()); + assertFalse(queryResponse.jobCompleted()); + assertEquals(ImmutableList.of(), queryResponse.executionErrors()); + assertFalse(queryResponse.hasErrors()); + } + + @Test + public void testEquals() { + compareQueryResponse(QUERY_RESPONSE, QUERY_RESPONSE); + } + + private void compareQueryResponse(QueryResponse expected, QueryResponse value) { + assertEquals(expected, value); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.result(), value.result()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.jobCompleted(), value.jobCompleted()); + assertEquals(expected.executionErrors(), value.executionErrors()); + assertEquals(expected.hasErrors(), value.hasErrors()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java new file mode 100644 index 000000000000..b6810ed93143 --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class QueryResultTest { + + private static final String CURSOR = "cursor"; + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1); + private static final long TOTAL_ROWS = 42L; + private static final QueryResult.QueryResultsPageFetcher FETCHER = + new QueryResult.QueryResultsPageFetcher() { + @Override + public QueryResult nextPage() { + return null; + } + }; + private static final long TOTAL_BYTES_PROCESSED = 4200L; + private static final boolean CACHE_HIT = false; + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(SCHEMA) + .totalRows(TOTAL_ROWS) + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .cursor(CURSOR) + .pageFetcher(FETCHER) + .results(ImmutableList.>of()) + .cacheHit(CACHE_HIT) + .build(); + private static final QueryResult QUERY_RESULT_INCOMPLETE = QueryResult.builder() + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .build(); + + 
@Test + public void testBuilder() { + assertEquals(SCHEMA, QUERY_RESULT.schema()); + assertEquals(TOTAL_ROWS, QUERY_RESULT.totalRows()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT.totalBytesProcessed()); + assertEquals(CACHE_HIT, QUERY_RESULT.cacheHit()); + assertEquals(CURSOR, QUERY_RESULT.nextPageCursor()); + assertEquals(null, QUERY_RESULT.nextPage()); + assertEquals(null, QUERY_RESULT_INCOMPLETE.schema()); + assertEquals(0L, QUERY_RESULT_INCOMPLETE.totalRows()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT_INCOMPLETE.totalBytesProcessed()); + assertEquals(false, QUERY_RESULT_INCOMPLETE.cacheHit()); + assertEquals(null, QUERY_RESULT_INCOMPLETE.nextPageCursor()); + assertEquals(null, QUERY_RESULT_INCOMPLETE.nextPage()); + } + + @Test + public void testEquals() { + compareQueryResult(QUERY_RESULT, QUERY_RESULT); + compareQueryResult(QUERY_RESULT_INCOMPLETE, QUERY_RESULT_INCOMPLETE); + } + + private void compareQueryResult(QueryResult expected, QueryResult value) { + assertEquals(expected, value); + assertEquals(expected.nextPage(), value.nextPage()); + assertEquals(expected.nextPageCursor(), value.nextPageCursor()); + assertEquals(expected.values(), value.values()); + assertEquals(expected.schema(), value.schema()); + assertEquals(expected.totalRows(), value.totalRows()); + assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed()); + assertEquals(expected.cacheHit(), value.cacheHit()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java new file mode 100644 index 000000000000..99a7c8096454 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java @@ -0,0 +1,131 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.api.services.bigquery.model.ExplainQueryStep; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.QueryStage.QueryStep; + +import org.junit.Test; + +import java.util.List; + +public class QueryStageTest { + + private static final List SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); + private static final List SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); + private static final QueryStep QUERY_STEP1 = new QueryStep("KIND", SUBSTEPS1); + private static final QueryStep QUERY_STEP2 = new QueryStep("KIND", SUBSTEPS2); + private static final double COMPUTE_RATIO_AVG = 1.1; + private static final double COMPUTE_RATIO_MAX = 2.2; + private static final long ID = 42L; + private static final String NAME = "StageName"; + private static final double READ_RATIO_AVG = 3.3; + private static final double READ_RATIO_MAX = 4.4; + private static final long RECORDS_READ = 5L; + private static final long RECORDS_WRITTEN = 6L; + private static final List STEPS = ImmutableList.of(QUERY_STEP1, QUERY_STEP2); + private static final double WAIT_RATIO_AVG = 7.7; + private static final double WAIT_RATIO_MAX = 8.8; + private static final double WRITE_RATIO_AVG = 9.9; + private static final double WRITE_RATIO_MAX = 10.10; + private static final QueryStage QUERY_STAGE = QueryStage.builder() + .computeRatioAvg(COMPUTE_RATIO_AVG) + .computeRatioMax(COMPUTE_RATIO_MAX) + .id(ID) + .name(NAME) + .readRatioAvg(READ_RATIO_AVG) + 
.readRatioMax(READ_RATIO_MAX) + .recordsRead(RECORDS_READ) + .recordsWritten(RECORDS_WRITTEN) + .steps(STEPS) + .waitRatioAvg(WAIT_RATIO_AVG) + .waitRatioMax(WAIT_RATIO_MAX) + .writeRatioAvg(WRITE_RATIO_AVG) + .writeRatioMax(WRITE_RATIO_MAX) + .build(); + + @Test + public void testQueryStepConstructor() { + assertEquals("KIND", QUERY_STEP1.name()); + assertEquals("KIND", QUERY_STEP2.name()); + assertEquals(SUBSTEPS1, QUERY_STEP1.substeps()); + assertEquals(SUBSTEPS2, QUERY_STEP2.substeps()); + } + + @Test + public void testBuilder() { + assertEquals(COMPUTE_RATIO_AVG, QUERY_STAGE.computeRatioAvg(), 0); + assertEquals(COMPUTE_RATIO_MAX, QUERY_STAGE.computeRatioMax(), 0); + assertEquals(ID, QUERY_STAGE.id()); + assertEquals(NAME, QUERY_STAGE.name()); + assertEquals(READ_RATIO_AVG, QUERY_STAGE.readRatioAvg(), 0); + assertEquals(READ_RATIO_MAX, QUERY_STAGE.readRatioMax(), 0); + assertEquals(RECORDS_READ, QUERY_STAGE.recordsRead()); + assertEquals(RECORDS_WRITTEN, QUERY_STAGE.recordsWritten()); + assertEquals(STEPS, QUERY_STAGE.steps()); + assertEquals(WAIT_RATIO_AVG, QUERY_STAGE.waitRatioAvg(), 0); + assertEquals(WAIT_RATIO_MAX, QUERY_STAGE.waitRatioMax(), 0); + assertEquals(WRITE_RATIO_AVG, QUERY_STAGE.writeRatioAvg(), 0); + assertEquals(WRITE_RATIO_MAX, QUERY_STAGE.writeRatioMax(), 0); + } + + @Test + public void testToAndFromPb() { + compareQueryStep(QUERY_STEP1, QueryStep.fromPb(QUERY_STEP1.toPb())); + compareQueryStep(QUERY_STEP2, QueryStep.fromPb(QUERY_STEP2.toPb())); + compareQueryStage(QUERY_STAGE, QueryStage.fromPb(QUERY_STAGE.toPb())); + ExplainQueryStep stepPb = new ExplainQueryStep(); + stepPb.setKind("KIND"); + stepPb.setSubsteps(null); + compareQueryStep(new QueryStep("KIND", ImmutableList.of()), QueryStep.fromPb(stepPb)); + } + + @Test + public void testEquals() { + compareQueryStep(QUERY_STEP1, QUERY_STEP1); + compareQueryStep(QUERY_STEP2, QUERY_STEP2); + compareQueryStage(QUERY_STAGE, QUERY_STAGE); + } + + private void compareQueryStage(QueryStage 
expected, QueryStage value) { + assertEquals(expected, value); + assertEquals(expected.computeRatioAvg(), value.computeRatioAvg(), 0); + assertEquals(expected.computeRatioMax(), value.computeRatioMax(), 0); + assertEquals(expected.id(), value.id()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.readRatioAvg(), value.readRatioAvg(), 0); + assertEquals(expected.readRatioMax(), value.readRatioMax(), 0); + assertEquals(expected.recordsRead(), value.recordsRead()); + assertEquals(expected.recordsWritten(), value.recordsWritten()); + assertEquals(expected.steps(), value.steps()); + assertEquals(expected.waitRatioAvg(), value.waitRatioAvg(), 0); + assertEquals(expected.waitRatioMax(), value.waitRatioMax(), 0); + assertEquals(expected.writeRatioAvg(), value.writeRatioAvg(), 0); + assertEquals(expected.writeRatioMax(), value.writeRatioMax(), 0); + assertEquals(expected.hashCode(), value.hashCode()); + } + + private void compareQueryStep(QueryStep expected, QueryStep value) { + assertEquals(expected, value); + assertEquals(expected.name(), value.name()); + assertEquals(expected.substeps(), value.substeps()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java new file mode 100644 index 000000000000..267ae161b7aa --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java @@ -0,0 +1,92 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.bigquery.BigQuery.DatasetDeleteOption; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; + +import org.easymock.EasyMock; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.concurrent.ExecutionException; + +public class RemoteBigQueryHelperTest { + + private static final String DATASET_NAME = "dataset-name"; + private static final String PROJECT_ID = "project-id"; + private static final String JSON_KEY = "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + 
"n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testForceDelete() throws InterruptedException, ExecutionException { + BigQuery bigqueryMock = EasyMock.createMock(BigQuery.class); + EasyMock.expect(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents())) + .andReturn(true); + EasyMock.replay(bigqueryMock); + assertTrue(RemoteBigQueryHelper.forceDelete(bigqueryMock, DATASET_NAME)); + EasyMock.verify(bigqueryMock); + } + + @Test + public void testCreateFromStream() { + RemoteBigQueryHelper 
helper = RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM); + BigQueryOptions options = helper.options(); + assertEquals(PROJECT_ID, options.projectId()); + assertEquals(60000, options.connectTimeout()); + assertEquals(60000, options.readTimeout()); + assertEquals(10, options.retryParams().retryMaxAttempts()); + assertEquals(6, options.retryParams().retryMinAttempts()); + assertEquals(30000, options.retryParams().maxRetryDelayMillis()); + assertEquals(120000, options.retryParams().totalRetryPeriodMillis()); + assertEquals(250, options.retryParams().initialRetryDelayMillis()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java new file mode 100644 index 000000000000..d24268d2e7cd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java @@ -0,0 +1,77 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class SchemaTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final List FIELDS = ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2, + FIELD_SCHEMA3); + private static final Schema TABLE_SCHEMA = Schema.builder().fields(FIELDS).build(); + + @Test + public void testToBuilder() { + compareTableSchema(TABLE_SCHEMA, TABLE_SCHEMA.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELDS, TABLE_SCHEMA.fields()); + Schema schema = TABLE_SCHEMA.toBuilder() + .fields(FIELD_SCHEMA1, FIELD_SCHEMA2) + .addField(FIELD_SCHEMA3) + .build(); + compareTableSchema(TABLE_SCHEMA, schema); + } + + @Test + public void testOf() { + compareTableSchema(TABLE_SCHEMA, Schema.of(FIELDS)); + } + + @Test + public void testToAndFromPb() { + compareTableSchema(TABLE_SCHEMA, Schema.fromPb(TABLE_SCHEMA.toPb())); + } + + private void compareTableSchema(Schema expected, Schema value) { + assertEquals(expected, value); + assertEquals(expected.fields(), value.fields()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java new file mode 100644 index 000000000000..254c8954bf30 --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java @@ -0,0 +1,304 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.StandardTableDefinition.StreamingBuffer; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +public class SerializationTest { + + private static final Acl DOMAIN_ACCESS = + Acl.of(new Acl.Domain("domain"), Acl.Role.WRITER); + private static final Acl GROUP_ACCESS = + Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER); + private static final Acl USER_ACCESS = Acl.of(new Acl.User("user"), Acl.Role.OWNER); + private static final Acl VIEW_ACCESS = + Acl.of(new Acl.View(TableId.of("project", "dataset", "table")), Acl.Role.WRITER); + private static 
final List ACCESS_RULES = ImmutableList.of(DOMAIN_ACCESS, GROUP_ACCESS, + VIEW_ACCESS, USER_ACCESS); + private static final Long CREATION_TIME = System.currentTimeMillis() - 10; + private static final Long DEFAULT_TABLE_EXPIRATION = 100L; + private static final String DESCRIPTION = "Description"; + private static final String ETAG = "0xFF00"; + private static final String FRIENDLY_NAME = "friendlyDataset"; + private static final String ID = "P/D:1"; + private static final Long LAST_MODIFIED = CREATION_TIME + 50; + private static final String LOCATION = ""; + private static final String SELF_LINK = "http://bigquery/p/d"; + private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID) + .acl(ACCESS_RULES) + .creationTime(CREATION_TIME) + .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION) + .description(DESCRIPTION) + .etag(ETAG) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModified(LAST_MODIFIED) + .location(LOCATION) + .selfLink(SELF_LINK) + .build(); + private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.ISO_8859_1) + .fieldDelimiter(",") + .quote("\"") + .skipLeadingRows(42) + .build(); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = 
Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION = + ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(42) + .build(); + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction("inline"); + private static final UserDefinedFunction URI_FUNCTION = + new UserDefinedFunction.UriFunction("URI"); + private static final TableDefinition TABLE_DEFINITION = StandardTableDefinition.builder() + .schema(TABLE_SCHEMA) + .location(LOCATION) + .streamingBuffer(STREAMING_BUFFER) + .build(); + private static final TableInfo TABLE_INFO = TableInfo.builder(TABLE_ID, TABLE_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final TableDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY"); + private static final TableInfo VIEW_INFO = TableInfo.builder(TABLE_ID, VIEW_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final TableInfo EXTERNAL_TABLE_INFO = + TableInfo.builder(TABLE_ID, EXTERNAL_TABLE_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final JobStatistics JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final JobStatistics.ExtractStatistics EXTRACT_STATISTICS = + JobStatistics.ExtractStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .destinationUriFileCounts(ImmutableList.of(42L)) + .build(); + private static final JobStatistics.LoadStatistics LOAD_STATISTICS = + JobStatistics.LoadStatistics.builder() + 
.creationTime(1L) + .endTime(3L) + .startTime(2L) + .inputFiles(42L) + .outputBytes(1024L) + .inputBytes(2048L) + .outputRows(24L) + .build(); + private static final JobStatistics.QueryStatistics QUERY_STATISTICS = + JobStatistics.QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + .billingTier(42) + .build(); + private static final BigQueryError BIGQUERY_ERROR = + new BigQueryError("reason", "location", "message", "debugInfo"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE, BIGQUERY_ERROR, + ImmutableList.of(BIGQUERY_ERROR)); + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID, TABLE_ID); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION = + ExtractJobConfiguration.of(TABLE_ID, SOURCE_URIS); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .schema(TABLE_SCHEMA) + .build(); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION = + LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = + QueryJobConfiguration.of("query"); + private static final JobInfo JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION); + private static final Map CONTENT1 = + ImmutableMap.of("key", "val1"); + private static final Map CONTENT2 = + ImmutableMap.of("key", "val2"); + private static final InsertAllRequest INSERT_ALL_REQUEST = InsertAllRequest.builder(TABLE_ID) + .addRow(CONTENT1) + .addRow(CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .build(); + private 
static final Map> ERRORS_MAP = + ImmutableMap.>of(0L, ImmutableList.of(BIGQUERY_ERROR)); + private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP); + private static final FieldValue FIELD_VALUE = + new FieldValue(FieldValue.Attribute.PRIMITIVE, "value"); + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("query") + .useQueryCache(true) + .defaultDataset(DATASET_ID) + .dryRun(false) + .pageSize(42L) + .maxWaitTime(10L) + .build(); + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(TABLE_SCHEMA) + .totalRows(1L) + .totalBytesProcessed(42L) + .cursor("cursor") + .pageFetcher(null) + .results(ImmutableList.>of()) + .build(); + private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder() + .etag(ETAG) + .jobId(JOB_ID) + .jobCompleted(true) + .result(QUERY_RESULT) + .build(); + private static final BigQuery BIGQUERY = + BigQueryOptions.builder().projectId("p1").build().service(); + private static final Dataset DATASET = + new Dataset(BIGQUERY, new DatasetInfo.BuilderImpl(DATASET_INFO)); + private static final Table TABLE = new Table(BIGQUERY, new TableInfo.BuilderImpl(TABLE_INFO)); + private static final Job JOB = new Job(BIGQUERY, new JobInfo.BuilderImpl(JOB_INFO)); + + @Test + public void testServiceOptions() throws Exception { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p1") + .authCredentials(AuthCredentials.createForAppEngine()) + .build(); + BigQueryOptions serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + + options = options.toBuilder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .authCredentials(null) + .build(); + serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + } + + @Test + public void testModelAndRequests() throws Exception { + Serializable[] objects = {DOMAIN_ACCESS, GROUP_ACCESS, USER_ACCESS, VIEW_ACCESS, DATASET_ID, + 
DATASET_INFO, TABLE_ID, CSV_OPTIONS, STREAMING_BUFFER, TABLE_DEFINITION, + EXTERNAL_TABLE_DEFINITION, VIEW_DEFINITION, TABLE_SCHEMA, TABLE_INFO, VIEW_INFO, + EXTERNAL_TABLE_INFO, INLINE_FUNCTION, URI_FUNCTION, JOB_STATISTICS, EXTRACT_STATISTICS, + LOAD_STATISTICS, QUERY_STATISTICS, BIGQUERY_ERROR, JOB_STATUS, JOB_ID, + COPY_JOB_CONFIGURATION, EXTRACT_JOB_CONFIGURATION, LOAD_CONFIGURATION, + LOAD_JOB_CONFIGURATION, QUERY_JOB_CONFIGURATION, JOB_INFO, INSERT_ALL_REQUEST, + INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, + BigQuery.DatasetOption.fields(), BigQuery.DatasetDeleteOption.deleteContents(), + BigQuery.DatasetListOption.all(), BigQuery.TableOption.fields(), + BigQuery.TableListOption.pageSize(42L), BigQuery.JobOption.fields(), + BigQuery.JobListOption.allUsers(), DATASET, TABLE, JOB}; + for (Serializable obj : objects) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @Test + public void testWriteChannelState() throws IOException, ClassNotFoundException { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .build(); + // avoid closing when you don't want partial writes upon failure + @SuppressWarnings("resource") + TableDataWriteChannel writer = + new TableDataWriteChannel(options, LOAD_CONFIGURATION, "upload-id"); + RestorableState state = writer.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); + assertEquals(state, deserializedState); + assertEquals(state.hashCode(), deserializedState.hashCode()); + assertEquals(state.toString(), deserializedState.toString()); + } + + @SuppressWarnings("unchecked") + private T serializeAndDeserialize(T obj) + throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + 
output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java new file mode 100644 index 000000000000..4c1be470ff57 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java @@ -0,0 +1,249 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.captureLong; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.eq; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.expectLastCall; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.gcloud.RestorableState; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.spi.BigQueryRpc; +import com.google.gcloud.bigquery.spi.BigQueryRpcFactory; + +import org.easymock.Capture; +import org.easymock.CaptureType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Random; + +public class TableDataWriteChannelTest { + + private static final String UPLOAD_ID = "uploadid"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(FormatOptions.json()) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .build(); + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private BigQueryRpc bigqueryRpcMock; + private 
TableDataWriteChannel writer; + + @Before + public void setUp() { + rpcFactoryMock = createMock(BigQueryRpcFactory.class); + bigqueryRpcMock = createMock(BigQueryRpc.class); + expect(rpcFactoryMock.create(anyObject(BigQueryOptions.class))) + .andReturn(bigqueryRpcMock); + replay(rpcFactoryMock); + options = BigQueryOptions.builder() + .projectId("projectid") + .serviceRpcFactory(rpcFactoryMock) + .build(); + } + + @After + public void tearDown() throws Exception { + verify(rpcFactoryMock, bigqueryRpcMock); + } + + @Test + public void testCreate() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + } + + @Test + public void testWriteWithoutFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); + } + + @Test + public void testWriteWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(CUSTOM_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.chunkSize(CUSTOM_CHUNK_SIZE); + ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); + assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); + assertArrayEquals(buffer.array(), capturedBuffer.getValue()); + } + + @Test + public void testWritesAndFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + 
eq(DEFAULT_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + for (int i = 0; i < buffers.length; i++) { + assertArrayEquals( + buffers[i].array(), + Arrays.copyOfRange( + capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1))); + } + } + + @Test + public void testCloseWithoutFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.close(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertTrue(!writer.isOpen()); + } + + @Test + public void testCloseWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), + eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.write(buffer); + writer.close(); + assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length); + assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE)); + assertTrue(!writer.isOpen()); + } + + @Test + public void testWriteClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = 
Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.close(); + try { + writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); + fail("Expected TableDataWriteChannel write to throw IOException"); + } catch (IOException ex) { + // expected + } + } + + @Test + public void testSaveAndRestore() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(CaptureType.ALL); + Capture capturedPosition = Capture.newInstance(CaptureType.ALL); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), + captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false)); + expectLastCall().times(2); + replay(bigqueryRpcMock); + ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); + ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); + assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); + assertEquals(new Long(0L), capturedPosition.getValues().get(0)); + RestorableState writerState = writer.capture(); + WriteChannel restoredWriter = writerState.restore(); + assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); + assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); + assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); + } + + @Test + public void testSaveAndRestoreClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + 
writer.close(); + RestorableState writerState = writer.capture(); + RestorableState expectedWriterState = + TableDataWriteChannel.StateImpl.builder(options, LOAD_CONFIGURATION, UPLOAD_ID) + .buffer(null) + .chunkSize(DEFAULT_CHUNK_SIZE) + .isOpen(false) + .position(0) + .build(); + WriteChannel restoredWriter = writerState.restore(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertEquals(expectedWriterState, restoredWriter.capture()); + } + + @Test + public void testStateEquals() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID).times(2); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + // avoid closing when you don't want partial writes upon failure + @SuppressWarnings("resource") + WriteChannel writer2 = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + RestorableState state = writer.capture(); + RestorableState state2 = writer2.capture(); + assertEquals(state, state2); + assertEquals(state.hashCode(), state2.hashCode()); + assertEquals(state.toString(), state2.toString()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDefinitionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDefinitionTest.java new file mode 100644 index 000000000000..d1e3635d00cb --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDefinitionTest.java @@ -0,0 +1,103 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.bigquery.StandardTableDefinition.StreamingBuffer; + +import org.junit.Test; + +public class TableDefinitionTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final Long NUM_BYTES = 42L; + private static final Long NUM_ROWS = 43L; + private static final String LOCATION = "US"; + private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final StandardTableDefinition TABLE_DEFINITION = + StandardTableDefinition.builder() + .location(LOCATION) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .streamingBuffer(STREAMING_BUFFER) + .schema(TABLE_SCHEMA) + .build(); + + @Test + public void testToBuilder() { + compareTableDefinition(TABLE_DEFINITION, TABLE_DEFINITION.toBuilder().build()); + StandardTableDefinition tableDefinition = 
TABLE_DEFINITION.toBuilder().location("EU").build(); + assertEquals("EU", tableDefinition.location()); + tableDefinition = tableDefinition.toBuilder() + .location(LOCATION) + .build(); + compareTableDefinition(TABLE_DEFINITION, tableDefinition); + } + + @Test + public void testToBuilderIncomplete() { + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + assertEquals(tableDefinition, tableDefinition.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TableDefinition.Type.TABLE, TABLE_DEFINITION.type()); + assertEquals(TABLE_SCHEMA, TABLE_DEFINITION.schema()); + assertEquals(LOCATION, TABLE_DEFINITION.location()); + assertEquals(NUM_BYTES, TABLE_DEFINITION.numBytes()); + assertEquals(NUM_ROWS, TABLE_DEFINITION.numRows()); + assertEquals(STREAMING_BUFFER, TABLE_DEFINITION.streamingBuffer()); + } + + @Test + public void testToAndFromPb() { + assertTrue(TableDefinition.fromPb(TABLE_DEFINITION.toPb()) instanceof StandardTableDefinition); + compareTableDefinition(TABLE_DEFINITION, + TableDefinition.fromPb(TABLE_DEFINITION.toPb())); + } + + private void compareTableDefinition(StandardTableDefinition expected, + StandardTableDefinition value) { + assertEquals(expected, value); + assertEquals(expected.schema(), value.schema()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.numBytes(), value.numBytes()); + assertEquals(expected.numRows(), value.numRows()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.streamingBuffer(), value.streamingBuffer()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java new file mode 100644 index 000000000000..bc013bfa5c31 --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class TableIdTest { + + private static final TableId TABLE = TableId.of("dataset", "table"); + private static final TableId TABLE_COMPLETE = TableId.of("project", "dataset", "table"); + + @Test + public void testOf() { + assertEquals(null, TABLE.project()); + assertEquals("dataset", TABLE.dataset()); + assertEquals("table", TABLE.table()); + assertEquals("project", TABLE_COMPLETE.project()); + assertEquals("dataset", TABLE_COMPLETE.dataset()); + assertEquals("table", TABLE_COMPLETE.table()); + } + + @Test + public void testEquals() { + compareTableIds(TABLE, TableId.of("dataset", "table")); + compareTableIds(TABLE_COMPLETE, TableId.of("project", "dataset", "table")); + } + + @Test + public void testToPbAndFromPb() { + compareTableIds(TABLE, TableId.fromPb(TABLE.toPb())); + compareTableIds(TABLE_COMPLETE, TableId.fromPb(TABLE_COMPLETE.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals(TABLE_COMPLETE, TABLE.setProjectId("project")); + } + + private void compareTableIds(TableId expected, TableId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + 
assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java new file mode 100644 index 000000000000..18b8be10d71e --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java @@ -0,0 +1,207 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class TableInfoTest { + + private static final String ETAG = "etag"; + private static final String ID = "project:dataset:table"; + private static final String SELF_LINK = "selfLink"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final String FRIENDLY_NAME = "friendlyName"; + private static final String DESCRIPTION = "description"; + private static final Long CREATION_TIME = 10L; + private static final Long EXPIRATION_TIME = 100L; + private static final Long LAST_MODIFIED_TIME = 20L; + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final Long NUM_BYTES = 42L; + private static final Long NUM_ROWS = 43L; + private static final String LOCATION = "US"; + private static final StandardTableDefinition.StreamingBuffer STREAMING_BUFFER = + new StandardTableDefinition.StreamingBuffer(1L, 2L, 3L); + private static final StandardTableDefinition TABLE_DEFINITION = StandardTableDefinition.builder() + .location(LOCATION) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .streamingBuffer(STREAMING_BUFFER) + .schema(TABLE_SCHEMA) + .build(); + + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private 
static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build(); + private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION = + ExternalTableDefinition.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + + private static final String VIEW_QUERY = "VIEW QUERY"; + private static final List USER_DEFINED_FUNCTIONS = + ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final ViewDefinition VIEW_TYPE = + ViewDefinition.builder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).build(); + + private static final TableInfo TABLE_INFO = TableInfo.builder(TABLE_ID, TABLE_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .selfLink(SELF_LINK) + .build(); + private static final TableInfo VIEW_INFO = TableInfo.builder(TABLE_ID, VIEW_TYPE) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .selfLink(SELF_LINK) + .build(); + private static final TableInfo EXTERNAL_TABLE_INFO = + TableInfo.builder(TABLE_ID, EXTERNAL_TABLE_DEFINITION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .selfLink(SELF_LINK) + .build(); + + @Test + public void testToBuilder() { + compareTableInfo(TABLE_INFO, TABLE_INFO.toBuilder().build()); + compareTableInfo(VIEW_INFO, VIEW_INFO.toBuilder().build()); + 
compareTableInfo(EXTERNAL_TABLE_INFO, EXTERNAL_TABLE_INFO.toBuilder().build()); + TableInfo tableInfo = TABLE_INFO.toBuilder() + .description("newDescription") + .build(); + assertEquals("newDescription", tableInfo.description()); + tableInfo = tableInfo.toBuilder() + .description("description") + .build(); + compareTableInfo(TABLE_INFO, tableInfo); + } + + @Test + public void testToBuilderIncomplete() { + TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = TableInfo.of(TABLE_ID, VIEW_TYPE); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = TableInfo.of(TABLE_ID, EXTERNAL_TABLE_DEFINITION); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, TABLE_INFO.tableId()); + assertEquals(CREATION_TIME, TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, TABLE_INFO.description()); + assertEquals(ETAG, TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, TABLE_INFO.friendlyName()); + assertEquals(ID, TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, TABLE_INFO.lastModifiedTime()); + assertEquals(TABLE_DEFINITION, TABLE_INFO.definition()); + assertEquals(SELF_LINK, TABLE_INFO.selfLink()); + assertEquals(TABLE_ID, VIEW_INFO.tableId()); + assertEquals(VIEW_TYPE, VIEW_INFO.definition()); + assertEquals(CREATION_TIME, VIEW_INFO.creationTime()); + assertEquals(DESCRIPTION, VIEW_INFO.description()); + assertEquals(ETAG, VIEW_INFO.etag()); + assertEquals(EXPIRATION_TIME, VIEW_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, VIEW_INFO.friendlyName()); + assertEquals(ID, VIEW_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, VIEW_INFO.lastModifiedTime()); + assertEquals(VIEW_TYPE, VIEW_INFO.definition()); + assertEquals(SELF_LINK, VIEW_INFO.selfLink()); + assertEquals(TABLE_ID, EXTERNAL_TABLE_INFO.tableId()); + assertEquals(CREATION_TIME, 
EXTERNAL_TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, EXTERNAL_TABLE_INFO.description()); + assertEquals(ETAG, EXTERNAL_TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, EXTERNAL_TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, EXTERNAL_TABLE_INFO.friendlyName()); + assertEquals(ID, EXTERNAL_TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, EXTERNAL_TABLE_INFO.lastModifiedTime()); + assertEquals(EXTERNAL_TABLE_DEFINITION, EXTERNAL_TABLE_INFO.definition()); + assertEquals(SELF_LINK, EXTERNAL_TABLE_INFO.selfLink()); + } + + @Test + public void testToAndFromPb() { + compareTableInfo(TABLE_INFO, TableInfo.fromPb(TABLE_INFO.toPb())); + compareTableInfo(VIEW_INFO, TableInfo.fromPb(VIEW_INFO.toPb())); + compareTableInfo(EXTERNAL_TABLE_INFO, TableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals("project", TABLE_INFO.setProjectId("project").tableId().project()); + assertEquals("project", EXTERNAL_TABLE_INFO.setProjectId("project").tableId().project()); + assertEquals("project", VIEW_INFO.setProjectId("project").tableId().project()); + } + + private void compareTableInfo(TableInfo expected, TableInfo value) { + assertEquals(expected, value); + assertEquals(expected.tableId(), value.tableId()); + assertEquals(expected.definition(), value.definition()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.expirationTime(), value.expirationTime()); + assertEquals(expected.friendlyName(), value.friendlyName()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.lastModifiedTime(), value.lastModifiedTime()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.definition(), value.definition()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;
import com.google.gcloud.Page;
import com.google.gcloud.PageImpl;
import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert;

import org.junit.After;
import org.junit.Test;

import java.util.Iterator;
import java.util.List;

/**
 * Unit tests for {@link Table}, using EasyMock for the {@link BigQuery} service.
 *
 * <p>Two mocks are in play: {@code serviceMockReturnsOptions} backs the "expected" table and only
 * ever answers {@code options()}; {@code bigquery} backs the table under test and carries the
 * per-test expectations. {@link #tearDown()} verifies both, so every test must {@code replay}
 * both mocks (directly or via the initialize helpers) before exercising the table.
 */
public class TableTest {

  private static final String ETAG = "etag";
  private static final String ID = "project:dataset:table1";
  private static final String SELF_LINK = "selfLink";
  private static final String FRIENDLY_NAME = "friendlyName";
  private static final String DESCRIPTION = "description";
  private static final Long CREATION_TIME = 10L;
  private static final Long EXPIRATION_TIME = 100L;
  private static final Long LAST_MODIFIED_TIME = 20L;
  private static final TableId TABLE_ID1 = TableId.of("dataset", "table1");
  private static final TableId TABLE_ID2 = TableId.of("dataset", "table2");
  private static final CopyJobConfiguration COPY_JOB_CONFIGURATION =
      CopyJobConfiguration.of(TABLE_ID2, TABLE_ID1);
  private static final JobInfo COPY_JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION);
  private static final JobInfo LOAD_JOB_INFO =
      JobInfo.of(LoadJobConfiguration.of(TABLE_ID1, ImmutableList.of("URI"), FormatOptions.json()));
  private static final JobInfo EXTRACT_JOB_INFO =
      JobInfo.of(ExtractJobConfiguration.of(TABLE_ID1, ImmutableList.of("URI"), "CSV"));
  private static final Field FIELD = Field.of("FieldName", Field.Type.integer());
  private static final TableDefinition TABLE_DEFINITION =
      StandardTableDefinition.of(Schema.of(FIELD));
  private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID1, TABLE_DEFINITION);
  private static final List<RowToInsert> ROWS_TO_INSERT = ImmutableList.of(
      RowToInsert.of("id1", ImmutableMap.<String, Object>of("key", "val1")),
      RowToInsert.of("id2", ImmutableMap.<String, Object>of("key", "val2")));
  private static final InsertAllRequest INSERT_ALL_REQUEST =
      InsertAllRequest.of(TABLE_ID1, ROWS_TO_INSERT);
  private static final InsertAllRequest INSERT_ALL_REQUEST_COMPLETE =
      InsertAllRequest.builder(TABLE_ID1, ROWS_TO_INSERT)
          .skipInvalidRows(true)
          .ignoreUnknownValues(true)
          .build();
  private static final InsertAllResponse EMPTY_INSERT_ALL_RESPONSE =
      new InsertAllResponse(ImmutableMap.<Long, List<BigQueryError>>of());
  private static final FieldValue FIELD_VALUE1 =
      new FieldValue(FieldValue.Attribute.PRIMITIVE, "val1");
  private static final FieldValue FIELD_VALUE2 =
      new FieldValue(FieldValue.Attribute.PRIMITIVE, "val1");
  private static final Iterable<List<FieldValue>> ROWS = ImmutableList.of(
      (List<FieldValue>) ImmutableList.of(FIELD_VALUE1), ImmutableList.of(FIELD_VALUE2));

  private BigQuery serviceMockReturnsOptions = createStrictMock(BigQuery.class);
  private BigQueryOptions mockOptions = createMock(BigQueryOptions.class);
  private BigQuery bigquery;
  private Table expectedTable;
  private Table table;

  /**
   * Builds {@code expectedTable} on a mock that answers {@code options()} exactly
   * {@code optionsCalls} times, and creates a fresh strict mock for {@code bigquery}.
   */
  private void initializeExpectedTable(int optionsCalls) {
    expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls);
    replay(serviceMockReturnsOptions);
    bigquery = createStrictMock(BigQuery.class);
    expectedTable = new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(TABLE_INFO));
  }

  /** Builds the table under test against {@code bigquery}; call only after {@code replay}. */
  private void initializeTable() {
    table = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO));
  }

  @After
  public void tearDown() throws Exception {
    verify(bigquery, serviceMockReturnsOptions);
  }

  @Test
  public void testBuilder() {
    initializeExpectedTable(2);
    replay(bigquery);
    Table builtTable = new Table.Builder(serviceMockReturnsOptions, TABLE_ID1, TABLE_DEFINITION)
        .creationTime(CREATION_TIME)
        .description(DESCRIPTION)
        .etag(ETAG)
        .expirationTime(EXPIRATION_TIME)
        .friendlyName(FRIENDLY_NAME)
        .id(ID)
        .lastModifiedTime(LAST_MODIFIED_TIME)
        .selfLink(SELF_LINK)
        .build();
    assertEquals(TABLE_ID1, builtTable.tableId());
    assertEquals(CREATION_TIME, builtTable.creationTime());
    assertEquals(DESCRIPTION, builtTable.description());
    assertEquals(ETAG, builtTable.etag());
    assertEquals(EXPIRATION_TIME, builtTable.expirationTime());
    assertEquals(FRIENDLY_NAME, builtTable.friendlyName());
    assertEquals(ID, builtTable.id());
    assertEquals(LAST_MODIFIED_TIME, builtTable.lastModifiedTime());
    assertEquals(TABLE_DEFINITION, builtTable.definition());
    assertEquals(SELF_LINK, builtTable.selfLink());
    assertSame(serviceMockReturnsOptions, builtTable.bigquery());
  }

  @Test
  public void testToBuilder() {
    initializeExpectedTable(4);
    replay(bigquery);
    compareTable(expectedTable, expectedTable.toBuilder().build());
  }

  @Test
  public void testExists_True() throws Exception {
    initializeExpectedTable(1);
    BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()};
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(expectedTable);
    replay(bigquery);
    initializeTable();
    assertTrue(table.exists());
  }

  @Test
  public void testExists_False() throws Exception {
    initializeExpectedTable(1);
    BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()};
    expect(bigquery.options()).andReturn(mockOptions);
    // A null service answer means "table not found" and must surface as exists() == false.
    expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(null);
    replay(bigquery);
    initializeTable();
    assertFalse(table.exists());
  }

  @Test
  public void testReload() throws Exception {
    initializeExpectedTable(4);
    TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build();
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(updatedInfo));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(expectedTable);
    replay(bigquery);
    initializeTable();
    Table updatedTable = table.reload();
    compareTable(expectedTable, updatedTable);
  }

  @Test
  public void testReloadNull() throws Exception {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null);
    replay(bigquery);
    initializeTable();
    assertNull(table.reload());
  }

  @Test
  public void testReloadWithOptions() throws Exception {
    initializeExpectedTable(4);
    TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build();
    Table expectedTable =
        new Table(serviceMockReturnsOptions, new TableInfo.BuilderImpl(updatedInfo));
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields()))
        .andReturn(expectedTable);
    replay(bigquery);
    initializeTable();
    Table updatedTable = table.reload(BigQuery.TableOption.fields());
    compareTable(expectedTable, updatedTable);
  }

  @Test
  public void testUpdate() {
    initializeExpectedTable(4);
    Table expectedUpdatedTable = expectedTable.toBuilder().description("Description").build();
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.update(eq(expectedTable))).andReturn(expectedUpdatedTable);
    replay(bigquery);
    initializeTable();
    Table actualUpdatedTable = table.update();
    compareTable(expectedUpdatedTable, actualUpdatedTable);
  }

  @Test
  public void testUpdateWithOptions() {
    initializeExpectedTable(4);
    Table expectedUpdatedTable = expectedTable.toBuilder().description("Description").build();
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.update(eq(expectedTable), eq(BigQuery.TableOption.fields())))
        .andReturn(expectedUpdatedTable);
    replay(bigquery);
    initializeTable();
    Table actualUpdatedTable = table.update(BigQuery.TableOption.fields());
    compareTable(expectedUpdatedTable, actualUpdatedTable);
  }

  @Test
  public void testDeleteTrue() {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(true);
    replay(bigquery);
    initializeTable();
    assertTrue(table.delete());
  }

  @Test
  public void testDeleteFalse() {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(false);
    replay(bigquery);
    initializeTable();
    assertFalse(table.delete());
  }

  @Test
  public void testInsert() throws Exception {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.insertAll(INSERT_ALL_REQUEST)).andReturn(EMPTY_INSERT_ALL_RESPONSE);
    replay(bigquery);
    initializeTable();
    InsertAllResponse response = table.insert(ROWS_TO_INSERT);
    assertSame(EMPTY_INSERT_ALL_RESPONSE, response);
  }

  @Test
  public void testInsertComplete() throws Exception {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    expect(bigquery.insertAll(INSERT_ALL_REQUEST_COMPLETE)).andReturn(EMPTY_INSERT_ALL_RESPONSE);
    replay(bigquery);
    initializeTable();
    InsertAllResponse response = table.insert(ROWS_TO_INSERT, true, true);
    assertSame(EMPTY_INSERT_ALL_RESPONSE, response);
  }

  @Test
  public void testList() throws Exception {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    PageImpl<List<FieldValue>> tableDataPage = new PageImpl<>(null, "c", ROWS);
    expect(bigquery.listTableData(TABLE_ID1)).andReturn(tableDataPage);
    replay(bigquery);
    initializeTable();
    Page<List<FieldValue>> dataPage = table.list();
    Iterator<List<FieldValue>> tableDataIterator = tableDataPage.values().iterator();
    Iterator<List<FieldValue>> dataIterator = dataPage.values().iterator();
    assertTrue(Iterators.elementsEqual(tableDataIterator, dataIterator));
  }

  @Test
  public void testListWithOptions() throws Exception {
    initializeExpectedTable(1);
    expect(bigquery.options()).andReturn(mockOptions);
    PageImpl<List<FieldValue>> tableDataPage = new PageImpl<>(null, "c", ROWS);
    expect(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.pageSize(10L)))
        .andReturn(tableDataPage);
    replay(bigquery);
    initializeTable();
    Page<List<FieldValue>> dataPage = table.list(BigQuery.TableDataListOption.pageSize(10L));
    Iterator<List<FieldValue>> tableDataIterator = tableDataPage.values().iterator();
    Iterator<List<FieldValue>> dataIterator = dataPage.values().iterator();
    assertTrue(Iterators.elementsEqual(tableDataIterator, dataIterator));
  }

  @Test
  public void testCopyFromString() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(COPY_JOB_INFO));
    expect(bigquery.create(COPY_JOB_INFO))
        .andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    Job job = table.copy(TABLE_ID2.dataset(), TABLE_ID2.table());
    assertSame(expectedJob, job);
  }

  @Test
  public void testCopyFromId() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(COPY_JOB_INFO));
    expect(bigquery.create(COPY_JOB_INFO)).andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    // NOTE(review): this body is identical to testCopyFromString; presumably it was meant to
    // exercise a copy(TableId) overload (e.g. table.copy(TABLE_ID2)) — confirm against the
    // Table API and update if that overload exists.
    Job job = table.copy(TABLE_ID2.dataset(), TABLE_ID2.table());
    assertSame(expectedJob, job);
  }

  @Test
  public void testLoadDataUri() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(LOAD_JOB_INFO));
    expect(bigquery.create(LOAD_JOB_INFO)).andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    Job job = table.load(FormatOptions.json(), "URI");
    assertSame(expectedJob, job);
  }

  @Test
  public void testLoadDataUris() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(LOAD_JOB_INFO));
    expect(bigquery.create(LOAD_JOB_INFO)).andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    Job job = table.load(FormatOptions.json(), ImmutableList.of("URI"));
    assertSame(expectedJob, job);
  }

  @Test
  public void testExtractDataUri() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(EXTRACT_JOB_INFO));
    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    Job job = table.extract("CSV", "URI");
    assertSame(expectedJob, job);
  }

  @Test
  public void testExtractDataUris() throws Exception {
    initializeExpectedTable(2);
    expect(bigquery.options()).andReturn(mockOptions);
    Job expectedJob = new Job(serviceMockReturnsOptions, new JobInfo.BuilderImpl(EXTRACT_JOB_INFO));
    expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(expectedJob);
    replay(bigquery);
    initializeTable();
    Job job = table.extract("CSV", ImmutableList.of("URI"));
    assertSame(expectedJob, job);
  }

  @Test
  public void testBigquery() {
    initializeExpectedTable(1);
    replay(bigquery);
    assertSame(serviceMockReturnsOptions, expectedTable.bigquery());
  }

  @Test
  public void testToAndFromPb() {
    initializeExpectedTable(4);
    replay(bigquery);
    compareTable(expectedTable, Table.fromPb(serviceMockReturnsOptions, expectedTable.toPb()));
  }

  /** Asserts equality of two tables, their info fields, and their service options. */
  private void compareTable(Table expected, Table value) {
    assertEquals(expected, value);
    compareTableInfo(expected, value);
    assertEquals(expected.bigquery().options(), value.bigquery().options());
  }

  /** Field-by-field {@code TableInfo} comparison plus the equals/hashCode contract. */
  private void compareTableInfo(TableInfo expected, TableInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.tableId(), value.tableId());
    assertEquals(expected.definition(), value.definition());
    assertEquals(expected.creationTime(), value.creationTime());
    assertEquals(expected.description(), value.description());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.expirationTime(), value.expirationTime());
    assertEquals(expected.friendlyName(), value.friendlyName());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.lastModifiedTime(), value.lastModifiedTime());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.hashCode(), value.hashCode());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class UserDefinedFunctionTest { + + private static final String INLINE = "inline"; + private static final String URI = "uri"; + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction(INLINE); + private static final UserDefinedFunction URI_FUNCTION = new UserDefinedFunction.UriFunction(URI); + + @Test + public void testConstructor() { + assertEquals(INLINE, INLINE_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.INLINE, INLINE_FUNCTION.type()); + assertEquals(URI, URI_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.FROM_URI, URI_FUNCTION.type()); + } + + @Test + public void testFactoryMethod() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.inline(INLINE)); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromUri(URI)); + } + + @Test + public void testToAndFromPb() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.fromPb(INLINE_FUNCTION.toPb())); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromPb(URI_FUNCTION.toPb())); + } + + private void compareUserDefinedFunction(UserDefinedFunction expected, UserDefinedFunction value) { + assertEquals(expected, value); + assertEquals(expected.type(), value.type()); + assertEquals(expected.content(), value.content()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ViewDefinitionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ViewDefinitionTest.java new file mode 100644 index 000000000000..ebab7a6e87ca --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ViewDefinitionTest.java @@ -0,0 +1,75 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class ViewDefinitionTest { + + private static final String VIEW_QUERY = "VIEW QUERY"; + private static final List USER_DEFINED_FUNCTIONS = + ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final ViewDefinition VIEW_DEFINITION = + ViewDefinition.builder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).build(); + + @Test + public void testToBuilder() { + compareViewDefinition(VIEW_DEFINITION, VIEW_DEFINITION.toBuilder().build()); + ViewDefinition viewDefinition = VIEW_DEFINITION.toBuilder() + .query("NEW QUERY") + .build(); + assertEquals("NEW QUERY", viewDefinition.query()); + viewDefinition = viewDefinition.toBuilder() + .query(VIEW_QUERY) + .build(); + compareViewDefinition(VIEW_DEFINITION, viewDefinition); + } + + @Test + public void testToBuilderIncomplete() { + TableDefinition viewDefinition = ViewDefinition.of(VIEW_QUERY); + assertEquals(viewDefinition, viewDefinition.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(VIEW_QUERY, VIEW_DEFINITION.query()); + assertEquals(TableDefinition.Type.VIEW, VIEW_DEFINITION.type()); + assertEquals(USER_DEFINED_FUNCTIONS, 
VIEW_DEFINITION.userDefinedFunctions()); + } + + @Test + public void testToAndFromPb() { + assertTrue(TableDefinition.fromPb(VIEW_DEFINITION.toPb()) instanceof ViewDefinition); + compareViewDefinition(VIEW_DEFINITION, + TableDefinition.fromPb(VIEW_DEFINITION.toPb())); + } + + private void compareViewDefinition(ViewDefinition expected, ViewDefinition value) { + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java new file mode 100644 index 000000000000..dfde4795dacd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +public class WriteChannelConfigurationTest { + + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.UTF_8) + .build(); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Integer MAX_BAD_RECORDS = 42; + private static final String FORMAT = "CSV"; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + + @Test + public void testToBuilder() { + compareLoadConfiguration(LOAD_CONFIGURATION, LOAD_CONFIGURATION.toBuilder().build()); + WriteChannelConfiguration configuration = LOAD_CONFIGURATION.toBuilder() + 
.destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", configuration.destinationTable().table()); + configuration = configuration.toBuilder().destinationTable(TABLE_ID).build(); + compareLoadConfiguration(LOAD_CONFIGURATION, configuration); + } + + @Test + public void testOf() { + WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID); + assertEquals(TABLE_ID, configuration.destinationTable()); + configuration = WriteChannelConfiguration.of(TABLE_ID, CSV_OPTIONS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(FORMAT, configuration.format()); + assertEquals(CSV_OPTIONS, configuration.csvOptions()); + } + + @Test + public void testToBuilderIncomplete() { + WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID); + compareLoadConfiguration(configuration, configuration.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, LOAD_CONFIGURATION.destinationTable()); + assertEquals(CREATE_DISPOSITION, LOAD_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, LOAD_CONFIGURATION.writeDisposition()); + assertEquals(CSV_OPTIONS, LOAD_CONFIGURATION.csvOptions()); + assertEquals(FORMAT, LOAD_CONFIGURATION.format()); + assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_CONFIGURATION.ignoreUnknownValues()); + assertEquals(MAX_BAD_RECORDS, LOAD_CONFIGURATION.maxBadRecords()); + assertEquals(PROJECTION_FIELDS, LOAD_CONFIGURATION.projectionFields()); + assertEquals(TABLE_SCHEMA, LOAD_CONFIGURATION.schema()); + } + + @Test + public void testToPbAndFromPb() { + assertNull(LOAD_CONFIGURATION.toPb().getLoad().getSourceUris()); + compareLoadConfiguration(LOAD_CONFIGURATION, + WriteChannelConfiguration.fromPb(LOAD_CONFIGURATION.toPb())); + WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID); + compareLoadConfiguration(configuration, WriteChannelConfiguration.fromPb(configuration.toPb())); + } + + private 
void compareLoadConfiguration(WriteChannelConfiguration expected, + WriteChannelConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + assertEquals(expected.csvOptions(), value.csvOptions()); + assertEquals(expected.format(), value.format()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.maxBadRecords(), value.maxBadRecords()); + assertEquals(expected.projectionFields(), value.projectionFields()); + assertEquals(expected.schema(), value.schema()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java new file mode 100644 index 000000000000..50780b4fc9a9 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/it/ITBigQueryTest.java @@ -0,0 +1,955 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery.it; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Page; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQuery.DatasetField; +import com.google.gcloud.bigquery.BigQuery.DatasetOption; +import com.google.gcloud.bigquery.BigQuery.JobField; +import com.google.gcloud.bigquery.BigQuery.JobListOption; +import com.google.gcloud.bigquery.BigQuery.JobOption; +import com.google.gcloud.bigquery.BigQuery.TableField; +import com.google.gcloud.bigquery.BigQuery.TableOption; +import com.google.gcloud.bigquery.BigQueryError; +import com.google.gcloud.bigquery.BigQueryException; +import com.google.gcloud.bigquery.CopyJobConfiguration; +import com.google.gcloud.bigquery.Dataset; +import com.google.gcloud.bigquery.DatasetId; +import com.google.gcloud.bigquery.DatasetInfo; +import com.google.gcloud.bigquery.ExternalTableDefinition; +import com.google.gcloud.bigquery.ExtractJobConfiguration; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.FormatOptions; +import com.google.gcloud.bigquery.InsertAllRequest; +import com.google.gcloud.bigquery.InsertAllResponse; +import com.google.gcloud.bigquery.Job; +import com.google.gcloud.bigquery.JobInfo; +import com.google.gcloud.bigquery.JobStatistics; +import com.google.gcloud.bigquery.LoadJobConfiguration; +import com.google.gcloud.bigquery.QueryJobConfiguration; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; +import com.google.gcloud.bigquery.Schema; +import 
com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; +import com.google.gcloud.bigquery.TableDefinition; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; +import com.google.gcloud.bigquery.ViewDefinition; +import com.google.gcloud.bigquery.WriteChannelConfiguration; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.testing.RemoteGcsHelper; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +public class ITBigQueryTest { + + private static final Logger LOG = Logger.getLogger(ITBigQueryTest.class.getName()); + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final String DESCRIPTION = "Test dataset"; + private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final Field TIMESTAMP_FIELD_SCHEMA = + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .description("TimestampDescription") + .build(); + private static final Field STRING_FIELD_SCHEMA = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("StringDescription") + .build(); + private static final Field INTEGER_FIELD_SCHEMA = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("IntegerDescription") + .build(); + private static final Field 
BOOLEAN_FIELD_SCHEMA = + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .description("BooleanDescription") + .build(); + private static final Field RECORD_FIELD_SCHEMA = + Field.builder("RecordField", Field.Type.record(TIMESTAMP_FIELD_SCHEMA, + STRING_FIELD_SCHEMA, INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA)) + .mode(Field.Mode.REQUIRED) + .description("RecordDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA, + INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA, RECORD_FIELD_SCHEMA); + private static final Schema SIMPLE_SCHEMA = Schema.of(STRING_FIELD_SCHEMA); + private static final Schema QUERY_RESULT_SCHEMA = Schema.builder() + .addField(Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField(Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField(Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + private static final String LOAD_FILE = "load.csv"; + private static final String JSON_LOAD_FILE = "load.json"; + private static final String EXTRACT_FILE = "extract.csv"; + private static final String BUCKET = RemoteGcsHelper.generateBucketName(); + private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table"); + private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; + private static final String JSON_CONTENT = "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": \"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}\n" + + "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": 
\"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}"; + + private static BigQuery bigquery; + private static Storage storage; + + @Rule + public Timeout globalTimeout = Timeout.seconds(300); + + @BeforeClass + public static void beforeClass() throws InterruptedException { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); + bigquery = bigqueryHelper.options().service(); + storage = gcsHelper.options().service(); + storage.create(BucketInfo.of(BUCKET)); + storage.create(BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(), + CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); + storage.create(BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(), + JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); + DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build(); + bigquery.create(info); + LoadJobConfiguration configuration = LoadJobConfiguration.builder( + TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .build(); + Job job = bigquery.create(JobInfo.of(configuration)); + while (!job.isDone()) { + Thread.sleep(1000); + } + assertNull(job.status().error()); + } + + @AfterClass + public static void afterClass() throws ExecutionException, InterruptedException { + if (bigquery != null) { + RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + } + if (storage != null) { + boolean wasDeleted = RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS); + if (!wasDeleted && LOG.isLoggable(Level.WARNING)) { + LOG.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", 
BUCKET); + } + } + } + + @Test + public void testGetDataset() { + Dataset dataset = bigquery.getDataset(DATASET); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertEquals(DESCRIPTION, dataset.description()); + assertNotNull(dataset.acl()); + assertNotNull(dataset.etag()); + assertNotNull(dataset.id()); + assertNotNull(dataset.lastModified()); + assertNotNull(dataset.selfLink()); + } + + @Test + public void testGetDatasetWithSelectedFields() { + Dataset dataset = bigquery.getDataset(DATASET, + DatasetOption.fields(DatasetField.CREATION_TIME)); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertNotNull(dataset.creationTime()); + assertNull(dataset.description()); + assertNull(dataset.defaultTableLifetime()); + assertNull(dataset.acl()); + assertNull(dataset.etag()); + assertNull(dataset.friendlyName()); + assertNull(dataset.id()); + assertNull(dataset.lastModified()); + assertNull(dataset.location()); + assertNull(dataset.selfLink()); + } + + @Test + public void testUpdateDataset() { + Dataset dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + Dataset updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build()); + assertEquals("Updated Description", updatedDataset.description()); + assertTrue(dataset.delete()); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + Dataset dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), 
dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + Dataset updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build(), + DatasetOption.fields(DatasetField.DESCRIPTION)); + assertEquals("Updated Description", updatedDataset.description()); + assertNull(updatedDataset.creationTime()); + assertNull(updatedDataset.defaultTableLifetime()); + assertNull(updatedDataset.acl()); + assertNull(updatedDataset.etag()); + assertNull(updatedDataset.friendlyName()); + assertNull(updatedDataset.id()); + assertNull(updatedDataset.lastModified()); + assertNull(updatedDataset.location()); + assertNull(updatedDataset.selfLink()); + assertTrue(dataset.delete()); + } + + @Test + public void testGetNonExistingTable() { + assertNull(bigquery.getTable(DATASET, "test_get_non_existing_table")); + } + + @Test + public void testCreateAndGetTable() { + String tableName = "test_create_and_get_table"; + TableId tableId = TableId.of(DATASET, tableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof StandardTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.definition().type()); + assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); + assertNotNull(remoteTable.creationTime()); + assertNotNull(remoteTable.lastModifiedTime()); + assertNotNull(remoteTable.definition().numBytes()); + assertNotNull(remoteTable.definition().numRows()); + assertTrue(remoteTable.delete()); 
+ } + + @Test + public void testCreateAndGetTableWithSelectedField() { + String tableName = "test_create_and_get_selected_fields_table"; + TableId tableId = TableId.of(DATASET, tableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName, + TableOption.fields(TableField.CREATION_TIME)); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof StandardTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.definition().type()); + assertNotNull(remoteTable.creationTime()); + assertNull(remoteTable.definition().schema()); + assertNull(remoteTable.lastModifiedTime()); + assertNull(remoteTable.definition().numBytes()); + assertNull(remoteTable.definition().numRows()); + assertTrue(remoteTable.delete()); + } + + @Test + public void testCreateExternalTable() throws InterruptedException { + String tableName = "test_create_external_table"; + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.definition() instanceof ExternalTableDefinition); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + 
assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); + QueryRequest request = QueryRequest.builder( + "SELECT TimestampField, StringField, IntegerField, BooleanField FROM " + DATASET + "." + + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .pageSize(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + long integerValue = 0; + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(integerValue, integerCell.longValue()); + assertEquals(false, booleanCell.booleanValue()); + integerValue = ~integerValue & 0x1; + rowCount++; + } + assertEquals(4, rowCount); + assertTrue(remoteTable.delete()); + } + + @Test + public void testCreateViewTable() throws InterruptedException { + String tableName = "test_create_view_table"; + TableId tableId = TableId.of(DATASET, tableName); + ViewDefinition viewDefinition = + ViewDefinition.of("SELECT TimestampField, StringField, BooleanField FROM " + DATASET + "." 
+ + TABLE_ID.table()); + TableInfo tableInfo = TableInfo.of(tableId, viewDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(tableName, createdTable.tableId().table()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertEquals(createdTable.tableId(), remoteTable.tableId()); + assertTrue(remoteTable.definition() instanceof ViewDefinition); + Schema expectedSchema = Schema.builder() + .addField( + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + assertEquals(expectedSchema, remoteTable.definition().schema()); + QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .pageSize(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(remoteTable.delete()); + } + + @Test + public void 
testListTables() { + String tableName = "test_list_tables"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Page
tables = bigquery.listTables(DATASET); + boolean found = false; + Iterator
tableIterator = tables.values().iterator(); + while (tableIterator.hasNext() && !found) { + if (tableIterator.next().tableId().equals(createdTable.tableId())) { + found = true; + } + } + assertTrue(found); + assertTrue(createdTable.delete()); + } + + @Test + public void testUpdateTable() { + String tableName = "test_update_table"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Table updatedTable = + bigquery.update(tableInfo.toBuilder().description("newDescription").build()); + assertEquals(DATASET, updatedTable.tableId().dataset()); + assertEquals(tableName, updatedTable.tableId().table()); + assertEquals(TABLE_SCHEMA, updatedTable.definition().schema()); + assertEquals("newDescription", updatedTable.description()); + assertTrue(updatedTable.delete()); + } + + @Test + public void testUpdateTableWithSelectedFields() { + String tableName = "test_update_with_selected_fields_table"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Table updatedTable = bigquery.update(tableInfo.toBuilder().description("newDescr").build(), + TableOption.fields(TableField.DESCRIPTION)); + assertTrue(updatedTable.definition() instanceof StandardTableDefinition); + assertEquals(DATASET, updatedTable.tableId().dataset()); + assertEquals(tableName, updatedTable.tableId().table()); + assertEquals("newDescr", updatedTable.description()); + assertNull(updatedTable.definition().schema()); + assertNull(updatedTable.lastModifiedTime()); + assertNull(updatedTable.definition().numBytes()); + assertNull(updatedTable.definition().numRows()); + assertTrue(createdTable.delete()); + } + + @Test 
+ public void testUpdateNonExistingTable() { + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, "test_update_non_existing_table"), + StandardTableDefinition.of(SIMPLE_SCHEMA)); + try { + bigquery.update(tableInfo); + fail("BigQueryException was expected"); + } catch (BigQueryException e) { + BigQueryError error = e.error(); + assertNotNull(error); + assertEquals("notFound", error.reason()); + assertNotNull(error.message()); + } + } + + @Test + public void testDeleteNonExistingTable() { + assertFalse(bigquery.delete(DATASET, "test_delete_non_existing_table")); + } + + @Test + public void testInsertAll() { + String tableName = "test_insert_all_table"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.insertErrors().size()); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testInsertAllWithSuffix() throws InterruptedException { + String tableName = "test_insert_all_with_suffix_table"; + 
StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .templateSuffix("_suffix") + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.insertErrors().size()); + String newTableName = tableName + "_suffix"; + Table suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); + // wait until the new table is created. 
If the table is never created the test will time-out + while (suffixTable == null) { + Thread.sleep(1000L); + suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); + } + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + assertTrue(suffixTable.delete()); + } + + @Test + public void testInsertAllWithErrors() { + String tableName = "test_insert_all_with_errors_table"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "invalidDate", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false)) + .skipInvalidRows(true) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertTrue(response.hasErrors()); + assertEquals(2, response.insertErrors().size()); + assertNotNull(response.errorsFor(1L)); + assertNotNull(response.errorsFor(2L)); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testListAllTableData() { + Page> rows = bigquery.listTableData(TABLE_ID); + int rowCount = 0; + for (List row : rows.values()) { + 
FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + } + + @Test + public void testQuery() throws InterruptedException { + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + QueryRequest request = QueryRequest.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .pageSize(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = 
row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + Job queryJob = bigquery.getJob(response.jobId()); + JobStatistics.QueryStatistics statistics = queryJob.statistics(); + assertNotNull(statistics.queryPlan()); + } + + @Test + public void testListJobs() { + Page jobs = bigquery.listJobs(); + for (Job job : jobs.values()) { + assertNotNull(job.jobId()); + assertNotNull(job.statistics()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNotNull(job.id()); + } + } + + @Test + public void testListJobsWithSelectedFields() { + Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); + for (Job job : jobs.values()) { + assertNotNull(job.jobId()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNull(job.statistics()); + assertNull(job.id()); + } + } + + @Test + public void testCreateAndGetJob() { + String sourceTableName = "test_create_and_get_job_source_table"; + String destinationTableName = "test_create_and_get_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + 
CopyJobConfiguration copyJobConfiguration = + CopyJobConfiguration.of(destinationTable, sourceTable); + Job createdJob = bigquery.create(JobInfo.of(copyJobConfiguration)); + Job remoteJob = bigquery.getJob(createdJob.jobId()); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + CopyJobConfiguration createdConfiguration = createdJob.configuration(); + CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); + assertEquals(createdConfiguration.sourceTables(), remoteConfiguration.sourceTables()); + assertEquals(createdConfiguration.destinationTable(), remoteConfiguration.destinationTable()); + assertEquals(createdConfiguration.createDisposition(), remoteConfiguration.createDisposition()); + assertEquals(createdConfiguration.writeDisposition(), remoteConfiguration.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNotNull(remoteJob.statistics()); + assertNotNull(remoteJob.status()); + assertEquals(createdJob.selfLink(), remoteJob.selfLink()); + assertEquals(createdJob.userEmail(), remoteJob.userEmail()); + assertTrue(createdTable.delete()); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testCreateAndGetJobWithSelectedFields() { + String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table"; + String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); + Job 
createdJob = bigquery.create(JobInfo.of(configuration), JobOption.fields(JobField.ETAG)); + CopyJobConfiguration createdConfiguration = createdJob.configuration(); + assertNotNull(createdJob.jobId()); + assertNotNull(createdConfiguration.sourceTables()); + assertNotNull(createdConfiguration.destinationTable()); + assertNotNull(createdJob.etag()); + assertNull(createdJob.statistics()); + assertNull(createdJob.status()); + assertNull(createdJob.selfLink()); + assertNull(createdJob.userEmail()); + Job remoteJob = bigquery.getJob(createdJob.jobId(), JobOption.fields(JobField.ETAG)); + CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + assertEquals(createdConfiguration.sourceTables(), remoteConfiguration.sourceTables()); + assertEquals(createdConfiguration.destinationTable(), remoteConfiguration.destinationTable()); + assertEquals(createdConfiguration.createDisposition(), remoteConfiguration.createDisposition()); + assertEquals(createdConfiguration.writeDisposition(), remoteConfiguration.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNull(remoteJob.statistics()); + assertNull(remoteJob.status()); + assertNull(remoteJob.selfLink()); + assertNull(remoteJob.userEmail()); + assertTrue(createdTable.delete()); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testCopyJob() throws InterruptedException { + String sourceTableName = "test_copy_job_source_table"; + String destinationTableName = "test_copy_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.tableId().dataset()); + assertEquals(sourceTableName, createdTable.tableId().table()); + 
TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteJob.isDone()) { + Thread.sleep(1000); + } + assertNull(remoteJob.status().error()); + Table remoteTable = bigquery.getTable(DATASET, destinationTableName); + assertNotNull(remoteTable); + assertEquals(destinationTable.dataset(), remoteTable.tableId().dataset()); + assertEquals(destinationTableName, remoteTable.tableId().table()); + assertEquals(TABLE_SCHEMA, remoteTable.definition().schema()); + assertTrue(createdTable.delete()); + assertTrue(remoteTable.delete()); + } + + @Test + public void testQueryJob() throws InterruptedException { + String tableName = "test_query_job_table"; + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + TableId destinationTable = TableId.of(DATASET, tableName); + QueryJobConfiguration configuration = QueryJobConfiguration.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(destinationTable) + .build(); + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteJob.isDone()) { + Thread.sleep(1000); + } + assertNull(remoteJob.status().error()); + + QueryResponse response = bigquery.getQueryResults(remoteJob.jobId()); + while (!response.jobCompleted()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertFalse(response.hasErrors()); + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); 
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + Job queryJob = bigquery.getJob(remoteJob.jobId()); + JobStatistics.QueryStatistics statistics = queryJob.statistics(); + assertNotNull(statistics.queryPlan()); + } + + @Test + public void testExtractJob() throws InterruptedException { + String tableName = "test_export_job_table"; + TableId destinationTable = TableId.of(DATASET, tableName); + LoadJobConfiguration configuration = + LoadJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE) + .schema(SIMPLE_SCHEMA) + .build(); + Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)); + while (!remoteLoadJob.isDone()) { + Thread.sleep(1000); + } + assertNull(remoteLoadJob.status().error()); + + ExtractJobConfiguration extractConfiguration = + ExtractJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE) + .printHeader(false) + .build(); + Job remoteExtractJob = bigquery.create(JobInfo.of(extractConfiguration)); + while (!remoteExtractJob.isDone()) { + Thread.sleep(1000); + } + assertNull(remoteExtractJob.status().error()); + assertEquals(CSV_CONTENT, + new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8)); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCancelJob() throws InterruptedException { + String destinationTableName = "test_cancel_query_job_table"; + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.table(); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + QueryJobConfiguration configuration = QueryJobConfiguration.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + 
.destinationTable(destinationTable) + .build(); + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + assertTrue(remoteJob.cancel()); + while (!remoteJob.isDone()) { + Thread.sleep(1000); + } + assertNull(remoteJob.status().error()); + } + + @Test + public void testCancelNonExistingJob() { + assertFalse(bigquery.cancel("test_cancel_non_existing_job")); + } + + @Test + public void testInsertFromFile() throws InterruptedException { + String destinationTableName = "test_insert_from_file_table"; + TableId tableId = TableId.of(DATASET, destinationTableName); + WriteChannelConfiguration configuration = WriteChannelConfiguration.builder(tableId) + .formatOptions(FormatOptions.json()) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .build(); + try (WriteChannel channel = bigquery.writer(configuration)) { + channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); + } catch (IOException e) { + fail("IOException was not expected"); + } + // wait until the new table is created. 
If the table is never created the test will time-out + while (bigquery.getTable(tableId) == null) { + Thread.sleep(1000L); + } + Page> rows = bigquery.listTableData(tableId); + int rowCount = 0; + for (List row : rows.values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } +} diff --git a/gcloud-java-contrib/README.md b/gcloud-java-contrib/README.md new file mode 100644 index 000000000000..426417d54e87 --- /dev/null +++ b/gcloud-java-contrib/README.md @@ -0,0 +1,64 @@ +Google Cloud Java Contributions +=============================== + +Packages that provide higher-level abstraction/functionality for common gcloud-java use cases. 
+ +[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) +[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) +[![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) + +Quickstart +---------- +If you are using Maven, add this to your pom.xml file +```xml + + com.google.gcloud + gcloud-java-contrib + 0.1.5 + +``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-contrib:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-contrib" % "0.1.5" +``` + +Java Versions +------------- + +Java 7 or above is required for using this client. + +Versioning +---------- + +This library follows [Semantic Versioning] (http://semver.org/). + +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. 
+ +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. + +License +------- + +Apache 2.0 - See [LICENSE] for more information. + + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[cloud-platform]: https://cloud.google.com/ +[developers-console]:https://console.developers.google.com/ diff --git a/gcloud-java-contrib/pom.xml b/gcloud-java-contrib/pom.xml new file mode 100644 index 000000000000..bd4a6458dc38 --- /dev/null +++ b/gcloud-java-contrib/pom.xml @@ -0,0 +1,36 @@ + + + 4.0.0 + gcloud-java-contrib + jar + GCloud Java contributions + + Contains packages that provide higher-level abstraction/functionality for common gcloud-java use cases. 
+ + + com.google.gcloud + gcloud-java-pom + 0.1.6-SNAPSHOT + + + gcloud-java-contrib + + + + ${project.groupId} + gcloud-java + ${project.version} + + + + + + org.codehaus.mojo + exec-maven-plugin + + false + + + + + diff --git a/gcloud-java-core/README.md b/gcloud-java-core/README.md index 2a3be300f4ac..fc5f481f8ec3 100644 --- a/gcloud-java-core/README.md +++ b/gcloud-java-core/README.md @@ -6,20 +6,35 @@ This module provides common functionality required by service-specific modules o [![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-core.svg)](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-core.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) - [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) - [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/package-summary.html) Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml com.google.gcloud gcloud-java-core - 0.0.10 + 0.1.5 ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-core:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.1.5" +``` + +Troubleshooting +--------------- + +To get help, follow 
the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). Java Versions ------------- @@ -31,7 +46,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. Versioning ---------- @@ -49,5 +66,6 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [cloud-platform]: https://cloud.google.com/ diff --git a/gcloud-java-core/pom.xml b/gcloud-java-core/pom.xml index e849594226db..6d0ed675b423 100644 --- a/gcloud-java-core/pom.xml +++ b/gcloud-java-core/pom.xml @@ -1,7 +1,6 @@ 4.0.0 - com.google.gcloud gcloud-java-core jar GCloud Java core @@ -11,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-core @@ -20,34 +19,40 @@ com.google.auth google-auth-library-credentials - 0.1.0 + 0.3.1 com.google.auth google-auth-library-oauth2-http - 0.1.0 + 0.3.1 + + + com.google.guava + guava-jdk5 + + com.google.http-client google-http-client - 1.20.0 + 1.21.0 compile com.google.oauth-client google-oauth-client - 1.20.0 + 1.21.0 compile com.google.guava guava - 18.0 + 19.0 
com.google.api-client google-api-client-appengine - 1.20.0 + 1.21.0 compile @@ -59,7 +64,7 @@ com.google.http-client google-http-client-jackson - 1.20.0 + 1.21.0 compile @@ -77,19 +82,19 @@ joda-time joda-time - 2.8.2 + 2.9.2 compile org.json json - 20090211 + 20151123 compile org.easymock easymock - 3.3 + 3.4 test diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java index 73c66279ea53..6f9e09ca04bc 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java @@ -18,24 +18,17 @@ import static com.google.common.base.Preconditions.checkNotNull; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.compute.ComputeCredential; -import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.javanet.NetHttpTransport; -import com.google.api.client.json.jackson.JacksonFactory; -import com.google.auth.http.HttpCredentialsAdapter; +import com.google.auth.oauth2.AccessToken; import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; -import java.security.GeneralSecurityException; +import java.lang.reflect.Method; import java.security.PrivateKey; +import java.util.Collection; import java.util.Objects; -import java.util.Set; /** * Credentials for accessing Google Cloud services. 
@@ -45,8 +38,67 @@ public abstract class AuthCredentials implements Restorable { private static class AppEngineAuthCredentials extends AuthCredentials { private static final AuthCredentials INSTANCE = new AppEngineAuthCredentials(); - private static final AppEngineAuthCredentialsState STATE = - new AppEngineAuthCredentialsState(); + private static final AppEngineAuthCredentialsState STATE = new AppEngineAuthCredentialsState(); + + private static class AppEngineCredentials extends GoogleCredentials { + + private final Object appIdentityService; + private final Method getAccessToken; + private final Method getAccessTokenResult; + private final Collection scopes; + + AppEngineCredentials() { + try { + Class factoryClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityServiceFactory"); + Method method = factoryClass.getMethod("getAppIdentityService"); + this.appIdentityService = method.invoke(null); + Class serviceClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityService"); + Class tokenResultClass = Class.forName( + "com.google.appengine.api.appidentity.AppIdentityService$GetAccessTokenResult"); + this.getAccessTokenResult = serviceClass.getMethod("getAccessToken", Iterable.class); + this.getAccessToken = tokenResultClass.getMethod("getAccessToken"); + this.scopes = null; + } catch (Exception e) { + throw new RuntimeException("Could not create AppEngineCredentials.", e); + } + } + + AppEngineCredentials(Collection scopes, AppEngineCredentials unscoped) { + this.appIdentityService = unscoped.appIdentityService; + this.getAccessToken = unscoped.getAccessToken; + this.getAccessTokenResult = unscoped.getAccessTokenResult; + this.scopes = scopes; + } + + /** + * Refresh the access token by getting it from the App Identity service + */ + @Override + public AccessToken refreshAccessToken() throws IOException { + if (createScopedRequired()) { + throw new IOException("AppEngineCredentials requires createScoped call before use."); + } + 
try { + Object accessTokenResult = getAccessTokenResult.invoke(appIdentityService, scopes); + String accessToken = (String) getAccessToken.invoke(accessTokenResult); + return new AccessToken(accessToken, null); + } catch (Exception e) { + throw new IOException("Could not get the access token.", e); + } + } + + @Override + public boolean createScopedRequired() { + return scopes == null || scopes.isEmpty(); + } + + @Override + public GoogleCredentials createScoped(Collection scopes) { + return new AppEngineCredentials(scopes, this); + } + } private static class AppEngineAuthCredentialsState implements RestorableState, Serializable { @@ -70,9 +122,8 @@ public boolean equals(Object obj) { } @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return new AppIdentityCredential(scopes); + public GoogleCredentials credentials() { + return new AppEngineCredentials(); } @Override @@ -81,13 +132,17 @@ public RestorableState capture() { } } + /** + * Represents service account credentials. 
+ * + * @see + * User accounts and service accounts + */ public static class ServiceAccountAuthCredentials extends AuthCredentials { private final String account; private final PrivateKey privateKey; - private static final AuthCredentials NO_CREDENTIALS = new ServiceAccountAuthCredentials(); - private static class ServiceAccountAuthCredentialsState implements RestorableState, Serializable { @@ -103,9 +158,6 @@ private ServiceAccountAuthCredentialsState(String account, PrivateKey privateKey @Override public AuthCredentials restore() { - if (account == null && privateKey == null) { - return NO_CREDENTIALS; - } return new ServiceAccountAuthCredentials(account, privateKey); } @@ -130,23 +182,9 @@ public boolean equals(Object obj) { this.privateKey = checkNotNull(privateKey); } - ServiceAccountAuthCredentials() { - account = null; - privateKey = null; - } - @Override - protected HttpRequestInitializer httpRequestInitializer( - HttpTransport transport, Set scopes) { - GoogleCredential.Builder builder = new GoogleCredential.Builder() - .setTransport(transport) - .setJsonFactory(new JacksonFactory()); - if (privateKey != null) { - builder.setServiceAccountPrivateKey(privateKey); - builder.setServiceAccountId(account); - builder.setServiceAccountScopes(scopes); - } - return builder.build(); + public ServiceAccountCredentials credentials() { + return new ServiceAccountCredentials(null, account, privateKey, null, null); } public String account() { @@ -163,25 +201,33 @@ public RestorableState capture() { } } - private static class ComputeEngineAuthCredentials extends AuthCredentials { + /** + * Represents Application Default Credentials, which are credentials that are inferred from the + * runtime environment. 
+ * + * @see + * Google Application Default Credentials + */ + public static class ApplicationDefaultAuthCredentials extends AuthCredentials { - private ComputeCredential computeCredential; + private GoogleCredentials googleCredentials; - private static final ComputeEngineAuthCredentialsState STATE = - new ComputeEngineAuthCredentialsState(); + private static final ApplicationDefaultAuthCredentialsState STATE = + new ApplicationDefaultAuthCredentialsState(); - private static class ComputeEngineAuthCredentialsState + private static class ApplicationDefaultAuthCredentialsState implements RestorableState, Serializable { - private static final long serialVersionUID = -6168594072854417404L; + private static final long serialVersionUID = -8839085552021212257L; @Override public AuthCredentials restore() { try { - return new ComputeEngineAuthCredentials(); - } catch (IOException | GeneralSecurityException e) { + return new ApplicationDefaultAuthCredentials(); + } catch (IOException e) { throw new IllegalStateException( - "Could not restore " + ComputeEngineAuthCredentials.class.getSimpleName(), e); + "Could not restore " + ApplicationDefaultAuthCredentials.class.getSimpleName(), e); } } @@ -192,18 +238,17 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof ComputeEngineAuthCredentialsState; + return obj instanceof ApplicationDefaultAuthCredentialsState; } } - ComputeEngineAuthCredentials() throws IOException, GeneralSecurityException { - computeCredential = getComputeCredential(); + ApplicationDefaultAuthCredentials() throws IOException { + googleCredentials = GoogleCredentials.getApplicationDefault(); } @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return computeCredential; + public GoogleCredentials credentials() { + return googleCredentials; } @Override @@ -212,47 +257,30 @@ public RestorableState capture() { } } - private static class 
ApplicationDefaultAuthCredentials extends AuthCredentials { + /** + * A placeholder for credentials to signify that requests sent to the server should not be + * authenticated. This is typically useful when using the local service emulators, such as + * {@code LocalGcdHelper} and {@code LocalResourceManagerHelper}. + */ + public static class NoAuthCredentials extends AuthCredentials { - private GoogleCredentials googleCredentials; + private static final AuthCredentials INSTANCE = new NoAuthCredentials(); + private static final NoAuthCredentialsState STATE = new NoAuthCredentialsState(); - private static final ApplicationDefaultAuthCredentialsState STATE = - new ApplicationDefaultAuthCredentialsState(); - - private static class ApplicationDefaultAuthCredentialsState + private static class NoAuthCredentialsState implements RestorableState, Serializable { - private static final long serialVersionUID = -8839085552021212257L; + private static final long serialVersionUID = -4022100563954640465L; @Override public AuthCredentials restore() { - try { - return new ApplicationDefaultAuthCredentials(); - } catch (IOException e) { - throw new IllegalStateException( - "Could not restore " + ApplicationDefaultAuthCredentials.class.getSimpleName(), e); - } - } - - @Override - public int hashCode() { - return getClass().getName().hashCode(); - } - - @Override - public boolean equals(Object obj) { - return obj instanceof ApplicationDefaultAuthCredentialsState; + return INSTANCE; } } - ApplicationDefaultAuthCredentials() throws IOException { - googleCredentials = GoogleCredentials.getApplicationDefault(); - } - @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return new HttpCredentialsAdapter(googleCredentials); + public GoogleCredentials credentials() { + return null; } @Override @@ -261,18 +289,12 @@ public RestorableState capture() { } } - protected abstract HttpRequestInitializer httpRequestInitializer(HttpTransport 
transport, - Set scopes); + public abstract GoogleCredentials credentials(); public static AuthCredentials createForAppEngine() { return AppEngineAuthCredentials.INSTANCE; } - public static AuthCredentials createForComputeEngine() - throws IOException, GeneralSecurityException { - return new ComputeEngineAuthCredentials(); - } - /** * Returns the Application Default Credentials. * @@ -282,8 +304,8 @@ public static AuthCredentials createForComputeEngine() * variable GOOGLE_APPLICATION_CREDENTIALS. *

* - * @return the credentials instance. - * @throws IOException if the credentials cannot be created in the current environment. + * @return the credentials instance + * @throws IOException if the credentials cannot be created in the current environment */ public static AuthCredentials createApplicationDefaults() throws IOException { return new ApplicationDefaultAuthCredentials(); @@ -299,12 +321,21 @@ public static AuthCredentials createApplicationDefaults() throws IOException { * * @param account id of the Service Account * @param privateKey private key associated to the account - * @return the credentials instance. + * @return the credentials instance */ public static ServiceAccountAuthCredentials createFor(String account, PrivateKey privateKey) { return new ServiceAccountAuthCredentials(account, privateKey); } + /** + * Creates a placeholder denoting that no credentials should be used. This is typically useful + * when using the local service emulators, such as {@code LocalGcdHelper} and + * {@code LocalResourceManagerHelper}. + */ + public static AuthCredentials noAuth() { + return NoAuthCredentials.INSTANCE; + } + /** * Creates Service Account Credentials given a stream for credentials in JSON format. * @@ -314,26 +345,20 @@ public static ServiceAccountAuthCredentials createFor(String account, PrivateKey *

* * @param jsonCredentialStream stream for Service Account Credentials in JSON format - * @return the credentials instance. - * @throws IOException if the credentials cannot be created from the stream. + * @return the credentials instance + * @throws IOException if the credentials cannot be created from the stream */ public static ServiceAccountAuthCredentials createForJson(InputStream jsonCredentialStream) throws IOException { - GoogleCredential tempCredentials = GoogleCredential.fromStream(jsonCredentialStream); - return new ServiceAccountAuthCredentials(tempCredentials.getServiceAccountId(), - tempCredentials.getServiceAccountPrivateKey()); - } - - public static AuthCredentials noCredentials() { - return ServiceAccountAuthCredentials.NO_CREDENTIALS; - } - - static ComputeCredential getComputeCredential() throws IOException, GeneralSecurityException { - NetHttpTransport transport = GoogleNetHttpTransport.newTrustedTransport(); - // Try to connect using Google Compute Engine service account credentials. - ComputeCredential credential = new ComputeCredential(transport, new JacksonFactory()); - // Force token refresh to detect if we are running on Google Compute Engine. 
- credential.refreshToken(); - return credential; + GoogleCredentials tempCredentials = GoogleCredentials.fromStream(jsonCredentialStream); + if (tempCredentials instanceof ServiceAccountCredentials) { + ServiceAccountCredentials tempServiceAccountCredentials = + (ServiceAccountCredentials) tempCredentials; + return new ServiceAccountAuthCredentials( + tempServiceAccountCredentials.getClientEmail(), + tempServiceAccountCredentials.getPrivateKey()); + } + throw new IOException( + "The given JSON Credentials Stream is not for a service account credential."); } } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java index 7600d25411fd..d9e6f2db7c95 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java @@ -16,9 +16,39 @@ package com.google.gcloud; +import com.google.gcloud.ExceptionHandler.Interceptor; + +/** + * Base class for service objects. + * + * @param the {@code ServiceOptions} subclass corresponding to the service + */ public abstract class BaseService> implements Service { + public static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { + + private static final long serialVersionUID = -8429573486870467828L; + + @Override + public RetryResult afterEval(Exception exception, RetryResult retryResult) { + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + + @Override + public RetryResult beforeEval(Exception exception) { + if (exception instanceof BaseServiceException) { + boolean retriable = ((BaseServiceException) exception).retryable(); + return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; + } + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + }; + public static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() + .abortOn(RuntimeException.class) + .interceptor(EXCEPTION_HANDLER_INTERCEPTOR) + .build(); + private final OptionsT options; protected BaseService(OptionsT options) { diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java new file mode 100644 index 000000000000..365243904436 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -0,0 +1,230 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.net.SocketTimeoutException; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; + +/** + * Base class for all service exceptions. 
+ */ +public class BaseServiceException extends RuntimeException { + + protected static final class Error implements Serializable { + + private static final long serialVersionUID = -4019600198652965721L; + + private final Integer code; + private final String reason; + + public Error(Integer code, String reason) { + this.code = code; + this.reason = reason; + } + + /** + * Returns the code associated with this exception. + */ + public Integer code() { + return code; + } + + /** + * Returns the reason that caused the exception. + */ + public String reason() { + return reason; + } + + boolean isRetryable(Set retryableErrors) { + for (Error retryableError : retryableErrors) { + if ((retryableError.code() == null || retryableError.code().equals(this.code())) + && (retryableError.reason() == null || retryableError.reason().equals(this.reason()))) { + return true; + } + } + return false; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("code", code).add("reason", reason).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(code, reason); + } + } + + private static final long serialVersionUID = 759921776378760835L; + public static final int UNKNOWN_CODE = 0; + + private final int code; + private final boolean retryable; + private final String reason; + private final boolean idempotent; + private final String location; + private final String debugInfo; + + public BaseServiceException(IOException exception, boolean idempotent) { + super(message(exception), exception); + int code = UNKNOWN_CODE; + String reason = null; + String location = null; + String debugInfo = null; + if (exception instanceof GoogleJsonResponseException) { + GoogleJsonError jsonError = ((GoogleJsonResponseException) exception).getDetails(); + if (jsonError != null) { + Error error = error(jsonError); + code = error.code; + reason = error.reason; + if (reason != null) { + GoogleJsonError.ErrorInfo errorInfo = 
jsonError.getErrors().get(0); + location = errorInfo.getLocation(); + debugInfo = (String) errorInfo.get("debugInfo"); + } + } else { + code = ((GoogleJsonResponseException) exception).getStatusCode(); + } + } + this.code = code; + this.retryable = idempotent && isRetryable(exception); + this.reason = reason; + this.idempotent = idempotent; + this.location = location; + this.debugInfo = debugInfo; + } + + public BaseServiceException(GoogleJsonError error, boolean idempotent) { + super(error.getMessage()); + this.code = error.getCode(); + this.reason = reason(error); + this.idempotent = idempotent; + this.retryable = idempotent && isRetryable(error); + this.location = null; + this.debugInfo = null; + } + + public BaseServiceException(int code, String message, String reason, boolean idempotent) { + this(code, message, reason, idempotent, null); + } + + public BaseServiceException(int code, String message, String reason, boolean idempotent, + Throwable cause) { + super(message, cause); + this.code = code; + this.reason = reason; + this.idempotent = idempotent; + this.retryable = idempotent && new Error(code, reason).isRetryable(retryableErrors()); + this.location = null; + this.debugInfo = null; + } + + protected Set retryableErrors() { + return Collections.emptySet(); + } + + protected boolean isRetryable(GoogleJsonError error) { + return error != null && error(error).isRetryable(retryableErrors()); + } + + protected boolean isRetryable(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + return isRetryable(((GoogleJsonResponseException) exception).getDetails()); + } + return exception instanceof SocketTimeoutException; + } + + /** + * Returns the code associated with this exception. + */ + public int code() { + return code; + } + + /** + * Returns the reason that caused the exception. + */ + public String reason() { + return reason; + } + + /** + * Returns {@code true} when it is safe to retry the operation that caused this exception. 
+ */ + public boolean retryable() { + return retryable; + } + + /** + * Returns {@code true} when the operation that caused this exception had no side effects. + */ + public boolean idempotent() { + return idempotent; + } + + /** + * Returns the service location where the error causing the exception occurred. Returns + * {@code null} if not set. + */ + public String location() { + return location; + } + + protected String debugInfo() { + return debugInfo; + } + + protected static String reason(GoogleJsonError error) { + if (error.getErrors() != null && !error.getErrors().isEmpty()) { + return error.getErrors().get(0).getReason(); + } + return null; + } + + protected static Error error(GoogleJsonError error) { + return new Error(error.getCode(), reason(error)); + } + + protected static String message(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + GoogleJsonError details = ((GoogleJsonResponseException) exception).getDetails(); + if (details != null) { + return details.getMessage(); + } + } + return exception.getMessage(); + } + + protected static void translateAndPropagateIfPossible(RetryHelper.RetryHelperException ex) { + if (ex.getCause() instanceof BaseServiceException) { + throw (BaseServiceException) ex.getCause(); + } + if (ex instanceof RetryHelper.RetryInterruptedException) { + RetryHelper.RetryInterruptedException.propagate(); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java new file mode 100644 index 000000000000..1d18a5a27e81 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java @@ -0,0 +1,294 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.nio.channels.ClosedChannelException; +import java.util.Arrays; +import java.util.Objects; + +/** + * Base implementation for a {@link WriteChannel}. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. Possibly with additional configuration + */ +public abstract class BaseWriteChannel< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> implements WriteChannel { + + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + + private final ServiceOptionsT options; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer = new byte[0]; + private int limit; + private boolean isOpen = true; + private int chunkSize = defaultChunkSize(); + + protected int minChunkSize() { + return MIN_CHUNK_SIZE; + } + + protected int defaultChunkSize() { + return DEFAULT_CHUNK_SIZE; + } + + /** + * Writes {@code length} bytes of {@link #buffer()} to the {@link #uploadId()} URL. 
+ * + * @param length the number of bytes to write from {@link #buffer()} + * @param last if {@code true} the resumable session is closed + */ + protected abstract void flushBuffer(int length, boolean last); + + protected ServiceOptionsT options() { + return options; + } + + protected EntityT entity() { + return entity; + } + + protected String uploadId() { + return uploadId; + } + + protected int position() { + return position; + } + + protected byte[] buffer() { + return buffer; + } + + protected int limit() { + return limit; + } + + protected int chunkSize() { + return chunkSize; + } + + @Override + public final void chunkSize(int chunkSize) { + chunkSize = (chunkSize / minChunkSize()) * minChunkSize(); + this.chunkSize = Math.max(minChunkSize(), chunkSize); + } + + protected BaseWriteChannel(ServiceOptionsT options, EntityT entity, String uploadId) { + this.options = options; + this.entity = entity; + this.uploadId = uploadId; + } + + private void flush() { + if (limit >= chunkSize) { + final int length = limit - limit % minChunkSize(); + flushBuffer(length, false); + position += length; + limit -= length; + byte[] temp = new byte[chunkSize]; + System.arraycopy(buffer, length, temp, 0, limit); + buffer = temp; + } + } + + private void validateOpen() throws ClosedChannelException { + if (!isOpen) { + throw new ClosedChannelException(); + } + } + + @Override + public final int write(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + int toWrite = byteBuffer.remaining(); + int spaceInBuffer = buffer.length - limit; + if (spaceInBuffer >= toWrite) { + byteBuffer.get(buffer, limit, toWrite); + } else { + buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); + byteBuffer.get(buffer, limit, toWrite); + } + limit += toWrite; + flush(); + return toWrite; + } + + @Override + public boolean isOpen() { + return isOpen; + } + + @Override + public final void close() throws IOException { + if (isOpen) { + flushBuffer(limit, 
true); + position += buffer.length; + isOpen = false; + buffer = null; + } + } + + /** + * Creates a {@link BaseState.Builder} for the current write channel. + */ + protected abstract BaseState.Builder stateBuilder(); + + @Override + public RestorableState capture() { + byte[] bufferToSave = null; + if (isOpen) { + flush(); + bufferToSave = Arrays.copyOf(buffer, limit); + } + return stateBuilder() + .position(position) + .buffer(bufferToSave) + .isOpen(isOpen) + .chunkSize(chunkSize) + .build(); + } + + /** + * Restores the state of the current write channel given a {@link BaseState} object. + */ + protected void restore(BaseState state) { + if (state.buffer != null) { + this.buffer = state.buffer.clone(); + this.limit = state.buffer.length; + } + this.position = state.position; + this.isOpen = state.isOpen; + this.chunkSize = state.chunkSize; + } + + protected abstract static class BaseState< + ServiceOptionsT extends ServiceOptions, EntityT extends Serializable> + implements RestorableState, Serializable { + + private static final long serialVersionUID = 8541062465055125619L; + + protected final ServiceOptionsT serviceOptions; + protected final EntityT entity; + protected final String uploadId; + protected final int position; + protected final byte[] buffer; + protected final boolean isOpen; + protected final int chunkSize; + + protected BaseState(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.entity = builder.entity; + this.uploadId = builder.uploadId; + this.position = builder.position; + this.buffer = builder.buffer; + this.isOpen = builder.isOpen; + this.chunkSize = builder.chunkSize; + } + + /** + * Base builder for a write channel's state. Users are not supposed to access this class + * directly. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. 
Possibly with additional + * configuration + */ + public abstract static class Builder< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> { + private final ServiceOptionsT serviceOptions; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer; + private boolean isOpen; + private int chunkSize; + + protected Builder(ServiceOptionsT options, EntityT entity, String uploadId) { + this.serviceOptions = options; + this.entity = entity; + this.uploadId = uploadId; + } + + public Builder position(int position) { + this.position = position; + return this; + } + + public Builder buffer(byte[] buffer) { + this.buffer = buffer; + return this; + } + + public Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + public Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + public abstract RestorableState build(); + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, entity, uploadId, position, isOpen, chunkSize, + Arrays.hashCode(buffer)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof BaseState)) { + return false; + } + final BaseState other = (BaseState) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.entity, other.entity) + && Objects.equals(this.uploadId, other.uploadId) + && Objects.deepEquals(this.buffer, other.buffer) + && this.position == other.position + && this.isOpen == other.isOpen + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("entity", entity) + .add("uploadId", uploadId) + .add("position", position) + .add("isOpen", isOpen) + .toString(); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java 
b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java index a0fab3dca566..39d4c4e75a1a 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java @@ -64,7 +64,7 @@ enum RetryResult { * This method is called after the evaluation and could alter its result. * * @param exception the exception that is being evaluated - * @param retryResult the result of the evaluation so far. + * @param retryResult the result of the evaluation so far * @return {@link RetryResult} to indicate if the exception should be ignored ( * {@link RetryResult#RETRY}), propagated ({@link RetryResult#NO_RETRY}), or evaluation * should proceed ({@link RetryResult#CONTINUE_EVALUATION}). @@ -231,11 +231,11 @@ void verifyCaller(Callable callable) { } } - public Set> getRetriableExceptions() { + public Set> retriableExceptions() { return retriableExceptions; } - public Set> getNonRetriableExceptions() { + public Set> nonRetriableExceptions() { return nonRetriableExceptions; } @@ -250,7 +250,7 @@ boolean shouldRetry(Exception ex) { Interceptor.RetryResult retryResult = retryInfo == null ? Interceptor.RetryResult.NO_RETRY : retryInfo.retry; for (Interceptor interceptor : interceptors) { - Interceptor.RetryResult interceptorRetry = + Interceptor.RetryResult interceptorRetry = checkNotNull(interceptor.afterEval(ex, retryResult)); if (interceptorRetry != Interceptor.RetryResult.CONTINUE_EVALUATION) { retryResult = interceptorRetry; @@ -262,7 +262,7 @@ boolean shouldRetry(Exception ex) { /** * Returns an instance which retry any checked exception and abort on any runtime exception. 
*/ - public static ExceptionHandler getDefaultInstance() { + public static ExceptionHandler defaultInstance() { return DEFAULT_INSTANCE; } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java b/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java new file mode 100644 index 000000000000..748eaba2ab4c --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/IamPolicy.java @@ -0,0 +1,256 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static com.google.common.base.Preconditions.checkArgument; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +import java.io.Serializable; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Base class for Identity and Access Management (IAM) policies. IAM policies are used to specify + * access settings for Cloud Platform resources. A policy is a map of bindings. A binding assigns + * a set of identities to a role, where the identities can be user accounts, Google groups, Google + * domains, and service accounts. A role is a named list of permissions defined by IAM. 
+ * + * @param the data type of roles (should be serializable) + * @see Policy + */ +public abstract class IamPolicy implements Serializable { + + private static final long serialVersionUID = 1114489978726897720L; + + private final Map> bindings; + private final String etag; + private final Integer version; + + /** + * Builder for an IAM Policy. + * + * @param the data type of roles + * @param the subclass extending this abstract builder + */ + public abstract static class Builder> { + + private final Map> bindings = new HashMap<>(); + private String etag; + private Integer version; + + /** + * Constructor for IAM Policy builder. + */ + protected Builder() {} + + /** + * Replaces the builder's map of bindings with the given map of bindings. + * + * @throws IllegalArgumentException if the provided map is null or contain any null values + */ + public final B bindings(Map> bindings) { + checkArgument(bindings != null, "The provided map of bindings cannot be null."); + for (Map.Entry> binding : bindings.entrySet()) { + verifyBinding(binding.getKey(), binding.getValue()); + } + this.bindings.clear(); + for (Map.Entry> binding : bindings.entrySet()) { + this.bindings.put(binding.getKey(), new HashSet(binding.getValue())); + } + return self(); + } + + /** + * Adds a binding to the policy. + * + * @throws IllegalArgumentException if the policy already contains a binding with the same role + * or if the role or any identities are null + */ + public final B addBinding(R role, Set identities) { + verifyBinding(role, identities); + checkArgument(!bindings.containsKey(role), + "The policy already contains a binding with the role " + role.toString() + "."); + bindings.put(role, new HashSet(identities)); + return self(); + } + + /** + * Adds a binding to the policy. + * + * @throws IllegalArgumentException if the policy already contains a binding with the same role + * or if the role or any identities are null + */ + public final B addBinding(R role, Identity first, Identity... 
others) { + HashSet identities = new HashSet<>(); + identities.add(first); + identities.addAll(Arrays.asList(others)); + return addBinding(role, identities); + } + + private void verifyBinding(R role, Collection identities) { + checkArgument(role != null, "The role cannot be null."); + verifyIdentities(identities); + } + + private void verifyIdentities(Collection identities) { + checkArgument(identities != null, "A role cannot be assigned to a null set of identities."); + checkArgument(!identities.contains(null), "Null identities are not permitted."); + } + + /** + * Removes the binding associated with the specified role. + */ + public final B removeBinding(R role) { + bindings.remove(role); + return self(); + } + + /** + * Adds one or more identities to an existing binding. + * + * @throws IllegalArgumentException if the policy doesn't contain a binding with the specified + * role or any identities are null + */ + public final B addIdentity(R role, Identity first, Identity... others) { + checkArgument(bindings.containsKey(role), + "The policy doesn't contain the role " + role.toString() + "."); + List toAdd = new LinkedList<>(); + toAdd.add(first); + toAdd.addAll(Arrays.asList(others)); + verifyIdentities(toAdd); + bindings.get(role).addAll(toAdd); + return self(); + } + + /** + * Removes one or more identities from an existing binding. + * + * @throws IllegalArgumentException if the policy doesn't contain a binding with the specified + * role + */ + public final B removeIdentity(R role, Identity first, Identity... others) { + checkArgument(bindings.containsKey(role), + "The policy doesn't contain the role " + role.toString() + "."); + bindings.get(role).remove(first); + bindings.get(role).removeAll(Arrays.asList(others)); + return self(); + } + + /** + * Sets the policy's etag. + * + *

Etags are used for optimistic concurrency control as a way to help prevent simultaneous + * updates of a policy from overwriting each other. It is strongly suggested that systems make + * use of the etag in the read-modify-write cycle to perform policy updates in order to avoid + * race conditions. An etag is returned in the response to getIamPolicy, and systems are + * expected to put that etag in the request to setIamPolicy to ensure that their change will be + * applied to the same version of the policy. If no etag is provided in the call to + * setIamPolicy, then the existing policy is overwritten blindly. + */ + protected final B etag(String etag) { + this.etag = etag; + return self(); + } + + /** + * Sets the version of the policy. The default version is 0, meaning only the "owner", "editor", + * and "viewer" roles are permitted. If the version is 1, you may also use other roles. + */ + protected final B version(Integer version) { + this.version = version; + return self(); + } + + @SuppressWarnings("unchecked") + private B self() { + return (B) this; + } + + public abstract IamPolicy build(); + } + + protected IamPolicy(Builder> builder) { + ImmutableMap.Builder> bindingsBuilder = ImmutableMap.builder(); + for (Map.Entry> binding : builder.bindings.entrySet()) { + bindingsBuilder.put(binding.getKey(), ImmutableSet.copyOf(binding.getValue())); + } + this.bindings = bindingsBuilder.build(); + this.etag = builder.etag; + this.version = builder.version; + } + + /** + * Returns a builder containing the properties of this IAM Policy. + */ + public abstract Builder> toBuilder(); + + /** + * The map of bindings that comprises the policy. + */ + public Map> bindings() { + return bindings; + } + + /** + * The policy's etag. + * + *

Etags are used for optimistic concurrency control as a way to help prevent simultaneous + * updates of a policy from overwriting each other. It is strongly suggested that systems make + * use of the etag in the read-modify-write cycle to perform policy updates in order to avoid + * race conditions. An etag is returned in the response to getIamPolicy, and systems are + * expected to put that etag in the request to setIamPolicy to ensure that their change will be + * applied to the same version of the policy. If no etag is provided in the call to + * setIamPolicy, then the existing policy is overwritten blindly. + */ + public String etag() { + return etag; + } + + /** + * Sets the version of the policy. The default version is 0, meaning only the "owner", "editor", + * and "viewer" roles are permitted. If the version is 1, you may also use other roles. + */ + public Integer version() { + return version; + } + + @Override + public final int hashCode() { + return Objects.hash(getClass(), bindings, etag, version); + } + + @Override + public final boolean equals(Object obj) { + if (obj == null || !getClass().equals(obj.getClass())) { + return false; + } + @SuppressWarnings("rawtypes") + IamPolicy other = (IamPolicy) obj; + return Objects.equals(bindings, other.bindings()) + && Objects.equals(etag, other.etag()) + && Objects.equals(version, other.version()); + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java b/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java new file mode 100644 index 000000000000..687a76ffc42c --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Identity.java @@ -0,0 +1,225 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.CaseFormat; + +import java.io.Serializable; +import java.util.Objects; + +/** + * An identity in an {@link IamPolicy}. The following types of identities are permitted in IAM + * policies: + *

    + *
  • Google account + *
  • Service account + *
  • Google group + *
  • Google Apps domain + *
+ * + *

There are also two special identities that represent all users and all Google-authenticated + * accounts. + * + * @see Concepts + * related to identity + */ +public final class Identity implements Serializable { + + private static final long serialVersionUID = -8181841964597657446L; + + private final Type type; + private final String value; + + /** + * The types of IAM identities. + */ + public enum Type { + + /** + * Represents anyone who is on the internet; with or without a Google account. + */ + ALL_USERS, + + /** + * Represents anyone who is authenticated with a Google account or a service account. + */ + ALL_AUTHENTICATED_USERS, + + /** + * Represents a specific Google account. + */ + USER, + + /** + * Represents a service account. + */ + SERVICE_ACCOUNT, + + /** + * Represents a Google group. + */ + GROUP, + + /** + * Represents all the users of a Google Apps domain name. + */ + DOMAIN + } + + private Identity(Type type, String value) { + this.type = type; + this.value = value; + } + + public Type type() { + return type; + } + + /** + * Returns the string identifier for this identity. The value corresponds to: + *

    + *
  • email address (for identities of type {@code USER}, {@code SERVICE_ACCOUNT}, and + * {@code GROUP}) + *
  • domain (for identities of type {@code DOMAIN}) + *
  • {@code null} (for identities of type {@code ALL_USERS} and + * {@code ALL_AUTHENTICATED_USERS}) + *
+ */ + public String value() { + return value; + } + + /** + * Returns a new identity representing anyone who is on the internet; with or without a Google + * account. + */ + public static Identity allUsers() { + return new Identity(Type.ALL_USERS, null); + } + + /** + * Returns a new identity representing anyone who is authenticated with a Google account or a + * service account. + */ + public static Identity allAuthenticatedUsers() { + return new Identity(Type.ALL_AUTHENTICATED_USERS, null); + } + + /** + * Returns a new user identity. + * + * @param email An email address that represents a specific Google account. For example, + * alice@gmail.com or joe@example.com. + */ + public static Identity user(String email) { + return new Identity(Type.USER, checkNotNull(email)); + } + + /** + * Returns a new service account identity. + * + * @param email An email address that represents a service account. For example, + * my-other-app@appspot.gserviceaccount.com. + */ + public static Identity serviceAccount(String email) { + return new Identity(Type.SERVICE_ACCOUNT, checkNotNull(email)); + } + + /** + * Returns a new group identity. + * + * @param email An email address that represents a Google group. For example, + * admins@example.com. + */ + public static Identity group(String email) { + return new Identity(Type.GROUP, checkNotNull(email)); + } + + /** + * Returns a new domain identity. + * + * @param domain A Google Apps domain name that represents all the users of that domain. For + * example, google.com or example.com. 
+ */ + public static Identity domain(String domain) { + return new Identity(Type.DOMAIN, checkNotNull(domain)); + } + + @Override + public int hashCode() { + return Objects.hash(value, type); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Identity)) { + return false; + } + Identity other = (Identity) obj; + return Objects.equals(value, other.value()) && Objects.equals(type, other.type()); + } + + /** + * Returns the string value associated with the identity. Used primarily for converting from + * {@code Identity} objects to strings for protobuf-generated policies. + */ + public String strValue() { + switch (type) { + case ALL_USERS: + return "allUsers"; + case ALL_AUTHENTICATED_USERS: + return "allAuthenticatedUsers"; + case USER: + return "user:" + value; + case SERVICE_ACCOUNT: + return "serviceAccount:" + value; + case GROUP: + return "group:" + value; + case DOMAIN: + return "domain:" + value; + default: + throw new IllegalStateException("Unexpected identity type: " + type); + } + } + + /** + * Converts a string to an {@code Identity}. Used primarily for converting protobuf-generated + * policy identities to {@code Identity} objects. 
+ */ + public static Identity valueOf(String identityStr) { + String[] info = identityStr.split(":"); + Type type = Type.valueOf(CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, info[0])); + switch (type) { + case ALL_USERS: + return Identity.allUsers(); + case ALL_AUTHENTICATED_USERS: + return Identity.allAuthenticatedUsers(); + case USER: + return Identity.user(info[1]); + case SERVICE_ACCOUNT: + return Identity.serviceAccount(info[1]); + case GROUP: + return Identity.group(info[1]); + case DOMAIN: + return Identity.domain(info[1]); + default: + throw new IllegalStateException("Unexpected identity type " + type); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Page.java b/gcloud-java-core/src/main/java/com/google/gcloud/Page.java new file mode 100644 index 000000000000..53f3a3842a18 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Page.java @@ -0,0 +1,69 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.util.Iterator; + +/** + * Interface for Google Cloud paginated results. + * + *

+ * Use {@code Page} to iterate through all values (also in next pages): + *

 {@code
+ * Page page = ...; // get a Page instance
+ * Iterator iterator = page.iterateAll();
+ * while (iterator.hasNext()) {
+ *   T value = iterator.next();
+ *   // do something with value
+ * }}
+ *

+ * Or handle pagination explicitly: + *

 {@code
+ * Page page = ...; // get a Page instance
+ * while (page != null) {
+ *   for (T value : page.values()) {
+ *     // do something with value
+ *   }
+ *   page = page.nextPage();
+ * }}
+ * + * @param the value type that the page holds + */ +public interface Page { + + /** + * Returns the values contained in this page. + */ + Iterable values(); + + /** + * Returns an iterator for all values, possibly also in the next pages. Once current page's values + * are traversed the iterator fetches next page, if any. + */ + Iterator iterateAll(); + + /** + * Returns the cursor for the nextPage or {@code null} if no more results. + */ + String nextPageCursor(); + + /** + * Returns the next page of results or {@code null} if no more result. + */ + Page nextPage(); + +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java b/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java new file mode 100644 index 000000000000..2dc031ab9bd4 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java @@ -0,0 +1,144 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.common.collect.AbstractIterator; +import com.google.common.collect.ImmutableMap; + +import java.io.Serializable; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; + +/** + * Base implementation for Google Cloud paginated results. 
+ * + * @param the value type that the page holds + */ +public class PageImpl implements Page, Serializable { + + private static final long serialVersionUID = 3914827379823557934L; + + private final String cursor; + private final Iterable results; + private final NextPageFetcher pageFetcher; + + /** + * Interface for fetching the next page of results from the service. + * + * @param the value type that the page holds + */ + public interface NextPageFetcher extends Serializable { + Page nextPage(); + } + + static class PageIterator extends AbstractIterator { + + private Iterator currentPageIterator; + private Page currentPage; + + PageIterator(Page currentPage) { + this.currentPageIterator = currentPage.values().iterator(); + this.currentPage = currentPage; + } + + @Override + protected T computeNext() { + while (!currentPageIterator.hasNext()) { + currentPage = currentPage.nextPage(); + if (currentPage == null) { + return endOfData(); + } + currentPageIterator = currentPage.values().iterator(); + } + return currentPageIterator.next(); + } + } + + /** + * Creates a {@code PageImpl} object. In order for the object to be serializable the {@code + * results} parameter must be serializable. + */ + public PageImpl(NextPageFetcher pageFetcher, String cursor, Iterable results) { + this.pageFetcher = pageFetcher; + this.cursor = cursor; + this.results = results; + } + + @Override + public Iterable values() { + return results == null ? 
Collections.emptyList() : results; + } + + @Override + public Iterator iterateAll() { + return new PageIterator<>(this); + } + + @Override + public String nextPageCursor() { + return cursor; + } + + @Override + public Page nextPage() { + if (cursor == null || pageFetcher == null) { + return null; + } + return pageFetcher.nextPage(); + } + + @Override + public int hashCode() { + return Objects.hash(cursor, results); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof PageImpl)) { + return false; + } + PageImpl other = (PageImpl) obj; + return Objects.equals(cursor, other.cursor) + && Objects.equals(results, other.results); + } + + /** + * Utility method to construct the options map for the next page request. + * + * @param the value type that the page holds. Instances of {@code T} should be + * {@code Serializable} + * @param pageTokenOption the key for the next page cursor option in the options map + * @param cursor the cursor for the next page + * @param optionMap the previous options map + * @return the options map for the next page request + */ + public static Map nextRequestOptions( + T pageTokenOption, String cursor, Map optionMap) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + if (cursor != null) { + builder.put(pageTokenOption, cursor); + } + for (Map.Entry option : optionMap.entrySet()) { + if (!Objects.equals(option.getKey(), pageTokenOption)) { + builder.put(option.getKey(), option.getValue()); + } + } + return builder.build(); + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java new file mode 100644 index 000000000000..7537c5a8ce0b --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java @@ -0,0 +1,57 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.channels.ReadableByteChannel; + +/** + * A channel for reading data from a Google Cloud object. + * + *

Implementations of this class may buffer data internally to reduce remote calls. This + * interface implements {@link Restorable} to allow saving the reader's state to continue reading + * afterwards. + *

+ */ +public interface ReadChannel extends ReadableByteChannel, Closeable, Restorable { + + /** + * Overridden to remove IOException. + * + * @see java.nio.channels.Channel#close() + */ + @Override + void close(); + + void seek(int position) throws IOException; + + /** + * Sets the minimum size that will be read by a single RPC. + * Read data will be locally buffered until consumed. + */ + void chunkSize(int chunkSize); + + /** + * Captures the read channel state so that it can be saved and restored afterwards. + * + * @return a {@link RestorableState} object that contains the read channel state and can restore + * it afterwards. + */ + @Override + RestorableState capture(); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java b/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java index 51391e33bd7d..0b573522e370 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java @@ -21,18 +21,20 @@ * *

* A typical capture usage: - *

  {@code
+ * 
 {@code
  * X restorableObj; // X instanceof Restorable
  * RestorableState state = restorableObj.capture();
  * .. persist state
  * }
* * A typical restore usage: - *
  {@code
+ * 
 {@code
  * RestorableState state = ... // read from persistence
  * X restorableObj = state.restore();
  * ...
  * }
+ * + * @param the restorable object's type */ public interface Restorable> { diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java b/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java index 0c60411cb285..d6ce736ae856 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java @@ -22,6 +22,8 @@ * * Implementations of this class must implement {@link java.io.Serializable} to ensure that the * state of a the object can be correctly serialized. + * + * @param the restored object's type */ public interface RestorableState> { diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java b/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java index 7b47209cd3ff..9b9c1f6a3124 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java @@ -194,9 +194,9 @@ private V doRetry() throws RetryHelperException { } exception = e; } - if (attemptNumber >= params.getRetryMaxAttempts() - || attemptNumber >= params.getRetryMinAttempts() - && stopwatch.elapsed(MILLISECONDS) >= params.getTotalRetryPeriodMillis()) { + if (attemptNumber >= params.retryMaxAttempts() + || attemptNumber >= params.retryMinAttempts() + && stopwatch.elapsed(MILLISECONDS) >= params.totalRetryPeriodMillis()) { throw new RetriesExhaustedException(this + ": Too many failures, giving up", exception); } long sleepDurationMillis = getSleepDuration(params, attemptNumber); @@ -215,9 +215,9 @@ private V doRetry() throws RetryHelperException { @VisibleForTesting static long getSleepDuration(RetryParams retryParams, int attemptsSoFar) { - long initialDelay = retryParams.getInitialRetryDelayMillis(); - double backoffFactor = retryParams.getRetryDelayBackoffFactor(); - long maxDelay = retryParams.getMaxRetryDelayMillis(); + long initialDelay = 
retryParams.initialRetryDelayMillis(); + double backoffFactor = retryParams.retryDelayBackoffFactor(); + long maxDelay = retryParams.maxRetryDelayMillis(); long retryDelay = getExponentialValue(initialDelay, backoffFactor, maxDelay, attemptsSoFar); return (long) ((random() / 2.0 + .75) * retryDelay); } @@ -228,8 +228,8 @@ private static long getExponentialValue(long initialDelay, double backoffFactor, } public static V runWithRetries(Callable callable) throws RetryHelperException { - return runWithRetries(callable, RetryParams.getDefaultInstance(), - ExceptionHandler.getDefaultInstance()); + return runWithRetries(callable, RetryParams.defaultInstance(), + ExceptionHandler.defaultInstance()); } public static V runWithRetries(Callable callable, RetryParams params, diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java b/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java index 461dbac77ff2..ab3644c6d747 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java @@ -38,8 +38,8 @@ * {@code RetryParams}, first create a {@link RetryParams.Builder}. The builder is mutable and each * of the parameters can be set (any unset parameters will fallback to the defaults). The * {@code Builder} can be then used to create an immutable {@code RetryParams} object. For default - * {@code RetryParams} use {@link #getDefaultInstance}. Default settings are subject to change - * release to release. If you require specific settings, explicitly create an instance of + * {@code RetryParams} use {@link #defaultInstance}. Default settings are subject to change release + * to release. If you require specific settings, explicitly create an instance of * {@code RetryParams} with all the required settings. 
* * @see RetryHelper @@ -91,12 +91,12 @@ private Builder() { retryDelayBackoffFactor = DEFAULT_RETRY_DELAY_BACKOFF_FACTOR; totalRetryPeriodMillis = DEFAULT_TOTAL_RETRY_PERIOD_MILLIS; } else { - retryMinAttempts = retryParams.getRetryMinAttempts(); - retryMaxAttempts = retryParams.getRetryMaxAttempts(); - initialRetryDelayMillis = retryParams.getInitialRetryDelayMillis(); - maxRetryDelayMillis = retryParams.getMaxRetryDelayMillis(); - retryDelayBackoffFactor = retryParams.getRetryDelayBackoffFactor(); - totalRetryPeriodMillis = retryParams.getTotalRetryPeriodMillis(); + retryMinAttempts = retryParams.retryMinAttempts(); + retryMaxAttempts = retryParams.retryMaxAttempts(); + initialRetryDelayMillis = retryParams.initialRetryDelayMillis(); + maxRetryDelayMillis = retryParams.maxRetryDelayMillis(); + retryDelayBackoffFactor = retryParams.retryDelayBackoffFactor(); + totalRetryPeriodMillis = retryParams.totalRetryPeriodMillis(); } } @@ -201,7 +201,7 @@ private RetryParams(Builder builder) { /** * Returns an instance with the default parameters. */ - public static RetryParams getDefaultInstance() { + public static RetryParams defaultInstance() { return DEFAULT_INSTANCE; } @@ -216,14 +216,14 @@ public static RetryParams noRetries() { /** * Returns the retryMinAttempts. Default value is {@value #DEFAULT_RETRY_MIN_ATTEMPTS}. */ - public int getRetryMinAttempts() { + public int retryMinAttempts() { return retryMinAttempts; } /** * Returns the retryMaxAttempts. Default value is {@value #DEFAULT_RETRY_MAX_ATTEMPTS}. */ - public int getRetryMaxAttempts() { + public int retryMaxAttempts() { return retryMaxAttempts; } @@ -231,14 +231,14 @@ public int getRetryMaxAttempts() { * Returns the initialRetryDelayMillis. Default value is * {@value #DEFAULT_INITIAL_RETRY_DELAY_MILLIS}. */ - public long getInitialRetryDelayMillis() { + public long initialRetryDelayMillis() { return initialRetryDelayMillis; } /** * Returns the maxRetryDelayMillis. 
Default values is {@value #DEFAULT_MAX_RETRY_DELAY_MILLIS}. */ - public long getMaxRetryDelayMillis() { + public long maxRetryDelayMillis() { return maxRetryDelayMillis; } @@ -246,7 +246,7 @@ public long getMaxRetryDelayMillis() { * Returns the maxRetryDelayBackoffFactor. Default values is * {@value #DEFAULT_RETRY_DELAY_BACKOFF_FACTOR}. */ - public double getRetryDelayBackoffFactor() { + public double retryDelayBackoffFactor() { return retryDelayBackoffFactor; } @@ -254,7 +254,7 @@ public double getRetryDelayBackoffFactor() { * Returns the totalRetryPeriodMillis. Default value is * {@value #DEFAULT_TOTAL_RETRY_PERIOD_MILLIS}. */ - public long getTotalRetryPeriodMillis() { + public long totalRetryPeriodMillis() { return totalRetryPeriodMillis; } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Service.java b/gcloud-java-core/src/main/java/com/google/gcloud/Service.java index 2748c55058b4..60bc26670f2e 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/Service.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Service.java @@ -16,6 +16,11 @@ package com.google.gcloud; +/** + * Interface for service objects. + * + * @param the {@code ServiceOptions} subclass corresponding to the service + */ public interface Service> { OptionsT options(); } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java index b59fc1e9a10e..1727e9c3976f 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java @@ -19,9 +19,13 @@ /** * A base interface for all service factories. * - * Implementation must provide a public no-arg constructor. + *

Implementation must provide a public no-arg constructor. * Loading of a factory implementation is done via {@link java.util.ServiceLoader}. + * + * @param the service subclass + * @param the {@code ServiceOptions} subclass corresponding to the service */ +@SuppressWarnings("rawtypes") public interface ServiceFactory { ServiceT create(ServiceOptionsT serviceOptions); diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java index 1be1f16115ad..d45069434a26 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java @@ -17,7 +17,7 @@ package com.google.gcloud; import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkArgument; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.client.extensions.appengine.http.UrlFetchTransport; @@ -25,11 +25,13 @@ import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.http.HttpCredentialsAdapter; import com.google.common.collect.Iterables; import com.google.gcloud.spi.ServiceRpcFactory; import java.io.BufferedReader; import java.io.File; +import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; @@ -51,11 +53,15 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -public abstract class ServiceOptions< - ServiceT extends Service, - ServiceRpcT, - OptionsT extends ServiceOptions> - implements Serializable { +/** + * Abstract class representing service options. 
+ * + * @param the service subclass + * @param the spi-layer class corresponding to the service + * @param the {@code ServiceOptions} subclass corresponding to the service + */ +public abstract class ServiceOptions, ServiceRpcT, + OptionsT extends ServiceOptions> implements Serializable { private static final String DEFAULT_HOST = "https://www.googleapis.com"; private static final long serialVersionUID = 1203687993961393350L; @@ -108,12 +114,6 @@ public HttpTransport create() { // Maybe not on App Engine } } - // Consider Compute - try { - return AuthCredentials.getComputeCredential().getTransport(); - } catch (Exception e) { - // Maybe not on GCE - } return new NetHttpTransport(); } } @@ -157,9 +157,15 @@ private Object readResolve() throws ObjectStreamException { } } - protected abstract static class Builder< - ServiceT extends Service, - ServiceRpcT, + /** + * Builder for {@code ServiceOptions}. + * + * @param the service subclass + * @param the spi-layer class corresponding to the service + * @param the {@code ServiceOptions} subclass corresponding to the service + * @param the {@code ServiceOptions} builder + */ + protected abstract static class Builder, ServiceRpcT, OptionsT extends ServiceOptions, B extends Builder> { @@ -209,7 +215,7 @@ public B serviceFactory(ServiceFactory serviceFactory) { * replaced by Java8's {@code java.time.Clock}. * * @param clock the clock to set - * @return the builder. + * @return the builder */ public B clock(Clock clock) { this.clock = clock; @@ -219,7 +225,7 @@ public B clock(Clock clock) { /** * Sets project id. * - * @return the builder. + * @return the builder */ public B projectId(String projectId) { this.projectId = projectId; @@ -229,7 +235,7 @@ public B projectId(String projectId) { /** * Sets service host. * - * @return the builder. + * @return the builder */ public B host(String host) { this.host = host; @@ -239,7 +245,7 @@ public B host(String host) { /** * Sets the transport factory. * - * @return the builder. 
+ * @return the builder */ public B httpTransportFactory(HttpTransportFactory httpTransportFactory) { this.httpTransportFactory = httpTransportFactory; @@ -249,7 +255,7 @@ public B httpTransportFactory(HttpTransportFactory httpTransportFactory) { /** * Sets the service authentication credentials. * - * @return the builder. + * @return the builder */ public B authCredentials(AuthCredentials authCredentials) { this.authCredentials = authCredentials; @@ -258,9 +264,10 @@ public B authCredentials(AuthCredentials authCredentials) { /** * Sets configuration parameters for request retries. If no configuration is set - * {@link RetryParams#noRetries()} is used. + * {@link RetryParams#defaultInstance()} is used. To disable retries, supply + * {@link RetryParams#noRetries()} here. * - * @return the builder. + * @return the builder */ public B retryParams(RetryParams retryParams) { this.retryParams = retryParams; @@ -282,7 +289,7 @@ public B serviceRpcFactory(ServiceRpcFactory serviceRpcFa * * @param connectTimeout connection timeout in milliseconds. 0 for an infinite timeout, a * negative number for the default value (20000). - * @return the builder. + * @return the builder */ public B connectTimeout(int connectTimeout) { this.connectTimeout = connectTimeout; @@ -294,7 +301,7 @@ public B connectTimeout(int connectTimeout) { * * @param readTimeout read timeout in milliseconds. 0 for an infinite timeout, a negative number * for the default value (20000). - * @return the builder. + * @return the builder */ public B readTimeout(int readTimeout) { this.readTimeout = readTimeout; @@ -305,14 +312,21 @@ public B readTimeout(int readTimeout) { protected ServiceOptions(Class> serviceFactoryClass, Class> rpcFactoryClass, Builder builder) { - projectId = checkNotNull(builder.projectId != null ? builder.projectId : defaultProject()); + projectId = builder.projectId != null ? 
builder.projectId : defaultProject(); + if (projectIdRequired()) { + checkArgument( + projectId != null, + "A project ID is required for this service but could not be determined from the builder " + + "or the environment. Please set a project ID using the builder."); + } host = firstNonNull(builder.host, defaultHost()); httpTransportFactory = firstNonNull(builder.httpTransportFactory, getFromServiceLoader(HttpTransportFactory.class, DefaultHttpTransportFactory.INSTANCE)); httpTransportFactoryClassName = httpTransportFactory.getClass().getName(); - authCredentials = firstNonNull(builder.authCredentials, defaultAuthCredentials()); - authCredentialsState = authCredentials.capture(); - retryParams = builder.retryParams; + authCredentials = + builder.authCredentials != null ? builder.authCredentials : defaultAuthCredentials(); + authCredentialsState = authCredentials != null ? authCredentials.capture() : null; + retryParams = firstNonNull(builder.retryParams, RetryParams.defaultInstance()); serviceFactory = firstNonNull(builder.serviceFactory, getFromServiceLoader(serviceFactoryClass, defaultServiceFactory())); serviceFactoryClassName = serviceFactory.getClass().getName(); @@ -324,8 +338,18 @@ protected ServiceOptions(Class> ser clock = firstNonNull(builder.clock, Clock.defaultClock()); } + /** + * Returns whether a service requires a project ID. This method may be overridden in + * service-specific Options objects. + * + * @return true if a project ID is required to use the service, false if not + */ + protected boolean projectIdRequired() { + return true; + } + private static AuthCredentials defaultAuthCredentials() { - // Consider App Engine. This will not be needed once issue #21 is fixed. + // Consider App Engine. 
if (appEngineAppId() != null) { try { return AuthCredentials.createForAppEngine(); @@ -337,16 +361,8 @@ private static AuthCredentials defaultAuthCredentials() { try { return AuthCredentials.createApplicationDefaults(); } catch (Exception ex) { - // fallback to old-style - } - - // Consider old-style Compute. This will not be needed once issue #21 is fixed. - try { - return AuthCredentials.createForComputeEngine(); - } catch (Exception ignore) { - // Maybe not on GCE + return null; } - return AuthCredentials.noCredentials(); } protected static String appEngineAppId() { @@ -366,19 +382,6 @@ protected String defaultProject() { } protected static String googleCloudProjectId() { - try { - URL url = new URL("http://metadata/computeMetadata/v1/project/project-id"); - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestProperty("X-Google-Metadata-Request", "True"); - InputStream input = connection.getInputStream(); - if (connection.getResponseCode() == 200) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, UTF_8))) { - return reader.readLine(); - } - } - } catch (IOException ignore) { - // ignore - } File configDir; if (System.getenv().containsKey("CLOUDSDK_CONFIG")) { configDir = new File(System.getenv("CLOUDSDK_CONFIG")); @@ -387,28 +390,52 @@ protected static String googleCloudProjectId() { } else { configDir = new File(System.getProperty("user.home"), ".config/gcloud"); } - try (BufferedReader reader = - new BufferedReader(new FileReader(new File(configDir, "properties")))) { - String line; - String section = null; - Pattern projectPattern = Pattern.compile("^project\\s*=\\s*(.*)$"); - Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); - while ((line = reader.readLine()) != null) { - if (line.isEmpty() || line.startsWith(";")) { - continue; - } - line = line.trim(); - Matcher matcher = sectionPattern.matcher(line); - if (matcher.matches()) { - section = matcher.group(1); - } else if 
(section == null || section.equals("core")) { - matcher = projectPattern.matcher(line); + FileReader fileReader = null; + try { + fileReader = new FileReader(new File(configDir, "configurations/config_default")); + } catch (FileNotFoundException newConfigFileNotFoundEx) { + try { + fileReader = new FileReader(new File(configDir, "properties")); + } catch (FileNotFoundException oldConfigFileNotFoundEx) { + // ignore + } + } + if (fileReader != null) { + try (BufferedReader reader = new BufferedReader(fileReader)) { + String line; + String section = null; + Pattern projectPattern = Pattern.compile("^project\\s*=\\s*(.*)$"); + Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.startsWith(";")) { + continue; + } + line = line.trim(); + Matcher matcher = sectionPattern.matcher(line); if (matcher.matches()) { - return matcher.group(1); + section = matcher.group(1); + } else if (section == null || section.equals("core")) { + matcher = projectPattern.matcher(line); + if (matcher.matches()) { + return matcher.group(1); + } } } + } catch (IOException ex) { + // ignore } - } catch (IOException ex) { + } + try { + URL url = new URL("http://metadata/computeMetadata/v1/project/project-id"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestProperty("X-Google-Metadata-Request", "True"); + InputStream input = connection.getInputStream(); + if (connection.getResponseCode() == 200) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, UTF_8))) { + return reader.readLine(); + } + } + } catch (IOException ignore) { // ignore } // return null if can't determine @@ -423,9 +450,11 @@ protected static String getAppEngineProjectId() { try { Class factoryClass = Class.forName("com.google.appengine.api.appidentity.AppIdentityServiceFactory"); + Class serviceClass = + 
Class.forName("com.google.appengine.api.appidentity.AppIdentityService"); Method method = factoryClass.getMethod("getAppIdentityService"); Object appIdentityService = method.invoke(null); - method = appIdentityService.getClass().getMethod("getServiceAccountName"); + method = serviceClass.getMethod("getServiceAccountName"); String serviceAccountName = (String) method.invoke(appIdentityService); int indexOfAtSign = serviceAccountName.indexOf('@'); return serviceAccountName.substring(0, indexOfAtSign); @@ -435,6 +464,7 @@ protected static String getAppEngineProjectId() { } } + @SuppressWarnings("unchecked") public ServiceT service() { if (service == null) { service = serviceFactory.create((OptionsT) this); @@ -442,6 +472,7 @@ public ServiceT service() { return service; } + @SuppressWarnings("unchecked") public ServiceRpcT rpc() { if (rpc == null) { rpc = serviceRpcFactory.create((OptionsT) this); @@ -451,6 +482,8 @@ public ServiceRpcT rpc() { /** * Returns the project id. + * + * Return value can be null (for services that don't require a project id). */ public String projectId() { return projectId; @@ -478,11 +511,11 @@ public AuthCredentials authCredentials() { } /** - * Returns configuration parameters for request retries. By default requests are not retried: - * {@link RetryParams#noRetries()} is used. + * Returns configuration parameters for request retries. By default requests are retried: + * {@link RetryParams#defaultInstance()} is used. */ public RetryParams retryParams() { - return retryParams != null ? retryParams : RetryParams.noRetries(); + return retryParams; } /** @@ -490,13 +523,16 @@ public RetryParams retryParams() { * options. 
*/ public HttpRequestInitializer httpRequestInitializer() { - HttpTransport httpTransport = httpTransportFactory.create(); - final HttpRequestInitializer baseRequestInitializer = - authCredentials().httpRequestInitializer(httpTransport, scopes()); + final HttpRequestInitializer delegate = + authCredentials() != null && authCredentials.credentials() != null + ? new HttpCredentialsAdapter(authCredentials().credentials().createScoped(scopes())) + : null; return new HttpRequestInitializer() { @Override public void initialize(HttpRequest httpRequest) throws IOException { - baseRequestInitializer.initialize(httpRequest); + if (delegate != null) { + delegate.initialize(httpRequest); + } if (connectTimeout >= 0) { httpRequest.setConnectTimeout(connectTimeout); } @@ -562,9 +598,10 @@ private void readObject(ObjectInputStream input) throws IOException, ClassNotFou httpTransportFactory = newInstance(httpTransportFactoryClassName); serviceFactory = newInstance(serviceFactoryClassName); serviceRpcFactory = newInstance(serviceRpcFactoryClassName); - authCredentials = authCredentialsState.restore(); + authCredentials = authCredentialsState != null ? authCredentialsState.restore() : null; } + @SuppressWarnings("unchecked") private static T newInstance(String className) throws IOException, ClassNotFoundException { try { return (T) Class.forName(className).newInstance(); diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java new file mode 100644 index 000000000000..e6f06e23dc04 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java @@ -0,0 +1,48 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.nio.channels.WritableByteChannel; + +/** + * A channel for writing data to Google Cloud services. + * + *

Implementations of this class may further buffer data internally to reduce remote calls. + * Written data will only be visible after calling {@link #close()}. This interface implements + * {@link Restorable} to allow saving the writer's state to continue writing afterwards. + *

+ */ +public interface WriteChannel extends WritableByteChannel, Closeable, Restorable { + + /** + * Sets the minimum size that will be written by a single RPC. + * Written data will be buffered and only flushed upon reaching this size or closing the channel. + */ + void chunkSize(int chunkSize); + + /** + * Captures the write channel state so that it can be saved and restored afterwards. The original + * {@code WriteChannel} and the restored one should not both be used. Closing one channel + * causes the other channel to close; subsequent writes will fail. + * + * @return a {@link RestorableState} object that contains the write channel state and can restore + * it afterwards. + */ + @Override + RestorableState capture(); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java b/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java new file mode 100644 index 000000000000..d527640c99f9 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Core classes for the {@code gcloud-java} library. 
+ */ +package com.google.gcloud; diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java b/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java index d20b690167a1..d19f6047e4b2 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java @@ -24,6 +24,7 @@ * Implementation must provide a public no-arg constructor. * Loading of a factory implementation is done via {@link java.util.ServiceLoader}. */ +@SuppressWarnings("rawtypes") public interface ServiceRpcFactory { ServiceRpcT create(OptionsT options); diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java new file mode 100644 index 000000000000..e3c6abb7d1ee --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java @@ -0,0 +1,155 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import static com.google.gcloud.BaseServiceException.UNKNOWN_CODE; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Set; + +/** + * Tests for {@link BaseServiceException}. + */ +public class BaseServiceExceptionTest { + + private static final int CODE = 1; + private static final int CODE_NO_REASON = 2; + private static final String MESSAGE = "some message"; + private static final String REASON = "some reason"; + private static final boolean RETRYABLE = true; + private static final boolean IDEMPOTENT = true; + private static class CustomServiceException extends BaseServiceException { + + private static final long serialVersionUID = -195251309124875103L; + + public CustomServiceException(int code, String message, String reason, boolean idempotent) { + super(code, message, reason, idempotent); + } + + @Override + protected Set retryableErrors() { + return ImmutableSet.of(new Error(CODE, REASON), new Error(null, REASON), + new Error(CODE_NO_REASON, null)); + } + } + + @Test + public void testBaseServiceException() { + BaseServiceException serviceException = new BaseServiceException(CODE, MESSAGE, REASON, + IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + 
assertNull(serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertNull(serviceException.getCause()); + + Exception cause = new RuntimeException(); + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, false, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertFalse(serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + IOException exception = new SocketTimeoutException(); + serviceException = new BaseServiceException(exception, true); + assertTrue(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + assertEquals(exception, serviceException.getCause()); + + GoogleJsonError error = new GoogleJsonError(); + error.setCode(CODE); + error.setMessage(MESSAGE); + serviceException = new BaseServiceException(error, true); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertFalse(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, 
serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE_NO_REASON, MESSAGE, null, IDEMPOTENT); + assertEquals(CODE_NO_REASON, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertNull(serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(UNKNOWN_CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(UNKNOWN_CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BaseServiceException cause = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + RetryHelper.RetryHelperException exceptionMock = + createMock(RetryHelper.RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BaseServiceException.translateAndPropagateIfPossible(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(CODE, ex.code()); + assertEquals(MESSAGE, ex.getMessage()); + assertFalse(ex.retryable()); + assertEquals(IDEMPOTENT, ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java new file mode 100644 index 000000000000..6d5306a3bc7f --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java @@ -0,0 +1,144 @@ +/* + * 
Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertTrue; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.gcloud.spi.ServiceRpcFactory; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.nio.channels.ClosedChannelException; +import java.util.Arrays; +import java.util.Random; + +public class BaseWriteChannelTest { + + private abstract static class CustomService implements Service {} + private abstract static class CustomServiceOptions + extends ServiceOptions { + + private static final long serialVersionUID = 3302358029307467197L; + + protected CustomServiceOptions( + Class> serviceFactoryClass, + Class> rpcFactoryClass, + Builder builder) { + super(serviceFactoryClass, rpcFactoryClass, builder); + } + } + + private static final Serializable ENTITY = 42L; + private static final String UPLOAD_ID = "uploadId"; + private static final byte[] CONTENT = {0xD, 0xE, 0xA, 0xD}; + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private 
static final Random RANDOM = new Random(); + private static BaseWriteChannel channel; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setUp() { + channel = new BaseWriteChannel(null, ENTITY, UPLOAD_ID) { + @Override + public RestorableState capture() { + return null; + } + + @Override + protected void flushBuffer(int length, boolean last) {} + + @Override + protected BaseState.Builder stateBuilder() { + return null; + } + }; + } + + @Test + public void testConstructor() throws IOException { + assertEquals(null, channel.options()); + assertEquals(ENTITY, channel.entity()); + assertEquals(0, channel.position()); + assertEquals(UPLOAD_ID, channel.uploadId()); + assertEquals(0, channel.limit()); + assertTrue(channel.isOpen()); + assertArrayEquals(new byte[0], channel.buffer()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testClose() throws IOException { + channel.close(); + assertFalse(channel.isOpen()); + assertNull(channel.buffer()); + } + + @Test + public void testValidateOpen() throws IOException { + channel.close(); + thrown.expect(ClosedChannelException.class); + channel.write(ByteBuffer.allocate(42)); + } + + @Test + public void testChunkSize() throws IOException { + channel.chunkSize(42); + assertEquals(MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(2 * MIN_CHUNK_SIZE); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(512 * 1025); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testWrite() throws IOException { + channel.write(ByteBuffer.wrap(CONTENT)); + assertEquals(CONTENT.length, channel.limit()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.buffer().length); + assertArrayEquals(Arrays.copyOf(CONTENT, DEFAULT_CHUNK_SIZE), channel.buffer()); + } + + @Test + public void testWriteAndFlush() throws IOException { + ByteBuffer content = randomBuffer(DEFAULT_CHUNK_SIZE + 1); + channel.write(content); + 
assertEquals(DEFAULT_CHUNK_SIZE, channel.position()); + assertEquals(1, channel.limit()); + byte[] newContent = new byte[DEFAULT_CHUNK_SIZE]; + newContent[0] = content.get(DEFAULT_CHUNK_SIZE); + assertArrayEquals(newContent, channel.buffer()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java index c182515dbb16..cedc995ddbd0 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java @@ -82,7 +82,7 @@ public Object call() throws Error { } // using default exception handler (retry upon any non-runtime exceptions) - ExceptionHandler handler = ExceptionHandler.getDefaultInstance(); + ExceptionHandler handler = ExceptionHandler.defaultInstance(); assertValidCallable(new A(), handler); assertValidCallable(new B(), handler); assertValidCallable(new C(), handler); diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java new file mode 100644 index 000000000000..db0935c4766d --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/IamPolicyTest.java @@ -0,0 +1,180 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + +import org.junit.Test; + +import java.util.Map; +import java.util.Set; + +public class IamPolicyTest { + + private static final Identity ALL_USERS = Identity.allUsers(); + private static final Identity ALL_AUTH_USERS = Identity.allAuthenticatedUsers(); + private static final Identity USER = Identity.user("abc@gmail.com"); + private static final Identity SERVICE_ACCOUNT = + Identity.serviceAccount("service-account@gmail.com"); + private static final Identity GROUP = Identity.group("group@gmail.com"); + private static final Identity DOMAIN = Identity.domain("google.com"); + private static final Map> BINDINGS = ImmutableMap.of( + "viewer", + ImmutableSet.of(USER, SERVICE_ACCOUNT, ALL_USERS), + "editor", + ImmutableSet.of(ALL_AUTH_USERS, GROUP, DOMAIN)); + private static final PolicyImpl SIMPLE_POLICY = PolicyImpl.builder() + .addBinding("viewer", ImmutableSet.of(USER, SERVICE_ACCOUNT, ALL_USERS)) + .addBinding("editor", ImmutableSet.of(ALL_AUTH_USERS, GROUP, DOMAIN)) + .build(); + private static final PolicyImpl FULL_POLICY = + new PolicyImpl.Builder(SIMPLE_POLICY.bindings(), "etag", 1).build(); + + static class PolicyImpl extends IamPolicy { + + static class Builder extends IamPolicy.Builder { + + private Builder() {} + + private Builder(Map> bindings, String etag, Integer version) { + bindings(bindings).etag(etag).version(version); + } + + @Override + public PolicyImpl build() { + return new PolicyImpl(this); + } + } + + PolicyImpl(Builder builder) { + 
super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(bindings(), etag(), version()); + } + + static Builder builder() { + return new Builder(); + } + } + + @Test + public void testBuilder() { + assertEquals(BINDINGS, FULL_POLICY.bindings()); + assertEquals("etag", FULL_POLICY.etag()); + assertEquals(1, FULL_POLICY.version().intValue()); + Map> editorBinding = + ImmutableMap.>builder().put("editor", BINDINGS.get("editor")).build(); + PolicyImpl policy = FULL_POLICY.toBuilder().bindings(editorBinding).build(); + assertEquals(editorBinding, policy.bindings()); + assertEquals("etag", policy.etag()); + assertEquals(1, policy.version().intValue()); + policy = SIMPLE_POLICY.toBuilder().removeBinding("editor").build(); + assertEquals(ImmutableMap.of("viewer", BINDINGS.get("viewer")), policy.bindings()); + assertNull(policy.etag()); + assertNull(policy.version()); + policy = policy.toBuilder() + .removeIdentity("viewer", USER, ALL_USERS) + .addIdentity("viewer", DOMAIN, GROUP) + .build(); + assertEquals(ImmutableMap.of("viewer", ImmutableSet.of(SERVICE_ACCOUNT, DOMAIN, GROUP)), + policy.bindings()); + assertNull(policy.etag()); + assertNull(policy.version()); + policy = PolicyImpl.builder().addBinding("owner", USER, SERVICE_ACCOUNT).build(); + assertEquals( + ImmutableMap.of("owner", ImmutableSet.of(USER, SERVICE_ACCOUNT)), policy.bindings()); + assertNull(policy.etag()); + assertNull(policy.version()); + try { + SIMPLE_POLICY.toBuilder().addBinding("viewer", USER); + fail("Should have failed due to duplicate role."); + } catch (IllegalArgumentException e) { + assertEquals("The policy already contains a binding with the role viewer.", e.getMessage()); + } + try { + SIMPLE_POLICY.toBuilder().addBinding("editor", ImmutableSet.of(USER)); + fail("Should have failed due to duplicate role."); + } catch (IllegalArgumentException e) { + assertEquals("The policy already contains a binding with the role editor.", e.getMessage()); + } + } + + @Test + 
public void testEqualsHashCode() { + assertNotNull(FULL_POLICY); + PolicyImpl emptyPolicy = PolicyImpl.builder().build(); + AnotherPolicyImpl anotherPolicy = new AnotherPolicyImpl.Builder().build(); + assertNotEquals(emptyPolicy, anotherPolicy); + assertNotEquals(emptyPolicy.hashCode(), anotherPolicy.hashCode()); + assertNotEquals(FULL_POLICY, SIMPLE_POLICY); + assertNotEquals(FULL_POLICY.hashCode(), SIMPLE_POLICY.hashCode()); + PolicyImpl copy = SIMPLE_POLICY.toBuilder().build(); + assertEquals(SIMPLE_POLICY, copy); + assertEquals(SIMPLE_POLICY.hashCode(), copy.hashCode()); + } + + @Test + public void testBindings() { + assertTrue(PolicyImpl.builder().build().bindings().isEmpty()); + assertEquals(BINDINGS, SIMPLE_POLICY.bindings()); + } + + @Test + public void testEtag() { + assertNull(SIMPLE_POLICY.etag()); + assertEquals("etag", FULL_POLICY.etag()); + } + + @Test + public void testVersion() { + assertNull(SIMPLE_POLICY.version()); + assertEquals(1, FULL_POLICY.version().intValue()); + } + + static class AnotherPolicyImpl extends IamPolicy { + + static class Builder extends IamPolicy.Builder { + + private Builder() {} + + @Override + public AnotherPolicyImpl build() { + return new AnotherPolicyImpl(this); + } + } + + AnotherPolicyImpl(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(); + } + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java new file mode 100644 index 000000000000..a42bc9db7abd --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/IdentityTest.java @@ -0,0 +1,105 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.junit.Test; + +public class IdentityTest { + + private static final Identity ALL_USERS = Identity.allUsers(); + private static final Identity ALL_AUTH_USERS = Identity.allAuthenticatedUsers(); + private static final Identity USER = Identity.user("abc@gmail.com"); + private static final Identity SERVICE_ACCOUNT = + Identity.serviceAccount("service-account@gmail.com"); + private static final Identity GROUP = Identity.group("group@gmail.com"); + private static final Identity DOMAIN = Identity.domain("google.com"); + + @Test + public void testAllUsers() { + assertEquals(Identity.Type.ALL_USERS, ALL_USERS.type()); + assertNull(ALL_USERS.value()); + } + + @Test + public void testAllAuthenticatedUsers() { + assertEquals(Identity.Type.ALL_AUTHENTICATED_USERS, ALL_AUTH_USERS.type()); + assertNull(ALL_AUTH_USERS.value()); + } + + @Test + public void testUser() { + assertEquals(Identity.Type.USER, USER.type()); + assertEquals("abc@gmail.com", USER.value()); + } + + @Test(expected = NullPointerException.class) + public void testUserNullEmail() { + Identity.user(null); + } + + @Test + public void testServiceAccount() { + assertEquals(Identity.Type.SERVICE_ACCOUNT, SERVICE_ACCOUNT.type()); + assertEquals("service-account@gmail.com", SERVICE_ACCOUNT.value()); + } + + @Test(expected = NullPointerException.class) + public void testServiceAccountNullEmail() { + Identity.serviceAccount(null); + } + + @Test + public void 
testGroup() { + assertEquals(Identity.Type.GROUP, GROUP.type()); + assertEquals("group@gmail.com", GROUP.value()); + } + + @Test(expected = NullPointerException.class) + public void testGroupNullEmail() { + Identity.group(null); + } + + @Test + public void testDomain() { + assertEquals(Identity.Type.DOMAIN, DOMAIN.type()); + assertEquals("google.com", DOMAIN.value()); + } + + @Test(expected = NullPointerException.class) + public void testDomainNullId() { + Identity.domain(null); + } + + @Test + public void testIdentityToAndFromPb() { + compareIdentities(ALL_USERS, Identity.valueOf(ALL_USERS.strValue())); + compareIdentities(ALL_AUTH_USERS, Identity.valueOf(ALL_AUTH_USERS.strValue())); + compareIdentities(USER, Identity.valueOf(USER.strValue())); + compareIdentities(SERVICE_ACCOUNT, Identity.valueOf(SERVICE_ACCOUNT.strValue())); + compareIdentities(GROUP, Identity.valueOf(GROUP.strValue())); + compareIdentities(DOMAIN, Identity.valueOf(DOMAIN.strValue())); + } + + private void compareIdentities(Identity expected, Identity actual) { + assertEquals(expected, actual); + assertEquals(expected.type(), actual.type()); + assertEquals(expected.value(), actual.value()); + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java new file mode 100644 index 000000000000..4389171fb49c --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +public class PageImplTest { + + private static final ImmutableList VALUES = ImmutableList.of("1", "2"); + private static final ImmutableList NEXT_VALUES = ImmutableList.of("3", "4"); + private static final ImmutableList ALL_VALUES = ImmutableList.builder() + .addAll(VALUES) + .addAll(NEXT_VALUES) + .build(); + + @Test + public void testPage() { + final PageImpl nextResult = new PageImpl<>(null, "c", NEXT_VALUES); + PageImpl.NextPageFetcher fetcher = new PageImpl.NextPageFetcher() { + @Override + public PageImpl nextPage() { + return nextResult; + } + }; + PageImpl result = new PageImpl<>(fetcher, "c", VALUES); + assertEquals(nextResult, result.nextPage()); + assertEquals("c", result.nextPageCursor()); + assertEquals(VALUES, result.values()); + } + + @Test + public void testIterateAll() { + final PageImpl nextResult = new PageImpl<>(null, "c", NEXT_VALUES); + PageImpl.NextPageFetcher fetcher = new PageImpl.NextPageFetcher() { + @Override + public PageImpl nextPage() { + return nextResult; + } + }; + PageImpl result = new PageImpl<>(fetcher, "c", VALUES); + assertEquals(ALL_VALUES, ImmutableList.copyOf(result.iterateAll())); + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java index dfd933bcae46..9a7cc2104f4a 100644 --- 
a/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java @@ -118,13 +118,13 @@ public void testTriesAtLeastMinTimes() { @Override public Integer call() throws IOException { timesCalled++; assertEquals(timesCalled, RetryHelper.getContext().getAttemptNumber()); - assertEquals(10, RetryHelper.getContext().getRetryParams().getRetryMaxAttempts()); + assertEquals(10, RetryHelper.getContext().getRetryParams().retryMaxAttempts()); if (timesCalled <= timesToFail) { throw new IOException(); } return timesCalled; } - }, params, ExceptionHandler.getDefaultInstance()); + }, params, ExceptionHandler.defaultInstance()); assertEquals(timesToFail + 1, attempted); assertNull(RetryHelper.getContext()); } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java index d1d5e3c076d8..eae44693929b 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java @@ -41,15 +41,15 @@ public class RetryParamsTest { @Test public void testDefaults() { - RetryParams params1 = RetryParams.getDefaultInstance(); + RetryParams params1 = RetryParams.defaultInstance(); RetryParams params2 = RetryParams.builder().build(); for (RetryParams params : Arrays.asList(params1, params2)) { - assertEquals(DEFAULT_INITIAL_RETRY_DELAY_MILLIS, params.getInitialRetryDelayMillis()); - assertEquals(DEFAULT_MAX_RETRY_DELAY_MILLIS, params.getMaxRetryDelayMillis()); - assertEquals(DEFAULT_RETRY_DELAY_BACKOFF_FACTOR, params.getRetryDelayBackoffFactor(), 0); - assertEquals(DEFAULT_RETRY_MAX_ATTEMPTS, params.getRetryMaxAttempts()); - assertEquals(DEFAULT_RETRY_MIN_ATTEMPTS, params.getRetryMinAttempts()); - assertEquals(DEFAULT_TOTAL_RETRY_PERIOD_MILLIS, params.getTotalRetryPeriodMillis()); + assertEquals(DEFAULT_INITIAL_RETRY_DELAY_MILLIS, 
params.initialRetryDelayMillis()); + assertEquals(DEFAULT_MAX_RETRY_DELAY_MILLIS, params.maxRetryDelayMillis()); + assertEquals(DEFAULT_RETRY_DELAY_BACKOFF_FACTOR, params.retryDelayBackoffFactor(), 0); + assertEquals(DEFAULT_RETRY_MAX_ATTEMPTS, params.retryMaxAttempts()); + assertEquals(DEFAULT_RETRY_MIN_ATTEMPTS, params.retryMinAttempts()); + assertEquals(DEFAULT_TOTAL_RETRY_PERIOD_MILLIS, params.totalRetryPeriodMillis()); } } @@ -65,12 +65,12 @@ public void testSetAndCopy() { RetryParams params1 = builder.build(); RetryParams params2 = new RetryParams.Builder(params1).build(); for (RetryParams params : Arrays.asList(params1, params2)) { - assertEquals(101, params.getInitialRetryDelayMillis()); - assertEquals(102, params.getMaxRetryDelayMillis()); - assertEquals(103, params.getRetryDelayBackoffFactor(), 0); - assertEquals(107, params.getRetryMinAttempts()); - assertEquals(108, params.getRetryMaxAttempts()); - assertEquals(109, params.getTotalRetryPeriodMillis()); + assertEquals(101, params.initialRetryDelayMillis()); + assertEquals(102, params.maxRetryDelayMillis()); + assertEquals(103, params.retryDelayBackoffFactor(), 0); + assertEquals(107, params.retryMinAttempts()); + assertEquals(108, params.retryMaxAttempts()); + assertEquals(109, params.totalRetryPeriodMillis()); } } @@ -79,19 +79,19 @@ public void testBadSettings() { RetryParams.Builder builder = RetryParams.builder(); builder.initialRetryDelayMillis(-1); builder = assertFailure(builder); - builder.maxRetryDelayMillis(RetryParams.getDefaultInstance().getInitialRetryDelayMillis() - 1); + builder.maxRetryDelayMillis(RetryParams.defaultInstance().initialRetryDelayMillis() - 1); builder = assertFailure(builder); builder.retryDelayBackoffFactor(-1); builder = assertFailure(builder); builder.retryMinAttempts(-1); builder = assertFailure(builder); - builder.retryMaxAttempts(RetryParams.getDefaultInstance().getRetryMinAttempts() - 1); + builder.retryMaxAttempts(RetryParams.defaultInstance().retryMinAttempts() - 
1); builder = assertFailure(builder); builder.totalRetryPeriodMillis(-1); builder = assertFailure(builder); // verify that it is OK for min and max to be equal - builder.retryMaxAttempts(RetryParams.getDefaultInstance().getRetryMinAttempts()); - builder.maxRetryDelayMillis(RetryParams.getDefaultInstance().getInitialRetryDelayMillis()); + builder.retryMaxAttempts(RetryParams.defaultInstance().retryMinAttempts()); + builder.maxRetryDelayMillis(RetryParams.defaultInstance().initialRetryDelayMillis()); builder.build(); } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java new file mode 100644 index 000000000000..d0e3db2d2a55 --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java @@ -0,0 +1,241 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.gcloud.ServiceOptions.Clock; +import com.google.gcloud.ServiceOptions.DefaultHttpTransportFactory; +import com.google.gcloud.ServiceOptions.HttpTransportFactory; +import com.google.gcloud.spi.ServiceRpcFactory; + +import org.easymock.EasyMock; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Set; + +@RunWith(JUnit4.class) +public class ServiceOptionsTest { + private static final String JSON_KEY = + "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + 
"YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); + private static AuthCredentials authCredentials; + static { + try { + authCredentials = AuthCredentials.createForJson(JSON_KEY_STREAM); + } catch (IOException e) { + fail("Couldn't create fake JSON credentials."); + } + } + private static final HttpTransportFactory MOCK_HTTP_TRANSPORT_FACTORY = + EasyMock.createMock(HttpTransportFactory.class); + private static final Clock TEST_CLOCK = new TestClock(); + private static final TestServiceOptions OPTIONS = + TestServiceOptions.builder() + .authCredentials(authCredentials) + .clock(TEST_CLOCK) + .connectTimeout(1234) + .host("host") + .httpTransportFactory(MOCK_HTTP_TRANSPORT_FACTORY) + .projectId("project-id") + .readTimeout(5678) + .retryParams(RetryParams.noRetries()) + .build(); + private static final TestServiceOptions DEFAULT_OPTIONS = + 
TestServiceOptions.builder().projectId("project-id").build(); + private static final TestServiceOptions OPTIONS_COPY = OPTIONS.toBuilder().build(); + + private static class TestClock extends Clock { + @Override + public long millis() { + return 123456789L; + } + } + + private interface TestService extends Service {} + + private static class TestServiceImpl + extends BaseService implements TestService { + private TestServiceImpl(TestServiceOptions options) { + super(options); + } + } + + private interface TestServiceFactory extends ServiceFactory {} + + private static class DefaultTestServiceFactory implements TestServiceFactory { + private static final TestServiceFactory INSTANCE = new DefaultTestServiceFactory(); + + @Override + public TestService create(TestServiceOptions options) { + return new TestServiceImpl(options); + } + } + + private interface TestServiceRpcFactory + extends ServiceRpcFactory {} + + private static class DefaultTestServiceRpcFactory implements TestServiceRpcFactory { + private static final TestServiceRpcFactory INSTANCE = new DefaultTestServiceRpcFactory(); + + @Override + public TestServiceRpc create(TestServiceOptions options) { + return new DefaultTestServiceRpc(options); + } + } + + private interface TestServiceRpc {} + + private static class DefaultTestServiceRpc implements TestServiceRpc { + DefaultTestServiceRpc(TestServiceOptions options) {} + } + + private static class TestServiceOptions + extends ServiceOptions { + private static class Builder + extends ServiceOptions.Builder { + private Builder() {} + + private Builder(TestServiceOptions options) { + super(options); + } + + @Override + protected TestServiceOptions build() { + return new TestServiceOptions(this); + } + } + + private TestServiceOptions(Builder builder) { + super(TestServiceFactory.class, TestServiceRpcFactory.class, builder); + } + + @Override + protected TestServiceFactory defaultServiceFactory() { + return DefaultTestServiceFactory.INSTANCE; + } + + @Override + 
protected TestServiceRpcFactory defaultRpcFactory() { + return DefaultTestServiceRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return null; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + private static Builder builder() { + return new Builder(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TestServiceOptions && baseEquals((TestServiceOptions) obj); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + } + + @Test + public void testBuilder() { + assertSame(authCredentials, OPTIONS.authCredentials()); + assertSame(TEST_CLOCK, OPTIONS.clock()); + assertEquals(1234, OPTIONS.connectTimeout()); + assertEquals("host", OPTIONS.host()); + assertSame(MOCK_HTTP_TRANSPORT_FACTORY, OPTIONS.httpTransportFactory()); + assertEquals("project-id", OPTIONS.projectId()); + assertEquals(5678, OPTIONS.readTimeout()); + assertSame(RetryParams.noRetries(), OPTIONS.retryParams()); + + assertSame(Clock.defaultClock(), DEFAULT_OPTIONS.clock()); + assertEquals(-1, DEFAULT_OPTIONS.connectTimeout()); + assertEquals("https://www.googleapis.com", DEFAULT_OPTIONS.host()); + assertTrue(DEFAULT_OPTIONS.httpTransportFactory() instanceof DefaultHttpTransportFactory); + assertEquals(-1, DEFAULT_OPTIONS.readTimeout()); + assertSame(RetryParams.defaultInstance(), DEFAULT_OPTIONS.retryParams()); + } + + @Test + public void testGetProjectIdRequired() { + assertTrue(OPTIONS.projectIdRequired()); + } + + @Test + public void testService() { + assertTrue(OPTIONS.service() instanceof TestServiceImpl); + } + + @Test + public void testRpc() { + assertTrue(OPTIONS.rpc() instanceof DefaultTestServiceRpc); + } + + @Test + public void testBaseEquals() { + assertEquals(OPTIONS, OPTIONS_COPY); + assertNotEquals(DEFAULT_OPTIONS, OPTIONS); + } + + @Test + public void testBaseHashCode() { + assertEquals(OPTIONS.hashCode(), OPTIONS_COPY.hashCode()); + assertNotEquals(DEFAULT_OPTIONS.hashCode(), 
OPTIONS.hashCode()); + } +} diff --git a/gcloud-java-datastore/README.md b/gcloud-java-datastore/README.md index bbcdd9d8857c..0d89a0a07e3e 100644 --- a/gcloud-java-datastore/README.md +++ b/gcloud-java-datastore/README.md @@ -6,6 +6,8 @@ Java idiomatic client for [Google Cloud Datastore] (https://cloud.google.com/dat [![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-datastore.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-datastore.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) - [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) - [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/datastore/package-summary.html) @@ -15,18 +17,26 @@ Java idiomatic client for [Google Cloud Datastore] (https://cloud.google.com/dat Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml com.google.gcloud gcloud-java-datastore - 0.0.10 + 0.1.5 ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-datastore:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.1.5" +``` Example Application -------------------- 
-[`DatastoreExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java) is a simple command line interface for the Cloud Datastore. Read more about using the application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/DatastoreExample.html). +[`DatastoreExample`](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/DatastoreExample.java) is a simple command line interface for the Cloud Datastore. Read more about using the application on the [`DatastoreExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/datastore/DatastoreExample.html). Authentication -------------- @@ -36,7 +46,7 @@ See the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#auth About Google Cloud Datastore ---------------------------- -Google [Cloud Datastore][cloud-datastore] is a fully managed, schemaless database for +Google [Cloud Datastore][cloud-datastore-docs] is a fully managed, schemaless database for storing non-relational data. Cloud Datastore automatically scales with your users and supports ACID transactions, high availability of reads and writes, strong consistency for reads and ancestor queries, and eventual @@ -48,36 +58,96 @@ Cloud Datastore for your project. See the ``gcloud-java`` API [datastore documentation][datastore-api] to learn how to interact with the Cloud Datastore using this Client Library. -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere. 
+Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Datastore API enabled. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-datastore` library. See the [Quickstart](#quickstart) section to add `gcloud-java-datastore` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Datastore, you must create a service object with credentials. You can then make API calls by calling methods on the Datastore service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object: ```java import com.google.gcloud.datastore.Datastore; import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.datastore.DateTime; + +Datastore datastore = DatastoreOptions.defaultInstance().service(); +``` + +For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Storing data +Objects in Datastore are known as entities. Entities are grouped by "kind" and have keys for easy access. In this code snippet, we will create a new entity representing a person and store that data by the person's email. 
First, add the following imports at the top of your file: + +```java import com.google.gcloud.datastore.Entity; import com.google.gcloud.datastore.Key; import com.google.gcloud.datastore.KeyFactory; +``` + +Then add the following code to put an entity in Datastore. + +```java +KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); +Key key = keyFactory.newKey("john.doe@gmail.com"); +Entity entity = Entity.builder(key) + .set("name", "John Doe") + .set("age", 51) + .set("favorite_food", "pizza") + .build(); +datastore.put(entity); +``` -Datastore datastore = DatastoreOptions.getDefaultInstance().service(); -KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND); -Key key = keyFactory.newKey(keyName); -Entity entity = datastore.get(key); -if (entity == null) { - entity = Entity.builder(key) - .set("name", "John Do") - .set("age", 30) - .set("access_time", DateTime.now()) - .build(); - datastore.put(entity); -} else { - System.out.println("Updating access_time for " + entity.getString("name")); - entity = Entity.builder(entity) - .set("access_time", DateTime.now()) - .build(); - datastore.update(entity); +Later, if you want to get this entity back, add the following to your code: + +```java +Entity johnEntity = datastore.get(key); +``` + +#### Running a query +In addition to retrieving entities by their keys, you can perform queries to retrieve entities by the values of their properties. A typical query includes an entity kind, filters to select entities with matching values, and sort orders to sequence the results. `gcloud-java-datastore` supports two types of queries: `StructuredQuery` (that allows you to construct query elements) and `GqlQuery` (which operates using [GQL syntax](https://cloud.google.com/datastore/docs/apis/gql/gql_reference)) in string format. In this tutorial, we will use a simple `StructuredQuery`. + +Suppose that you've added more people to Datastore, and now you want to find all people whose favorite food is pizza. 
Import the following: + +```java +import com.google.gcloud.datastore.Query; +import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery; +import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; +``` + +Then add the following code to your program: + +```java +Query query = Query.entityQueryBuilder() + .kind("Person") + .filter(PropertyFilter.eq("favorite_food", "pizza")) + .build(); +QueryResults results = datastore.run(query); +while (results.hasNext()) { + Entity currentEntity = results.next(); + System.out.println(currentEntity.getString("name") + ", you're invited to a pizza party!"); } ``` +Cloud Datastore relies on indexing to run queries. Indexing is turned on by default for most types of properties. To read more about indexing, see the [Cloud Datastore Index Configuration documentation](https://cloud.google.com/datastore/docs/tools/indexconfig). + +#### Complete source code + +In +[AddEntitiesAndRunQuery.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/AddEntitiesAndRunQuery.java) +we put together all the code shown above into one program. The program assumes that you are +running on Compute Engine or from your own desktop. To run the example on App Engine, simply move +the code from the main method to your application's servlet class and change the print statements to +display on your webpage. + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -104,7 +174,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. 
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -113,11 +185,10 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-datastore [cloud-platform]: https://cloud.google.com/ -[cloud-datastore]: https://cloud.google.com/datastore/docs [cloud-datastore-docs]: https://cloud.google.com/datastore/docs [cloud-datastore-activation]: https://cloud.google.com/datastore/docs/activate [datastore-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/datastore/package-summary.html - diff --git a/gcloud-java-datastore/pom.xml b/gcloud-java-datastore/pom.xml index c7c57d91ae1d..977b6db22b14 100644 --- a/gcloud-java-datastore/pom.xml +++ b/gcloud-java-datastore/pom.xml @@ -1,7 +1,6 @@ 4.0.0 - com.google.gcloud gcloud-java-datastore jar GCloud Java datastore @@ -11,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-datastore @@ -25,8 +24,14 @@ com.google.apis google-api-services-datastore-protobuf - v1beta2-rev1-2.1.2 + v1beta2-rev1-4.0.0 compile + + + com.google.api-client + google-api-client + + junit @@ -37,7 +42,7 @@ org.easymock easymock - 3.3 + 3.4 test diff --git 
a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseDatastoreBatchWriter.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseDatastoreBatchWriter.java index 7eaf5c535f26..b42c07d62320 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseDatastoreBatchWriter.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseDatastoreBatchWriter.java @@ -199,7 +199,7 @@ protected DatastoreException newInvalidRequest(String msg, Object... params) { return DatastoreException.throwInvalidRequest(String.format(msg, params)); } - protected DatastoreV1.Mutation.Builder toMutationPb() { + DatastoreV1.Mutation.Builder toMutationPb() { DatastoreV1.Mutation.Builder mutationPb = DatastoreV1.Mutation.newBuilder(); for (FullEntity entity : toAddAutoId()) { mutationPb.addInsertAutoId(entity.toPb()); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java index 3a79f3053a1e..20c0b13e5001 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java @@ -33,6 +33,7 @@ import com.google.protobuf.InvalidProtocolBufferException; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -90,7 +91,7 @@ private B self() { } @SuppressWarnings("unchecked") - protected B fill(DatastoreV1.Entity entityPb) { + B fill(DatastoreV1.Entity entityPb) { Map> copiedProperties = Maps.newHashMap(); for (DatastoreV1.Property property : entityPb.getPropertyList()) { copiedProperties.put(property.getName(), Value.fromPb(property.getValue())); @@ -128,61 +129,286 @@ public B remove(String name) { return self(); } + /** + * Sets a property. 
+ * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, Value value) { properties.put(name, value); return self(); } + /** + * Sets a property of type {@link StringValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, String value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link StringValue}. + * + * @param name name of the property + * @param first the first string in the list + * @param second the second string in the list + * @param others other strings in the list + */ + public B set(String name, String first, String second, String... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (String other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link LongValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, long value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link LongValue}. + * + * @param name name of the property + * @param first the first long in the list + * @param second the second long in the list + * @param others other longs in the list + */ + public B set(String name, long first, long second, long... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (long other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link DoubleValue}. 
+ * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, double value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link DoubleValue}. + * + * @param name name of the property + * @param first the first double in the list + * @param second the second double in the list + * @param others other doubles in the list + */ + public B set(String name, double first, double second, double... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (double other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link BooleanValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, boolean value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link BooleanValue}. + * + * @param name name of the property + * @param first the first boolean in the list + * @param second the second boolean in the list + * @param others other booleans in the list + */ + public B set(String name, boolean first, boolean second, boolean... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (boolean other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link DateTimeValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, DateTime value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link DateTimeValue}. 
+ * + * @param name name of the property + * @param first the first {@link DateTime} in the list + * @param second the second {@link DateTime} in the list + * @param others other {@link DateTime}s in the list + */ + public B set(String name, DateTime first, DateTime second, DateTime... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (DateTime other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link KeyValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, Key value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link KeyValue}. + * + * @param name name of the property + * @param first the first {@link Key} in the list + * @param second the second {@link Key} in the list + * @param others other {@link Key}s in the list + */ + public B set(String name, Key first, Key second, Key... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (Key other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link EntityValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, FullEntity value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link EntityValue}. + * + * @param name name of the property + * @param first the first {@link FullEntity} in the list + * @param second the second {@link FullEntity} in the list + * @param others other entities in the list + */ + public B set(String name, FullEntity first, FullEntity second, FullEntity... 
others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (FullEntity other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@link ListValue}. + * + * @param name name of the property + * @param values list of values associated with the property + */ public B set(String name, List> values) { properties.put(name, of(values)); return self(); } - public B set(String name, Value value, Value... other) { - properties.put(name, of(value, other)); + /** + * Sets a property of type {@link ListValue}. + * + * @param name name of the property + * @param first the first value in the list + * @param second the second value in the list + * @param others other values in the list + */ + public B set(String name, Value first, Value second, Value... others) { + properties.put(name, ListValue.builder().addValue(first).addValue(second, others).build()); return self(); } + /** + * Sets a property of type {@link BlobValue}. + * + * @param name name of the property + * @param value value associated with the property + */ public B set(String name, Blob value) { properties.put(name, of(value)); return self(); } + /** + * Sets a list property containing elements of type {@link BlobValue}. + * + * @param name name of the property + * @param first the first {@link Blob} in the list + * @param second the second {@link Blob} in the list + * @param others other {@link Blob}s in the list + */ + public B set(String name, Blob first, Blob second, Blob... others) { + List values = new LinkedList<>(); + values.add(of(first)); + values.add(of(second)); + for (Blob other : others) { + values.add(of(other)); + } + properties.put(name, of(values)); + return self(); + } + + /** + * Sets a property of type {@code NullValue}. 
+ * + * @param name name of the property + */ public B setNull(String name) { properties.put(name, of()); return self(); @@ -243,7 +469,7 @@ public boolean contains(String name) { /** * Returns the {@link Value} for the given property {@code name}. * - * @throws DatastoreException if not such property. + * @throws DatastoreException if not such property */ public > V getValue(String name) { @SuppressWarnings("unchecked") @@ -257,7 +483,7 @@ public > V getValue(String name) { /** * Returns true if property is an instance of NullValue. * - * @throws DatastoreException if not such property. + * @throws DatastoreException if not such property */ public boolean isNull(String name) { return getValue(name) instanceof NullValue; @@ -267,8 +493,8 @@ public boolean isNull(String name) { /** * Returns the property value as a string. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a string. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a string */ @SuppressWarnings("unchecked") public String getString(String name) { @@ -278,8 +504,8 @@ public String getString(String name) { /** * Returns the property value as long. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a long. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a long */ @SuppressWarnings("unchecked") public long getLong(String name) { @@ -289,8 +515,8 @@ public long getLong(String name) { /** * Returns the property value as a double. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a double. 
+ * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a double */ @SuppressWarnings("unchecked") public double getDouble(String name) { @@ -300,8 +526,8 @@ public double getDouble(String name) { /** * Returns the property value as a boolean. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a boolean. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a boolean */ @SuppressWarnings("unchecked") public boolean getBoolean(String name) { @@ -311,8 +537,8 @@ public boolean getBoolean(String name) { /** * Returns the property value as a DateTime. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a DateTime. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a DateTime */ @SuppressWarnings("unchecked") public DateTime getDateTime(String name) { @@ -322,8 +548,8 @@ public DateTime getDateTime(String name) { /** * Returns the property value as a Key. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a Key. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a Key */ @SuppressWarnings("unchecked") public Key getKey(String name) { @@ -333,8 +559,8 @@ public Key getKey(String name) { /** * Returns the property value as an entity. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not an entity. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not an entity */ @SuppressWarnings("unchecked") public FullEntity getEntity(String name) { @@ -344,19 +570,19 @@ public FullEntity getEntity(String name) { /** * Returns the property value as a list of values. * - * @throws DatastoreException if not such property. 
- * @throws ClassCastException if value is not a list of values. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a list of values */ @SuppressWarnings("unchecked") - public List> getList(String name) { - return ((Value>>) getValue(name)).get(); + public > List getList(String name) { + return (List) getValue(name).get(); } /** * Returns the property value as a blob. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a blob. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a blob */ @SuppressWarnings("unchecked") public Blob getBlob(String name) { @@ -375,7 +601,7 @@ ImmutableSortedMap> properties() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { Builder builder = emptyBuilder(); builder.fill(DatastoreV1.Entity.parseFrom(bytesPb)); return builder.build(); @@ -384,7 +610,7 @@ protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { protected abstract Builder emptyBuilder(); @Override - protected final DatastoreV1.Entity toPb() { + final DatastoreV1.Entity toPb() { DatastoreV1.Entity.Builder entityPb = DatastoreV1.Entity.newBuilder(); for (Map.Entry> entry : properties.entrySet()) { DatastoreV1.Property.Builder propertyPb = DatastoreV1.Property.newBuilder(); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java index 865b95ed8518..4ab6f51b6767 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java @@ -31,7 +31,7 @@ /** * Base class for keys. 
*/ -abstract class BaseKey extends Serializable { +public abstract class BaseKey extends Serializable { private static final long serialVersionUID = -4671243265877410635L; @@ -39,7 +39,12 @@ abstract class BaseKey extends Serializable { private final transient String namespace; private final transient ImmutableList path; - abstract static class Builder> { + /** + * Base class for key builders. + * + * @param the key builder. + */ + protected abstract static class Builder> { String projectId; String namespace; @@ -152,6 +157,8 @@ public String kind() { return leaf().kind(); } + abstract BaseKey parent(); + @Override public int hashCode() { return Objects.hash(projectId(), namespace(), path()); @@ -172,7 +179,7 @@ public boolean equals(Object obj) { } @Override - protected DatastoreV1.Key toPb() { + DatastoreV1.Key toPb() { DatastoreV1.Key.Builder keyPb = DatastoreV1.Key.newBuilder(); DatastoreV1.PartitionId.Builder partitionIdPb = DatastoreV1.PartitionId.newBuilder(); if (projectId != null) { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Batch.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Batch.java index 75a5d1381403..5306a685195a 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Batch.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Batch.java @@ -24,14 +24,14 @@ * to the Datastore upon {@link #submit}. * A usage example: *
 {@code
- *   Entity entity1 = datastore.get(key1);
- *   Batch batch = datastore.newBatch();
- *   Entity entity2 = Entity.builder(key2).set("name", "John").build();
- *   entity1 = Entity.builder(entity1).clear().setNull("bla").build();
- *   Entity entity3 = Entity.builder(key3).set("title", "title").build();
- *   batch.update(entity1);
- *   batch.add(entity2, entity3);
- *   batch.submit();
+ * Entity entity1 = datastore.get(key1);
+ * Batch batch = datastore.newBatch();
+ * Entity entity2 = Entity.builder(key2).set("name", "John").build();
+ * entity1 = Entity.builder(entity1).clear().setNull("bla").build();
+ * Entity entity3 = Entity.builder(key3).set("title", "title").build();
+ * batch.update(entity1);
+ * batch.add(entity2, entity3);
+ * batch.submit();
  * } 
*/ public interface Batch extends DatastoreBatchWriter { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java index 0c4d6c26d9fa..42a98e60b5e6 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java @@ -109,7 +109,7 @@ public InputStream asInputStream() { * * @throws java.nio.ReadOnlyBufferException if the target is read-only * @throws java.nio.BufferOverflowException if the target's remaining() space is not large - * enough to hold the data. + * enough to hold the data */ public void copyTo(ByteBuffer target) { byteString.copyTo(target); @@ -147,12 +147,12 @@ public static Blob copyFrom(InputStream input) throws IOException { } @Override - protected Value toPb() { + Value toPb() { return DatastoreV1.Value.newBuilder().setBlobValue(byteString).build(); } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return new Blob(DatastoreV1.Value.parseFrom(bytesPb).getBlobValue()); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java index 42a8cee8e5a2..5e577f7feb6c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java @@ -17,21 +17,14 @@ package com.google.gcloud.datastore; import static com.google.common.base.Preconditions.checkNotNull; -import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.services.datastore.DatastoreV1; import com.google.api.services.datastore.DatastoreV1.Value; import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; -import 
com.google.common.base.Preconditions; +import com.google.common.io.BaseEncoding; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.TextFormat; -import com.google.protobuf.TextFormat.ParseException; - -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.net.URLEncoder; /** * A Google Cloud Datastore cursor. @@ -44,7 +37,6 @@ public final class Cursor extends Serializable { private final transient ByteString byteString; Cursor(ByteString byteString) { - Preconditions.checkArgument(byteString.isValidUtf8(), "content is not a valid UTF-8"); this.byteString = byteString; } @@ -76,11 +68,7 @@ ByteString byteString() { * Returns the cursor in an encoded form that can be used as part of a URL. */ public String toUrlSafe() { - try { - return URLEncoder.encode(TextFormat.printToString(toPb()), UTF_8.name()); - } catch (UnsupportedEncodingException e) { - throw new IllegalStateException("Unexpected encoding exception", e); - } + return BaseEncoding.base64Url().encode(byteString.toByteArray()); } /** @@ -88,11 +76,8 @@ public String toUrlSafe() { */ public static Cursor fromUrlSafe(String urlSafe) { try { - String utf8Str = URLDecoder.decode(urlSafe, UTF_8.name()); - DatastoreV1.Value.Builder builder = DatastoreV1.Value.newBuilder(); - TextFormat.merge(utf8Str, builder); - return fromPb(builder.build()); - } catch (UnsupportedEncodingException | ParseException e) { + return Cursor.copyFrom(BaseEncoding.base64Url().decode(urlSafe)); + } catch (IllegalArgumentException e) { throw new IllegalStateException("Unexpected decoding exception", e); } } @@ -102,12 +87,12 @@ public static Cursor copyFrom(byte[] bytes) { } @Override - protected Value toPb() { + Value toPb() { return DatastoreV1.Value.newBuilder().setBlobValue(byteString).build(); } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws 
InvalidProtocolBufferException { return fromPb(DatastoreV1.Value.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java index 562578a26428..ecad69ac635b 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreException.java @@ -16,141 +16,70 @@ package com.google.gcloud.datastore; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableMap; -import com.google.gcloud.RetryHelper; +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; import com.google.gcloud.RetryHelper.RetryHelperException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; +import com.google.gcloud.RetryHelper.RetryInterruptedException; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; +import java.util.Set; -public class DatastoreException extends RuntimeException { - - private static final long serialVersionUID = 8170357898917041899L; - private static final ImmutableMap REASON_TO_CODE; - private static final ImmutableMap HTTP_TO_CODE; - - private final Code code; - - /** - * An error code to represent the failure. 
- * - * @see Google Cloud - * Datastore error codes - */ - public enum Code { - - ABORTED(Reason.ABORTED), - DEADLINE_EXCEEDED(Reason.DEADLINE_EXCEEDED), - UNAVAILABLE(Reason.UNAVAILABLE), - FAILED_PRECONDITION(Reason.FAILED_PRECONDITION), - INVALID_ARGUMENT(Reason.INVALID_ARGUMENT), - PERMISSION_DENIED(Reason.PERMISSION_DENIED), - UNAUTHORIZED(false, "Unauthorized", 401), - INTERNAL(Reason.INTERNAL), - RESOURCE_EXHAUSTED(Reason.RESOURCE_EXHAUSTED), - UNKNOWN(false, "Unknown failure", -1); - - private final boolean retryable; - private final String description; - private final int httpStatus; - - Code(Reason reason) { - this(reason.retryable(), reason.description(), reason.httpStatus()); - } - - Code(boolean retryable, String description, int httpStatus) { - this.retryable = retryable; - this.description = description; - this.httpStatus = httpStatus; - } - - public String description() { - return description; - } - - public int httpStatus() { - return httpStatus; - } +/** + * Datastore service exception. + * + * @see Google Cloud + * Datastore error codes + */ +public class DatastoreException extends BaseServiceException { - /** - * Returns {@code true} if this exception is transient and the same request could be retried. - * For any retry it is highly recommended to apply an exponential backoff. 
- */ - public boolean retryable() { - return retryable; - } + // see https://cloud.google.com/datastore/docs/concepts/errors#Error_Codes" + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(409, "ABORTED"), + new Error(403, "DEADLINE_EXCEEDED"), + new Error(503, "UNAVAILABLE")); + private static final long serialVersionUID = 2663750991205874435L; - DatastoreException translate(DatastoreRpcException exception, String message) { - return new DatastoreException(this, message, exception); - } + public DatastoreException(int code, String message, String reason, Throwable cause) { + super(code, message, reason, true, cause); } - static { - ImmutableMap.Builder builder = ImmutableMap.builder(); - Map httpCodes = new HashMap<>(); - for (Code code : Code.values()) { - builder.put(code.name(), code); - httpCodes.put(code.httpStatus(), code); - } - REASON_TO_CODE = builder.build(); - HTTP_TO_CODE = ImmutableMap.copyOf(httpCodes); + public DatastoreException(int code, String message, String reason) { + super(code, message, reason, true); } - public DatastoreException(Code code, String message, Exception cause) { - super(MoreObjects.firstNonNull(message, code.description), cause); - this.code = code; + public DatastoreException(IOException exception) { + super(exception, true); } - public DatastoreException(Code code, String message) { - this(code, message, null); + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** - * Returns the code associated with this exception. 
- */ - public Code code() { - return code; - } - - static DatastoreException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof DatastoreRpcException) { - return translateAndThrow((DatastoreRpcException) ex.getCause()); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new DatastoreException(Code.UNKNOWN, ex.getMessage(), ex); - } - - /** - * Translate DatastoreException to DatastoreException based on their - * HTTP error codes. This method will always throw a new DatastoreException. + * Translate RetryHelperException to the DatastoreException that caused the error. This method + * will always throw an exception. * - * @throws DatastoreException every time + * @throws DatastoreException when {@code ex} was caused by a {@code DatastoreException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static DatastoreException translateAndThrow(DatastoreRpcException exception) { - String message = exception.getMessage(); - Code code = REASON_TO_CODE.get(exception.reason()); - if (code == null) { - code = MoreObjects.firstNonNull(HTTP_TO_CODE.get(exception.httpStatus()), Code.UNKNOWN); - } - throw code.translate(exception, message); + static DatastoreException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new DatastoreException(UNKNOWN_CODE, ex.getMessage(), null); } /** - * Throw a DatastoreException with {@code FAILED_PRECONDITION} code and the {@code message} - * in a nested exception. + * Throw a DatastoreException with {@code FAILED_PRECONDITION} reason and the {@code message} in a + * nested exception. * * @throws DatastoreException every time */ static DatastoreException throwInvalidRequest(String massage, Object... 
params) { - throw new DatastoreException(Code.FAILED_PRECONDITION, String.format(massage, params)); + throw new DatastoreException(UNKNOWN_CODE, String.format(massage, params), + "FAILED_PRECONDITION"); } static DatastoreException propagateUserException(Exception ex) { - throw new DatastoreException(Code.UNKNOWN, ex.getMessage(), ex); + throw new DatastoreException(BaseServiceException.UNKNOWN_CODE, ex.getMessage(), null, ex); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java index 43fd75396538..49a5728a4da9 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java @@ -23,13 +23,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; +import com.google.gcloud.datastore.spi.DatastoreRpc; import com.google.protobuf.ByteString; import java.util.Arrays; @@ -42,32 +39,7 @@ import java.util.Set; import java.util.concurrent.Callable; - -final class DatastoreImpl extends BaseService - implements Datastore { - - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = - new Interceptor() { - - private static final long serialVersionUID = 6911242958397733203L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception 
instanceof DatastoreRpcException) { - boolean retryable = ((DatastoreRpcException) exception).retryable(); - return retryable ? Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - private static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class, DatastoreRpcException.class) - .interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); +final class DatastoreImpl extends BaseService implements Datastore { private final DatastoreRpc datastoreRpc; private final RetryParams retryParams; @@ -105,7 +77,7 @@ QueryResults run(DatastoreV1.ReadOptions readOptionsPb, Query query) { DatastoreV1.RunQueryResponse runQuery(final DatastoreV1.RunQueryRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.RunQueryResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.RunQueryResponse call() throws DatastoreException { return datastoreRpc.runQuery(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -139,7 +111,7 @@ public List allocateId(IncompleteKey... keys) { DatastoreV1.AllocateIdsResponse allocateIds(final DatastoreV1.AllocateIdsRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.AllocateIdsResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.AllocateIdsResponse call() throws DatastoreException { return datastoreRpc.allocateIds(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -176,7 +148,7 @@ public List add(FullEntity... 
entities) { if (completeEntity != null) { if (completeEntities.put(completeEntity.key(), completeEntity) != null) { throw DatastoreException.throwInvalidRequest( - "Duplicate entity with the key %s", entity.key()); + "Duplicate entity with the key %s", entity.key()); } mutationPb.addInsert(completeEntity.toPb()); } else { @@ -263,7 +235,7 @@ protected Entity computeNext() { DatastoreV1.LookupResponse lookup(final DatastoreV1.LookupRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.LookupResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.LookupResponse call() throws DatastoreException { return datastoreRpc.lookup(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -334,7 +306,7 @@ private DatastoreV1.CommitResponse commitMutation(DatastoreV1.Mutation.Builder m DatastoreV1.CommitResponse commit(final DatastoreV1.CommitRequest requestPb) { try { return RetryHelper.runWithRetries(new Callable() { - @Override public DatastoreV1.CommitResponse call() throws DatastoreRpcException { + @Override public DatastoreV1.CommitResponse call() throws DatastoreException { return datastoreRpc.commit(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -352,7 +324,7 @@ DatastoreV1.BeginTransactionResponse beginTransaction( try { return RetryHelper.runWithRetries(new Callable() { @Override - public DatastoreV1.BeginTransactionResponse call() throws DatastoreRpcException { + public DatastoreV1.BeginTransactionResponse call() throws DatastoreException { return datastoreRpc.beginTransaction(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -370,7 +342,7 @@ void rollbackTransaction(ByteString transaction) { void rollback(final DatastoreV1.RollbackRequest requestPb) { try { RetryHelper.runWithRetries(new Callable() { - @Override public Void call() throws DatastoreRpcException { + @Override public Void call() throws DatastoreException { datastoreRpc.rollback(requestPb); return null; } diff --git 
a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java index 5338c03a6d56..db1a5f800ce8 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java @@ -24,10 +24,9 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpcFactory; -import com.google.gcloud.spi.DefaultDatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; +import com.google.gcloud.datastore.spi.DefaultDatastoreRpc; import java.lang.reflect.Method; import java.util.Iterator; @@ -126,20 +125,16 @@ private DatastoreOptions normalize() { .addPathElement(DatastoreV1.Key.PathElement.newBuilder().setKind("__foo__").setName("bar")) .build(); requestPb.addKey(key); - try { - LookupResponse responsePb = rpc().lookup(requestPb.build()); - if (responsePb.getDeferredCount() > 0) { - key = responsePb.getDeferred(0); - } else { - Iterator combinedIter = - Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); - key = combinedIter.next().getEntity().getKey(); - } - builder.projectId(key.getPartitionId().getDatasetId()); - return new DatastoreOptions(builder); - } catch (DatastoreRpcException e) { - throw DatastoreException.translateAndThrow(e); + LookupResponse responsePb = rpc().lookup(requestPb.build()); + if (responsePb.getDeferredCount() > 0) { + key = responsePb.getDeferred(0); + } else { + Iterator combinedIter = + Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); + key = 
combinedIter.next().getEntity().getKey(); } + builder.projectId(key.getPartitionId().getDatasetId()); + return new DatastoreOptions(builder); } @Override @@ -157,11 +152,13 @@ protected String defaultProject() { return projectId != null ? projectId : super.defaultProject(); } + @SuppressWarnings("unchecked") @Override protected DatastoreFactory defaultServiceFactory() { return DefaultDatastoreFactory.INSTANCE; } + @SuppressWarnings("unchecked") @Override protected DatastoreRpcFactory defaultRpcFactory() { return DefaultDatastoreRpcFactory.INSTANCE; @@ -171,6 +168,13 @@ public String namespace() { return namespace; } + /** + * Returns a default {@code DatastoreOptions} instance. + */ + public static DatastoreOptions defaultInstance() { + return builder().build(); + } + private static String defaultNamespace() { try { Class clazz = Class.forName("com.google.appengine.api.NamespaceManager"); @@ -192,6 +196,7 @@ protected Set scopes() { return SCOPES; } + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { return new Builder(this); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java index 056895f850e3..4852dd53e16c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java @@ -27,7 +27,7 @@ public interface DatastoreReader { /** * Returns an {@link Entity} for the given {@link Key} or {@code null} if does not exists. * - * @throws DatastoreException upon failure. + * @throws DatastoreException upon failure */ Entity get(Key key); @@ -38,7 +38,7 @@ public interface DatastoreReader { * from the returned {@code Iterator}'s {@link Iterator#hasNext hasNext} or * {@link Iterator#next next} methods. * - * @throws DatastoreException upon failure. 
+ * @throws DatastoreException upon failure * @see #get(Key) */ Iterator get(Key... key); @@ -53,7 +53,7 @@ public interface DatastoreReader { /** * Submit a {@link Query} and returns its result. * - * @throws DatastoreException upon failure. + * @throws DatastoreException upon failure */ QueryResults run(Query query); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java index af5a17ef7ef3..d22edd9697e4 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java @@ -98,12 +98,12 @@ public static DateTime copyFrom(Calendar calendar) { } @Override - protected Value toPb() { + Value toPb() { return DatastoreV1.Value.newBuilder().setIntegerValue(timestampMicroseconds).build(); } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return new DateTime(DatastoreV1.Value.parseFrom(bytesPb).getIntegerValue()); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java new file mode 100644 index 000000000000..902168f20f48 --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java @@ -0,0 +1,67 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +import com.google.api.services.datastore.DatastoreV1; + +/** + * An implementation of a Google Cloud Datastore entity query that can be constructed by providing + * all the specific query elements. + * + * @see Datastore + * queries + */ +public final class EntityQuery extends StructuredQuery { + + private static final long serialVersionUID = 2990565454831019471L; + + /** + * A {@code EntityQuery} builder for queries that return {@link Entity} results. + */ + public static final class Builder extends StructuredQuery.BuilderImpl { + + Builder(EntityQuery query) { + super(query); + } + + Builder() { + super(ResultType.ENTITY); + } + + @Override + Builder mergeFrom(DatastoreV1.Query queryPb) { + super.mergeFrom(queryPb); + clearProjection(); + clearGroupBy(); + return this; + } + + @Override + public EntityQuery build() { + return new EntityQuery(this); + } + } + + EntityQuery(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java index bb08fca12e3c..55c573b9a636 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java @@ -19,7 +19,8 @@ import com.google.api.services.datastore.DatastoreV1; /** - * A full entity is a {@link BaseEntity} that with a 
complete set of properties. + * A full entity is a {@link BaseEntity} that holds all the properties associated with a + * Datastore entity (as opposed to {@link ProjectionEntity}). */ public class FullEntity extends BaseEntity { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java index e9bd8e12cfd8..7c03b69d9f39 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java @@ -43,27 +43,27 @@ *

A usage example:

* *

When the type of the results is known the preferred usage would be: - *

{@code
- *   Query query =
- *       Query.gqlQueryBuilder(Query.ResultType.ENTITY, "select * from kind").build();
- *   QueryResults results = datastore.run(query);
- *   while (results.hasNext()) {
- *     Entity entity = results.next();
- *     ...
- *   }
+ * 
 {@code
+ * Query query =
+ *     Query.gqlQueryBuilder(Query.ResultType.ENTITY, "select * from kind").build();
+ * QueryResults results = datastore.run(query);
+ * while (results.hasNext()) {
+ *   Entity entity = results.next();
+ *   ...
+ * }
  * } 
* *

When the type of the results is unknown you can use this approach: - *

{@code
- *   Query query = Query.gqlQueryBuilder("select __key__ from kind").build();
- *   QueryResults results = datastore.run(query);
- *   if (Key.class.isAssignableFrom(results.resultClass())) {
- *     QueryResults keys = (QueryResults) results;
- *     while (keys.hasNext()) {
- *       Key key = keys.next();
- *       ...
- *     }
+ * 
 {@code
+ * Query query = Query.gqlQueryBuilder("select __key__ from kind").build();
+ * QueryResults results = datastore.run(query);
+ * if (Key.class.isAssignableFrom(results.resultClass())) {
+ *   QueryResults keys = (QueryResults) results;
+ *   while (keys.hasNext()) {
+ *     Key key = keys.next();
+ *     ...
  *   }
+ * }
  * } 
* * @param the type of the result values this query will produce @@ -126,7 +126,7 @@ public boolean equals(Object obj) { } @Override - protected DatastoreV1.GqlQueryArg toPb() { + DatastoreV1.GqlQueryArg toPb() { DatastoreV1.GqlQueryArg.Builder argPb = DatastoreV1.GqlQueryArg.newBuilder(); if (name != null) { argPb.setName(name); @@ -141,7 +141,7 @@ protected DatastoreV1.GqlQueryArg toPb() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(DatastoreV1.GqlQueryArg.parseFrom(bytesPb)); } @@ -370,7 +370,7 @@ public boolean equals(Object obj) { } @Override - protected DatastoreV1.GqlQuery toPb() { + DatastoreV1.GqlQuery toPb() { DatastoreV1.GqlQuery.Builder queryPb = DatastoreV1.GqlQuery.newBuilder(); queryPb.setQueryString(queryString); queryPb.setAllowLiteral(allowLiteral); @@ -384,18 +384,18 @@ protected DatastoreV1.GqlQuery toPb() { } @Override - protected void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb) { + void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb) { requestPb.setGqlQuery(toPb()); } @Override - protected GqlQuery nextQuery(DatastoreV1.QueryResultBatch responsePb) { + GqlQuery nextQuery(DatastoreV1.QueryResultBatch responsePb) { // See issue #17 throw new UnsupportedOperationException("paging for this query is not implemented yet"); } @Override - protected Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) + Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(resultType, namespace, DatastoreV1.GqlQuery.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java index 6134eed2905b..2192384ef70b 100644 --- 
a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java @@ -54,7 +54,7 @@ public IncompleteKey build() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(DatastoreV1.Key.parseFrom(bytesPb)); } @@ -84,6 +84,29 @@ static IncompleteKey fromPb(DatastoreV1.Key keyPb) { return new IncompleteKey(projectId, namespace, path); } + /** + * Returns the key's parent. + */ + @Override + public Key parent() { + List ancestors = ancestors(); + if (ancestors.isEmpty()) { + return null; + } + PathElement parent = ancestors.get(ancestors.size() - 1); + Key.Builder keyBuilder; + if (parent.hasName()) { + keyBuilder = Key.builder(projectId(), parent.kind(), parent.name()); + } else { + keyBuilder = Key.builder(projectId(), parent.kind(), parent.id()); + } + String namespace = namespace(); + if (namespace != null) { + keyBuilder.namespace(namespace); + } + return keyBuilder.ancestors(ancestors.subList(0, ancestors.size() - 1)).build(); + } + public static Builder builder(String projectId, String kind) { return new Builder(projectId, kind); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java index c625c067f6c2..c6cdc0fa6142 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java @@ -164,7 +164,7 @@ public static Key fromUrlSafe(String urlSafe) { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(DatastoreV1.Key.parseFrom(bytesPb)); } diff --git 
a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java index 28f852ed5355..a440992870df 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java @@ -58,7 +58,7 @@ public Key newKey(long id) { /** * Resets the KeyFactory to its initial state. - * @return {@code this} for chaining. + * @return {@code this} for chaining */ public KeyFactory reset() { projectId(pi); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java new file mode 100644 index 000000000000..7afa0f5099d6 --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java @@ -0,0 +1,68 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +import com.google.api.services.datastore.DatastoreV1; + +/** + * An implementation of a Google Cloud Datastore key-only query that can be constructed by providing + * all the specific query elements. 
+ * + * @see Datastore + * queries + */ +public final class KeyQuery extends StructuredQuery { + + private static final long serialVersionUID = -746768461459070045L; + + /** + * A {@code KeyQuery} builder for queries that return {@link Key} results. + */ + public static final class Builder extends StructuredQuery.BuilderImpl { + + Builder(KeyQuery query) { + super(query); + } + + Builder() { + super(ResultType.KEY); + projection(Projection.property(KEY_PROPERTY_NAME)); + } + + @Override + Builder mergeFrom(DatastoreV1.Query queryPb) { + super.mergeFrom(queryPb); + projection(Projection.property(KEY_PROPERTY_NAME)); + clearGroupBy(); + return this; + } + + @Override + public KeyQuery build() { + return new KeyQuery(this); + } + } + + KeyQuery(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ListValue.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ListValue.java index 41c7e82788b5..06282a2c79d1 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ListValue.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ListValue.java @@ -70,17 +70,16 @@ private Builder() { super(ValueType.LIST); } - public Builder addValue(Value value) { + private void addValueHelper(Value value) { // see datastore_v1.proto definition for list_value Preconditions.checkArgument(value.type() != ValueType.LIST, "Cannot contain another list"); listBuilder.add(value); - return this; } public Builder addValue(Value first, Value... 
other) { - addValue(first); + addValueHelper(first); for (Value value : other) { - addValue(value); + addValueHelper(value); } return this; } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java index 186ed97adcde..6b76eb70ea5b 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java @@ -86,7 +86,7 @@ public boolean equals(Object obj) { } @Override - protected DatastoreV1.Key.PathElement toPb() { + DatastoreV1.Key.PathElement toPb() { DatastoreV1.Key.PathElement.Builder pathElementPb = DatastoreV1.Key.PathElement.newBuilder(); pathElementPb.setKind(kind); if (id != null) { @@ -98,7 +98,7 @@ protected DatastoreV1.Key.PathElement toPb() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(DatastoreV1.Key.PathElement.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java new file mode 100644 index 000000000000..bad9fc5af2d0 --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java @@ -0,0 +1,112 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +/** + * An implementation of a Google Cloud Datastore projection entity query that can be constructed by + * providing all the specific query elements. + * + * @see Datastore + * queries + */ +public final class ProjectionEntityQuery extends StructuredQuery { + + private static final long serialVersionUID = 5488451194542425391L; + + /** + * A {@code ProjectionEntityQuery} builder for queries that return {@link ProjectionEntity} + * results. + */ + public static final class Builder extends StructuredQuery.BuilderImpl { + + Builder(ProjectionEntityQuery query) { + super(query); + } + + Builder() { + super(ResultType.PROJECTION_ENTITY); + } + + /** + * Clears the projection clause. + */ + @Override + public Builder clearProjection() { + super.clearProjection(); + return this; + } + + /** + * Sets the query's projection clause (clearing any previously specified Projection settings). + */ + @Override + public Builder projection(Projection projection, Projection... others) { + super.projection(projection, others); + return this; + } + + /** + * Adds one or more projections to the existing projection clause. + */ + @Override + public Builder addProjection(Projection projection, Projection... others) { + super.addProjection(projection, others); + return this; + } + + /** + * Clears the group by clause. + */ + @Override + public Builder clearGroupBy() { + super.clearGroupBy(); + return this; + } + + /** + * Sets the query's group by clause (clearing any previously specified GroupBy settings). 
+ */ + @Override + public Builder groupBy(String property, String... others) { + super.groupBy(property, others); + return this; + } + + /** + * Adds one or more properties to the existing group by clause. + */ + @Override + public Builder addGroupBy(String property, String... others) { + super.addGroupBy(property, others); + return this; + } + + @Override + public ProjectionEntityQuery build() { + return new ProjectionEntityQuery(this); + } + } + + ProjectionEntityQuery(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java index 343535d94628..50591a87a6a4 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java @@ -22,9 +22,6 @@ import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; import com.google.common.collect.Maps; -import com.google.gcloud.datastore.StructuredQuery.EntityQueryBuilder; -import com.google.gcloud.datastore.StructuredQuery.KeyQueryBuilder; -import com.google.gcloud.datastore.StructuredQuery.ProjectionEntityQueryBuilder; import com.google.protobuf.GeneratedMessage; import com.google.protobuf.InvalidProtocolBufferException; @@ -35,6 +32,10 @@ * A Google Cloud Datastore query. * For usage examples see {@link GqlQuery} and {@link StructuredQuery}. * + * Note that queries require proper indexing. See + * + * Cloud Datastore Index Configuration for help configuring indexes. + * * @param the type of the values returned by this query. 
* @see Datastore Queries */ @@ -61,7 +62,8 @@ public abstract static class ResultType implements java.io.Serializable { private static final long serialVersionUID = 1602329532153860907L; - @Override protected Object convert(DatastoreV1.Entity entityPb) { + @Override + Object convert(DatastoreV1.Entity entityPb) { if (entityPb.getPropertyCount() == 0) { if (!entityPb.hasKey()) { return null; @@ -77,7 +79,8 @@ public abstract static class ResultType implements java.io.Serializable { private static final long serialVersionUID = 7712959777507168274L; - @Override protected Entity convert(DatastoreV1.Entity entityPb) { + @Override + Entity convert(DatastoreV1.Entity entityPb) { return Entity.fromPb(entityPb); } }; @@ -87,7 +90,8 @@ public abstract static class ResultType implements java.io.Serializable { private static final long serialVersionUID = -8514289244104446252L; - @Override protected Key convert(DatastoreV1.Entity entityPb) { + @Override + Key convert(DatastoreV1.Entity entityPb) { return Key.fromPb(entityPb.getKey()); } }; @@ -98,7 +102,8 @@ public abstract static class ResultType implements java.io.Serializable { private static final long serialVersionUID = -7591409419690650246L; - @Override protected ProjectionEntity convert(DatastoreV1.Entity entityPb) { + @Override + ProjectionEntity convert(DatastoreV1.Entity entityPb) { return ProjectionEntity.fromPb(entityPb); } }; @@ -147,7 +152,7 @@ boolean isAssignableFrom(ResultType otherResultType) { return resultClass.isAssignableFrom(otherResultType.resultClass); } - protected abstract V convert(DatastoreV1.Entity entityPb); + abstract V convert(DatastoreV1.Entity entityPb); static ResultType fromPb(DatastoreV1.EntityResult.ResultType typePb) { return MoreObjects.firstNonNull(PB_TO_INSTANCE.get(typePb), UNKNOWN); @@ -177,16 +182,16 @@ public String toString() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws 
InvalidProtocolBufferException { return fromPb(resultType, namespace, bytesPb); } - protected abstract Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) + abstract Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) throws InvalidProtocolBufferException; - protected abstract void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb); + abstract void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb); - protected abstract Query nextQuery(DatastoreV1.QueryResultBatch responsePb); + abstract Query nextQuery(DatastoreV1.QueryResultBatch responsePb); /** * Returns a new {@link GqlQuery} builder. @@ -209,21 +214,21 @@ public static GqlQuery.Builder gqlQueryBuilder(ResultType resultType, /** * Returns a new {@link StructuredQuery} builder for full (complete entities) queries. */ - public static EntityQueryBuilder entityQueryBuilder() { - return new EntityQueryBuilder(); + public static EntityQuery.Builder entityQueryBuilder() { + return new EntityQuery.Builder(); } /** * Returns a new {@link StructuredQuery} builder for key only queries. */ - public static KeyQueryBuilder keyQueryBuilder() { - return new KeyQueryBuilder(); + public static KeyQuery.Builder keyQueryBuilder() { + return new KeyQuery.Builder(); } /** * Returns a new {@link StructuredQuery} builder for projection queries. 
*/ - public static ProjectionEntityQueryBuilder projectionEntityQueryBuilder() { - return new ProjectionEntityQueryBuilder(); + public static ProjectionEntityQuery.Builder projectionEntityQueryBuilder() { + return new ProjectionEntityQuery.Builder(); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java index b23c56a7c395..b882131ba939 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java @@ -22,8 +22,9 @@ * The result of a Google Cloud Datastore query submission. * When the result is not typed it is possible to cast it to its appropriate type according to * the {@link #resultClass} value. - * Results are loaded lazily; therefore it is possible to get a {@code DatastoreException} - * upon {@link Iterator#hasNext hasNext} or {@link Iterator#next next} calls. + * Results are loaded lazily in batches, where batch size is set by Cloud Datastore. As a result, it + * is possible to get a {@code DatastoreException} upon {@link Iterator#hasNext hasNext} or + * {@link Iterator#next next} calls. * * @param the type of the results value. */ @@ -35,8 +36,8 @@ public interface QueryResults extends Iterator { Class resultClass(); /** - * Returns the Cursor for point after the value returned in the last {@link #next} call. - * Not currently implemented (depends on v1beta3). + * Returns the Cursor for the point after the value returned in the last {@link #next} call. + * Currently, {@code cursorAfter} returns null in all cases but the last result. 
*/ Cursor cursorAfter(); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java index cd3fe9dd776b..3c2e0d177f80 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java @@ -21,6 +21,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.AbstractIterator; import com.google.gcloud.datastore.Query.ResultType; +import com.google.protobuf.ByteString; import java.util.Iterator; import java.util.Objects; @@ -36,7 +37,7 @@ class QueryResultsImpl extends AbstractIterator implements QueryResults private DatastoreV1.QueryResultBatch queryResultBatchPb; private boolean lastBatch; private Iterator entityResultPbIter; - //private ByteString cursor; // only available in v1beta3 + private ByteString cursor; // only available in v1beta3 QueryResultsImpl(DatastoreImpl datastore, DatastoreV1.ReadOptions readOptionsPb, @@ -83,6 +84,7 @@ protected T computeNext() { sendRequest(); } if (!entityResultPbIter.hasNext()) { + cursor = queryResultBatchPb.getEndCursor(); return endOfData(); } DatastoreV1.EntityResult entityResultPb = entityResultPbIter.next(); @@ -99,7 +101,7 @@ public Class resultClass() { @Override public Cursor cursorAfter() { + return cursor == null ? 
null : new Cursor(cursor); //return new Cursor(cursor); // only available in v1beta3 - return null; } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java index ff62fe89195f..89d19bcfd892 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java @@ -46,7 +46,7 @@ private void readObject(ObjectInputStream input) throws IOException, ClassNotFou bytesPb = (byte[]) input.readObject(); } - protected Object readResolve() throws ObjectStreamException { + Object readResolve() throws ObjectStreamException { try { return fromPb(bytesPb); } catch (InvalidProtocolBufferException ex) { @@ -58,7 +58,7 @@ protected Object readResolve() throws ObjectStreamException { } } - protected abstract M toPb(); + abstract M toPb(); - protected abstract Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException; + abstract Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException; } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java index b592dc7b600f..5892268f859c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java @@ -46,45 +46,45 @@ *

A usage example:

* *

A simple query that returns all entities for a specific kind - *

{@code
- *   Query query = Query.entityQueryBuilder().kind(kind).build();
- *   QueryResults results = datastore.run(query);
- *   while (results.hasNext()) {
- *     Entity entity = results.next();
- *     ...
- *   }
+ * 
 {@code
+ * Query query = Query.entityQueryBuilder().kind(kind).build();
+ * QueryResults results = datastore.run(query);
+ * while (results.hasNext()) {
+ *   Entity entity = results.next();
+ *   ...
+ * }
  * } 
* *

A simple key-only query of all entities for a specific kind - *

{@code
- *   Query keyOnlyQuery =  Query.keyQueryBuilder().kind(KIND1).build();
- *   QueryResults results = datastore.run(keyOnlyQuery);
- *   ...
+ * 
 {@code
+ * Query keyOnlyQuery =  Query.keyQueryBuilder().kind(KIND1).build();
+ * QueryResults results = datastore.run(keyOnlyQuery);
+ * ...
  * } 
* *

A less trivial example of a projection query that returns the first 10 results * of "age" and "name" properties (sorted and grouped by "age") with an age greater than 18 - *

{@code
- *   Query query = Query.projectionEntityQueryBuilder()
- *       .kind(kind)
- *       .projection(Projection.property("age"), Projection.first("name"))
- *       .filter(PropertyFilter.gt("age", 18))
- *       .groupBy("age")
- *       .orderBy(OrderBy.asc("age"))
- *       .limit(10)
- *       .build();
- *   QueryResults results = datastore.run(query);
- *   ...
+ * 
 {@code
+ * Query query = Query.projectionEntityQueryBuilder()
+ *     .kind(kind)
+ *     .projection(Projection.property("age"), Projection.first("name"))
+ *     .filter(PropertyFilter.gt("age", 18))
+ *     .groupBy("age")
+ *     .orderBy(OrderBy.asc("age"))
+ *     .limit(10)
+ *     .build();
+ * QueryResults results = datastore.run(query);
+ * ...
  * } 
* * @param the type of the result values this query will produce * @see Datastore * queries */ -public class StructuredQuery extends Query { +public abstract class StructuredQuery extends Query { private static final long serialVersionUID = 546838955624019594L; - private static final String KEY_PROPERTY_NAME = "__key__"; + static final String KEY_PROPERTY_NAME = "__key__"; private final transient String kind; private final ImmutableList projection; @@ -103,7 +103,7 @@ public abstract static class Filter implements Serializable { Filter() { } - protected abstract DatastoreV1.Filter toPb(); + abstract DatastoreV1.Filter toPb(); static Filter fromPb(DatastoreV1.Filter filterPb) { if (filterPb.hasCompositeFilter()) { @@ -186,7 +186,7 @@ public static CompositeFilter and(Filter first, Filter... other) { } @Override - protected DatastoreV1.Filter toPb() { + DatastoreV1.Filter toPb() { DatastoreV1.Filter.Builder filterPb = DatastoreV1.Filter.newBuilder(); DatastoreV1.CompositeFilter.Builder compositeFilterPb = filterPb.getCompositeFilterBuilder(); compositeFilterPb.setOperator(operator.toPb()); @@ -231,7 +231,7 @@ private PropertyFilter(String property, Operator operator, Value value) { this.value = checkNotNull(value); } - public static PropertyFilter fromPb(DatastoreV1.PropertyFilter propertyFilterPb) { + static PropertyFilter fromPb(DatastoreV1.PropertyFilter propertyFilterPb) { String property = propertyFilterPb.getProperty().getName(); Operator operator = Operator.fromPb(propertyFilterPb.getOperator()); Value value = Value.fromPb(propertyFilterPb.getValue()); @@ -435,7 +435,7 @@ public static PropertyFilter isNull(String property) { } @Override - protected DatastoreV1.Filter toPb() { + DatastoreV1.Filter toPb() { DatastoreV1.Filter.Builder filterPb = DatastoreV1.Filter.newBuilder(); DatastoreV1.PropertyFilter.Builder propertyFilterPb = filterPb.getPropertyFilterBuilder(); propertyFilterPb.getPropertyBuilder().setName(property); @@ -587,7 +587,7 @@ 
DatastoreV1.PropertyExpression toPb() { return expressionPb.build(); } - public static Projection fromPb(DatastoreV1.PropertyExpression propertyExpressionPb) { + static Projection fromPb(DatastoreV1.PropertyExpression propertyExpressionPb) { String property = propertyExpressionPb.getProperty().getName(); Aggregate aggregate = null; if (propertyExpressionPb.hasAggregationFunction()) { @@ -609,7 +609,48 @@ public static Projection first(String property) { } } - static class BaseBuilder> { + /** + * Interface for StructuredQuery builders. + * + * @param the type of result the query returns. + */ + public interface Builder { + Builder namespace(String namespace); + + Builder kind(String kind); + + Builder startCursor(Cursor startCursor); + + Builder endCursor(Cursor endCursor); + + Builder offset(int offset); + + Builder limit(Integer limit); + + Builder filter(Filter filter); + + Builder clearOrderBy(); + + /** + * Sets the query's order by clause (clearing any previously specified OrderBy settings). + */ + Builder orderBy(OrderBy orderBy, OrderBy... others); + + /** + * Adds settings to the existing order by clause. + */ + Builder addOrderBy(OrderBy orderBy, OrderBy... others); + + StructuredQuery build(); + } + + /** + * Base class for StructuredQuery builders. + * + * @param the type of result the query returns. + * @param the query builder. 
+ */ + abstract static class BuilderImpl> implements Builder { private final ResultType resultType; private String namespace; @@ -623,69 +664,87 @@ static class BaseBuilder> { private int offset; private Integer limit; - BaseBuilder(ResultType resultType) { + BuilderImpl(ResultType resultType) { this.resultType = resultType; } + BuilderImpl(StructuredQuery query) { + this(query.type()); + namespace = query.namespace(); + kind = query.kind; + projection.addAll(query.projection); + filter = query.filter; + groupBy.addAll(query.groupBy); + orderBy.addAll(query.orderBy); + startCursor = query.startCursor; + endCursor = query.endCursor; + offset = query.offset; + limit = query.limit; + } + @SuppressWarnings("unchecked") B self() { return (B) this; } + @Override public B namespace(String namespace) { this.namespace = namespace; return self(); } + @Override public B kind(String kind) { this.kind = kind; return self(); } + @Override public B startCursor(Cursor startCursor) { this.startCursor = startCursor; return self(); } + @Override public B endCursor(Cursor endCursor) { this.endCursor = endCursor; return self(); } + @Override public B offset(int offset) { Preconditions.checkArgument(offset >= 0, "offset must not be negative"); this.offset = offset; return self(); } + @Override public B limit(Integer limit) { Preconditions.checkArgument(limit == null || limit > 0, "limit must be positive"); this.limit = limit; return self(); } + @Override public B filter(Filter filter) { this.filter = filter; return self(); } + @Override public B clearOrderBy() { orderBy.clear(); return self(); } - /** - * Sets the query's order by clause (clearing any previously specified OrderBy settings). - */ + @Override public B orderBy(OrderBy orderBy, OrderBy... others) { clearOrderBy(); addOrderBy(orderBy, others); return self(); } - /** - * Adds settings to the existing order by clause. - */ + @Override public B addOrderBy(OrderBy orderBy, OrderBy... 
others) { this.orderBy.add(orderBy); Collections.addAll(this.orderBy, others); @@ -756,117 +815,9 @@ B mergeFrom(DatastoreV1.Query queryPb) { } return self(); } - - public StructuredQuery build() { - return new StructuredQuery<>(this); - } } - static final class Builder extends BaseBuilder> { - - Builder(ResultType resultType) { - super(resultType); - } - } - - /** - * A StructuredQuery builder for queries that return Entity results. - */ - public static final class EntityQueryBuilder extends BaseBuilder { - - EntityQueryBuilder() { - super(ResultType.ENTITY); - } - - @Override - public StructuredQuery build() { - return new StructuredQuery<>(this); - } - } - - /** - * A StructuredQuery builder for queries that return Key results. - */ - public static final class KeyQueryBuilder extends BaseBuilder { - - KeyQueryBuilder() { - super(ResultType.KEY); - projection(Projection.property(KEY_PROPERTY_NAME)); - } - - @Override - protected KeyQueryBuilder mergeFrom(DatastoreV1.Query queryPb) { - super.mergeFrom(queryPb); - projection(Projection.property(KEY_PROPERTY_NAME)); - clearGroupBy(); - return this; - } - - @Override - public StructuredQuery build() { - return new StructuredQuery<>(this); - } - } - - /** - * A StructuredQuery builder for projection queries. - */ - public static final class ProjectionEntityQueryBuilder - extends BaseBuilder { - - ProjectionEntityQueryBuilder() { - super(ResultType.PROJECTION_ENTITY); - } - - @Override - public StructuredQuery build() { - return new StructuredQuery<>(this); - } - - @Override - public ProjectionEntityQueryBuilder clearProjection() { - return super.clearProjection(); - } - - /** - * Sets the query's projection clause (clearing any previously specified Projection settings). - */ - @Override - public ProjectionEntityQueryBuilder projection(Projection projection, Projection... others) { - return super.projection(projection, others); - } - - /** - * Adds one or more projections to the existing projection clause. 
- */ - @Override - public ProjectionEntityQueryBuilder addProjection(Projection projection, Projection... others) { - return super.addProjection(projection, others); - } - - @Override - public ProjectionEntityQueryBuilder clearGroupBy() { - return super.clearGroupBy(); - } - - /** - * Sets the query's group by clause (clearing any previously specified GroupBy settings). - */ - @Override - public ProjectionEntityQueryBuilder groupBy(String property, String... others) { - return super.groupBy(property, others); - } - - /** - * Adds one or more properties to the existing group by clause. - */ - @Override - public ProjectionEntityQueryBuilder addGroupBy(String property, String... others) { - return super.addGroupBy(property, others); - } - } - - StructuredQuery(BaseBuilder builder) { + StructuredQuery(BuilderImpl builder) { super(builder.resultType, builder.namespace); kind = builder.kind; projection = ImmutableList.copyOf(builder.projection); @@ -947,15 +898,16 @@ public Integer limit() { return limit; } + public abstract Builder toBuilder(); + @Override - protected void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb) { + void populatePb(DatastoreV1.RunQueryRequest.Builder requestPb) { requestPb.setQuery(toPb()); } @Override - protected StructuredQuery nextQuery(DatastoreV1.QueryResultBatch responsePb) { - Builder builder = new Builder<>(type()); - builder.mergeFrom(toPb()); + StructuredQuery nextQuery(DatastoreV1.QueryResultBatch responsePb) { + Builder builder = toBuilder(); builder.startCursor(new Cursor(responsePb.getEndCursor())); if (offset > 0 && responsePb.getSkippedResults() < offset) { builder.offset(offset - responsePb.getSkippedResults()); @@ -969,7 +921,7 @@ protected StructuredQuery nextQuery(DatastoreV1.QueryResultBatch responsePb) } @Override - protected DatastoreV1.Query toPb() { + DatastoreV1.Query toPb() { DatastoreV1.Query.Builder queryPb = DatastoreV1.Query.newBuilder(); if (kind != null) { queryPb.addKindBuilder().setName(kind); @@ 
-1002,20 +954,20 @@ protected DatastoreV1.Query toPb() { } @Override - protected Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) + Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(resultType, namespace, DatastoreV1.Query.parseFrom(bytesPb)); } - private static StructuredQuery fromPb(ResultType resultType, String namespace, + static StructuredQuery fromPb(ResultType resultType, String namespace, DatastoreV1.Query queryPb) { - BaseBuilder builder; + BuilderImpl builder; if (resultType.equals(ResultType.ENTITY)) { - builder = new EntityQueryBuilder(); + builder = new EntityQuery.Builder(); } else if (resultType.equals(ResultType.KEY)) { - builder = new KeyQueryBuilder(); + builder = new KeyQuery.Builder(); } else { - builder = new ProjectionEntityQueryBuilder(); + builder = new ProjectionEntityQuery.Builder(); } return builder.namespace(namespace).mergeFrom(queryPb).build(); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java index 8089c0130f5d..78ee217f31e7 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java @@ -30,21 +30,21 @@ * the Datastore upon {@code commit}. * A usage example: *
 {@code
- *   Transaction transaction = datastore.newTransaction();
- *   try {
- *     Entity entity = transaction.get(key);
- *     if (!entity.contains("last_name") || entity.isNull("last_name")) {
- *       String[] name = entity.getString("name").split(" ");
- *       entity = Entity.builder(entity).remove("name").set("first_name", name[0])
- *           .set("last_name", name[1]).build();
- *       transaction.update(entity);
- *       transaction.commit();
- *     }
- *   } finally {
- *     if (transaction.active()) {
- *       transaction.rollback();
- *     }
+ * Transaction transaction = datastore.newTransaction();
+ * try {
+ *   Entity entity = transaction.get(key);
+ *   if (!entity.contains("last_name") || entity.isNull("last_name")) {
+ *     String[] name = entity.getString("name").split(" ");
+ *     entity = Entity.builder(entity).remove("name").set("first_name", name[0])
+ *         .set("last_name", name[1]).build();
+ *     transaction.update(entity);
+ *     transaction.commit();
  *   }
+ * } finally {
+ *   if (transaction.active()) {
+ *     transaction.rollback();
+ *   }
+ * }
  * } 
* * @see diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java index c5fc63a960b1..4d8aaec086f1 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java @@ -211,7 +211,7 @@ public boolean equals(Object obj) { @Override @SuppressWarnings("unchecked") - protected DatastoreV1.Value toPb() { + DatastoreV1.Value toPb() { return type().getMarshaller().toProto(this); } @@ -231,7 +231,7 @@ static Value fromPb(DatastoreV1.Value proto) { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(DatastoreV1.Value.parseFrom(bytesPb)); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java index f5b5d4c1319b..a867ef25b321 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java @@ -18,6 +18,10 @@ /** * A common interface for Value builders. + * + * @param the data type that the {@code Value} object holds. + * @param

the value type. + * @param the value type's associated builder. */ public interface ValueBuilder, B extends ValueBuilder> { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java index 2135267d9ac4..fbf468d6458d 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java @@ -17,36 +17,43 @@ /** * A client to the Google Cloud Datastore. * - *

A simple usage example: + *

Here's a simple usage example for using gcloud-java from App/Compute Engine. This example + * shows how to create a Datastore entity. For the complete source code see + * + * CreateEntity.java. *

 {@code
- * DatastoreOptions options = DatastoreOptions.builder().projectId(PROJECT_ID).build();
- * Datastore datastore = options.service();
- * KeyFactory keyFactory = datastore.newKeyFactory().kind(kind);
- * Key key = keyFactory.newKey(keyName);
+ * Datastore datastore = DatastoreOptions.defaultInstance().service();
+ * KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind");
+ * Key key = keyFactory.newKey("keyName");
+ * Entity entity = Entity.builder(key)
+ *     .set("name", "John Doe")
+ *     .set("age", 30)
+ *     .set("access_time", DateTime.now())
+ *     .build();
+ * datastore.put(entity);
+ * } 
+ *

+ * This second example shows how to get and update a Datastore entity if it exists. For the complete + * source code see + * + * UpdateEntity.java. + *

 {@code
+ * Datastore datastore = DatastoreOptions.defaultInstance().service();
+ * KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind");
+ * Key key = keyFactory.newKey("keyName");
  * Entity entity = datastore.get(key);
- * if (entity == null) {
- *   entity = Entity.builder(key)
- *       .set("name", "John Do")
- *       .set("age", LongValue.builder(100).indexed(false).build())
- *       .set("updated", false)
+ * if (entity != null) {
+ *   System.out.println("Updating access_time for " + entity.getString("name"));
+ *   entity = Entity.builder(entity)
+ *       .set("access_time", DateTime.now())
  *       .build();
- *   datastore.put(entity);
- * } else {
- *   boolean updated = entity.getBoolean("updated");
- *   if (!updated) {
- *     String[] name = entity.getString("name").split(" ");
- *     entity = Entity.builder(entity)
- *         .set("name", name[0])
- *         .set("last_name", StringValue.builder(name[1]).indexed(false).build())
- *         .set("updated", true)
- *         .remove("old_property")
- *         .set("new_property", 1.1)
- *         .build();
- *     datastore.update(entity);
- *   }
- * }
- * } 
- * + * datastore.update(entity); + * }}
+ *

When using gcloud-java from outside of App/Compute Engine, you have to specify a + * project ID and + * provide + * credentials. * @see Google Cloud Datastore */ package com.google.gcloud.datastore; diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java new file mode 100644 index 000000000000..002078550d1f --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpc.java @@ -0,0 +1,80 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.datastore.spi; + +import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; +import com.google.api.services.datastore.DatastoreV1.AllocateIdsResponse; +import com.google.api.services.datastore.DatastoreV1.BeginTransactionRequest; +import com.google.api.services.datastore.DatastoreV1.BeginTransactionResponse; +import com.google.api.services.datastore.DatastoreV1.CommitRequest; +import com.google.api.services.datastore.DatastoreV1.CommitResponse; +import com.google.api.services.datastore.DatastoreV1.LookupRequest; +import com.google.api.services.datastore.DatastoreV1.LookupResponse; +import com.google.api.services.datastore.DatastoreV1.RollbackRequest; +import com.google.api.services.datastore.DatastoreV1.RollbackResponse; +import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; +import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; +import com.google.gcloud.datastore.DatastoreException; + +/** + * Provides access to the remote Datastore service. + */ +public interface DatastoreRpc { + + /** + * Sends an allocate IDs request. + * + * @throws DatastoreException upon failure + */ + AllocateIdsResponse allocateIds(AllocateIdsRequest request); + + /** + * Sends a begin transaction request. + * + * @throws DatastoreException upon failure + */ + BeginTransactionResponse beginTransaction(BeginTransactionRequest request) + throws DatastoreException; + + /** + * Sends a commit request. + * + * @throws DatastoreException upon failure + */ + CommitResponse commit(CommitRequest request); + + /** + * Sends a lookup request. + * + * @throws DatastoreException upon failure + */ + LookupResponse lookup(LookupRequest request); + + /** + * Sends a rollback request. + * + * @throws DatastoreException upon failure + */ + RollbackResponse rollback(RollbackRequest request); + + /** + * Sends a request to run a query. 
+ * + * @throws DatastoreException upon failure + */ + RunQueryResponse runQuery(RunQueryRequest request); +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java similarity index 90% rename from gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java rename to gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java index 1815dda30f5d..0979b2203037 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpcFactory.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DatastoreRpcFactory.java @@ -14,9 +14,10 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.datastore.spi; import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.spi.ServiceRpcFactory; /** * An interface for Datastore RPC factory. diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java similarity index 68% rename from gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java rename to gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java index ccb89267a29e..093322fa4117 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/spi/DefaultDatastoreRpc.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.google.gcloud.spi; +package com.google.gcloud.datastore.spi; import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; import com.google.api.services.datastore.DatastoreV1.AllocateIdsResponse; @@ -29,43 +29,24 @@ import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; import com.google.api.services.datastore.client.Datastore; -import com.google.api.services.datastore.client.DatastoreException; import com.google.api.services.datastore.client.DatastoreFactory; import com.google.api.services.datastore.client.DatastoreOptions.Builder; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableMap; +import com.google.gcloud.datastore.DatastoreException; import com.google.gcloud.datastore.DatastoreOptions; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONTokener; +import java.io.IOException; import java.net.InetAddress; -import java.net.MalformedURLException; import java.net.URL; -import java.net.UnknownHostException; -import java.util.HashMap; -import java.util.Map; public class DefaultDatastoreRpc implements DatastoreRpc { private final Datastore client; - private static final ImmutableMap STR_TO_REASON; - private static final ImmutableMap HTTP_STATUS_TO_REASON; - - static { - ImmutableMap.Builder builder = ImmutableMap.builder(); - Map httpCodes = new HashMap<>(); - for (Reason reason : Reason.values()) { - builder.put(reason.name(), reason); - httpCodes.put(reason.httpStatus(), reason); - } - STR_TO_REASON = builder.build(); - HTTP_STATUS_TO_REASON = ImmutableMap.copyOf(httpCodes); - } - public DefaultDatastoreRpc(DatastoreOptions options) { String normalizedHost = normalizeHost(options.host()); client = DatastoreFactory.get().create( @@ -95,7 +76,7 @@ private static boolean isLocalHost(String host) { } InetAddress hostAddr 
= InetAddress.getByName(new URL(normalizedHost).getHost()); return hostAddr.isAnyLocalAddress() || hostAddr.isLoopbackAddress(); - } catch (UnknownHostException | MalformedURLException e) { + } catch (Exception e) { // ignore } } @@ -106,82 +87,80 @@ private static boolean includesScheme(String url) { return url.startsWith("http://") || url.startsWith("https://"); } - private static DatastoreRpcException translate(DatastoreException exception) { + private static DatastoreException translate( + com.google.api.services.datastore.client.DatastoreException exception) { String message = exception.getMessage(); - String reasonStr = ""; + int code = exception.getCode(); + String reason = ""; if (message != null) { try { JSONObject json = new JSONObject(new JSONTokener(message)); JSONObject error = json.getJSONObject("error").getJSONArray("errors").getJSONObject(0); - reasonStr = error.getString("reason"); + reason = error.getString("reason"); message = error.getString("message"); } catch (JSONException ignore) { // ignore - will be converted to unknown } } - Reason reason = STR_TO_REASON.get(reasonStr); if (reason == null) { - reason = HTTP_STATUS_TO_REASON.get(exception.getCode()); + if (exception.getCause() instanceof IOException) { + return new DatastoreException((IOException) exception.getCause()); + } } - return reason != null - ? 
new DatastoreRpcException(reason) - : new DatastoreRpcException("Unknown", exception.getCode(), false, message); + return new DatastoreException(code, message, reason, exception); } @Override - public AllocateIdsResponse allocateIds(AllocateIdsRequest request) - throws DatastoreRpcException { + public AllocateIdsResponse allocateIds(AllocateIdsRequest request) { try { return client.allocateIds(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreRpcException { + public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) { try { return client.beginTransaction(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public CommitResponse commit(CommitRequest request) throws DatastoreRpcException { + public CommitResponse commit(CommitRequest request) { try { return client.commit(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public LookupResponse lookup(LookupRequest request) throws DatastoreRpcException { + public LookupResponse lookup(LookupRequest request) { try { return client.lookup(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public RollbackResponse rollback(RollbackRequest request) throws DatastoreRpcException { + public RollbackResponse rollback(RollbackRequest request) { try { return client.rollback(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } @Override - public RunQueryResponse runQuery(RunQueryRequest 
request) throws DatastoreRpcException { + public RunQueryResponse runQuery(RunQueryRequest request) { try { return client.runQuery(request); - } catch (DatastoreException ex) { + } catch (com.google.api.services.datastore.client.DatastoreException ex) { throw translate(ex); } } } - diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java index 7c60da50b0b0..fdb6774f810f 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java @@ -17,6 +17,7 @@ package com.google.gcloud.datastore.testing; import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkArgument; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.base.Strings; @@ -85,6 +86,7 @@ public class LocalGcdHelper { private static final String GCLOUD = "gcloud"; private static final Path INSTALLED_GCD_PATH; private static final String GCD_VERSION_PREFIX = "gcd-emulator "; + private static final double DEFAULT_CONSISTENCY = 0.9; static { INSTALLED_GCD_PATH = installedGcdPath(); @@ -398,9 +400,15 @@ public LocalGcdHelper(String projectId, int port) { * All content is written to a temporary directory that will be deleted when * {@link #stop()} is called or when the program terminates) to make sure that no left-over * data from prior runs is used. + * + * @param consistency the fraction of job application attempts that will succeed, with 0.0 + * resulting in no attempts succeeding, and 1.0 resulting in all attempts succeeding. Defaults + * to 0.9. Note that setting this to 1.0 may mask incorrect assumptions about the consistency + * of non-ancestor queries; non-ancestor queries are eventually consistent. 
*/ - public void start() throws IOException, InterruptedException { + public void start(double consistency) throws IOException, InterruptedException { // send a quick request in case we have a hanging process from a previous run + checkArgument(consistency >= 0.0 && consistency <= 1.0, "Consistency must be between 0 and 1"); sendQuitRequest(port); // Each run is associated with its own folder that is deleted once test completes. gcdPath = Files.createTempDirectory("gcd"); @@ -415,7 +423,7 @@ public void start() throws IOException, InterruptedException { } else { gcdExecutablePath = INSTALLED_GCD_PATH; } - startGcd(gcdExecutablePath); + startGcd(gcdExecutablePath, consistency); } private void downloadGcd() throws IOException { @@ -453,7 +461,8 @@ private void downloadGcd() throws IOException { } } - private void startGcd(Path executablePath) throws IOException, InterruptedException { + private void startGcd(Path executablePath, double consistency) + throws IOException, InterruptedException { // cleanup any possible data for the same project File datasetFolder = new File(gcdPath.toFile(), projectId); deleteRecurse(datasetFolder.toPath()); @@ -486,7 +495,8 @@ private void startGcd(Path executablePath) throws IOException, InterruptedExcept startProcess = CommandWrapper.create() .command(gcdAbsolutePath.toString(), "start", "--testing", "--allow_remote_shutdown", - "--port=" + Integer.toString(port), projectId) + "--port=" + Integer.toString(port), "--consistency=" + Double.toString(consistency), + projectId) .directory(gcdPath) .start(); processReader = ProcessStreamReader.start(startProcess.getInputStream()); @@ -526,6 +536,7 @@ private static void extractFile(ZipInputStream zipIn, File filePath) throws IOEx public static boolean sendQuitRequest(int port) { StringBuilder result = new StringBuilder(); + String shutdownMsg = "Shutting down local server"; try { URL url = new URL("http", "localhost", port, "/_ah/admin/quit"); HttpURLConnection con = (HttpURLConnection) 
url.openConnection(); @@ -537,13 +548,13 @@ public static boolean sendQuitRequest(int port) { out.flush(); InputStream in = con.getInputStream(); int currByte = 0; - while ((currByte = in.read()) != -1) { + while ((currByte = in.read()) != -1 && result.length() < shutdownMsg.length()) { result.append(((char) currByte)); } } catch (IOException ignore) { // ignore } - return result.toString().startsWith("Shutting down local server"); + return result.toString().startsWith(shutdownMsg); } public void stop() throws IOException, InterruptedException { @@ -578,10 +589,10 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO }); } - public static LocalGcdHelper start(String projectId, int port) + public static LocalGcdHelper start(String projectId, int port, double consistency) throws IOException, InterruptedException { LocalGcdHelper helper = new LocalGcdHelper(projectId, port); - helper.start(); + helper.start(consistency); return helper; } @@ -593,7 +604,9 @@ public static void main(String... args) throws IOException, InterruptedException switch (action) { case "START": if (!isActive(DEFAULT_PROJECT_ID, port)) { - LocalGcdHelper helper = start(DEFAULT_PROJECT_ID, port); + double consistency = parsedArgs.get("consistency") == null + ? DEFAULT_CONSISTENCY : Double.parseDouble(parsedArgs.get("consistency")); + LocalGcdHelper helper = start(DEFAULT_PROJECT_ID, port, consistency); try (FileWriter writer = new FileWriter(".local_gcd_helper")) { writer.write(helper.gcdPath.toAbsolutePath().toString() + System.lineSeparator()); writer.write(Integer.toString(port)); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java deleted file mode 100644 index dffcc3f0e16f..000000000000 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2015 Google Inc. 
All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.google.gcloud.spi; - -import com.google.api.services.datastore.DatastoreV1.AllocateIdsRequest; -import com.google.api.services.datastore.DatastoreV1.AllocateIdsResponse; -import com.google.api.services.datastore.DatastoreV1.BeginTransactionRequest; -import com.google.api.services.datastore.DatastoreV1.BeginTransactionResponse; -import com.google.api.services.datastore.DatastoreV1.CommitRequest; -import com.google.api.services.datastore.DatastoreV1.CommitResponse; -import com.google.api.services.datastore.DatastoreV1.LookupRequest; -import com.google.api.services.datastore.DatastoreV1.LookupResponse; -import com.google.api.services.datastore.DatastoreV1.RollbackRequest; -import com.google.api.services.datastore.DatastoreV1.RollbackResponse; -import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; -import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; - -/** - * Provides access to the remote Datastore service. - */ -public interface DatastoreRpc { - - public class DatastoreRpcException extends Exception { - - /** - * The reason for the exception. 
- * - * @see Google - * Cloud Datastore error codes - */ - public enum Reason { - - ABORTED(true, "Request aborted", 409), - DEADLINE_EXCEEDED(true, "Deadline exceeded", 403), - FAILED_PRECONDITION(false, "Invalid request", 412), - INTERNAL(false, "Server returned an error", 500), - INVALID_ARGUMENT(false, "Request parameter has an invalid value", 400), - PERMISSION_DENIED(false, "Unauthorized request", 403), - RESOURCE_EXHAUSTED(false, "Quota exceeded", 402), - UNAVAILABLE(true, "Could not reach service", 503); - - private final boolean retryable; - private final String description; - private final int httpStatus; - - private Reason(boolean retryable, String description, int httpStatus) { - this.retryable = retryable; - this.description = description; - this.httpStatus = httpStatus; - } - - public boolean retryable() { - return retryable; - } - - public String description() { - return description; - } - - public int httpStatus() { - return httpStatus; - } - } - - private final String reason; - private final int httpStatus; - private final boolean retryable; - - public DatastoreRpcException(Reason reason) { - this(reason.name(), reason.httpStatus, reason.retryable, reason.description); - } - - public DatastoreRpcException(String reason, int httpStatus, boolean retryable, String message) { - super(message); - this.reason = reason; - this.httpStatus = httpStatus; - this.retryable = retryable; - } - - public String reason() { - return reason; - } - - public int httpStatus() { - return httpStatus; - } - - public boolean retryable() { - return retryable; - } - } - - AllocateIdsResponse allocateIds(AllocateIdsRequest request) throws DatastoreRpcException; - - BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreRpcException; - - CommitResponse commit(CommitRequest request) throws DatastoreRpcException; - - LookupResponse lookup(LookupRequest request) throws DatastoreRpcException; - - RollbackResponse rollback(RollbackRequest 
request) throws DatastoreRpcException; - - RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreRpcException; -} diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseEntityTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseEntityTest.java index 5ece01508d3a..a69ea5e20e3b 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseEntityTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseEntityTest.java @@ -67,6 +67,16 @@ public void setUp() { builder.set("list1", NullValue.of(), StringValue.of("foo")); builder.set("list2", ImmutableList.of(LongValue.of(10), DoubleValue.of(2))); builder.set("list3", Collections.singletonList(BooleanValue.of(true))); + builder.set( + "blobList", BLOB, Blob.copyFrom(new byte[] {3, 4}), Blob.copyFrom(new byte[] {5, 6})); + builder.set("booleanList", true, false, true); + builder.set("dateTimeList", DateTime.now(), DateTime.now(), DateTime.now()); + builder.set("doubleList", 12.3, 4.56, .789); + builder.set("keyList", KEY, Key.builder("ds2", "k2", "n2").build(), + Key.builder("ds3", "k3", "n3").build()); + builder.set("entityList", ENTITY, PARTIAL_ENTITY); + builder.set("stringList", "s1", "s2", "s3"); + builder.set("longList", 1, 23, 456); } @Test @@ -183,6 +193,17 @@ public void testGetList() throws Exception { assertEquals(Boolean.TRUE, list.get(0).get()); entity = builder.set("list1", ListValue.of(list)).build(); assertEquals(list, entity.getList("list1")); + List> stringList = entity.getList("stringList"); + assertEquals( + ImmutableList.of(StringValue.of("s1"), StringValue.of("s2"), StringValue.of("s3")), + stringList); + List> doubleList = entity.getList("doubleList"); + assertEquals( + ImmutableList.of(DoubleValue.of(12.3), DoubleValue.of(4.56), DoubleValue.of(.789)), + doubleList); + List entityList = entity.getList("entityList"); + assertEquals( + ImmutableList.of(EntityValue.of(ENTITY), 
EntityValue.of(PARTIAL_ENTITY)), entityList); } @Test @@ -198,7 +219,9 @@ public void testGetBlob() throws Exception { public void testNames() throws Exception { Set names = ImmutableSet.builder() .add("string", "stringValue", "boolean", "double", "long", "list1", "list2", "list3") - .add("entity", "partialEntity", "null", "dateTime", "blob", "key") + .add("entity", "partialEntity", "null", "dateTime", "blob", "key", "blobList") + .add("booleanList", "dateTimeList", "doubleList", "keyList", "entityList", "stringList") + .add("longList") .build(); BaseEntity entity = builder.build(); assertEquals(names, entity.names()); diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java index 8615ee025bd1..43db4695b191 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java @@ -50,6 +50,11 @@ protected BaseKey build() { protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return null; } + + @Override + protected BaseKey parent() { + return null; + } }; } } diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java index a64a3531c19d..f7bdcb89bcec 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java @@ -16,39 +16,81 @@ package com.google.gcloud.datastore; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; 
+import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import com.google.gcloud.datastore.DatastoreException.Code; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper; import org.junit.Test; +import java.io.IOException; +import java.net.SocketTimeoutException; + public class DatastoreExceptionTest { @Test - public void testCode() throws Exception { - for (Reason reason : Reason.values()) { - Code code = Code.valueOf(reason.name()); - assertEquals(reason.retryable(), code.retryable()); - assertEquals(reason.description(), code.description()); - assertEquals(reason.httpStatus(), code.httpStatus()); - } + public void testDatastoreException() throws Exception { + DatastoreException exception = new DatastoreException(409, "message", "ABORTED"); + assertEquals(409, exception.code()); + assertEquals("ABORTED", exception.reason()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(403, "message", "DEADLINE_EXCEEDED"); + assertEquals(403, exception.code()); + assertEquals("DEADLINE_EXCEEDED", exception.reason()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(503, "message", "UNAVAILABLE"); + assertEquals(503, exception.code()); + assertEquals("UNAVAILABLE", exception.reason()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new DatastoreException(500, "message", "INTERNAL"); + assertEquals(500, exception.code()); + assertEquals("INTERNAL", exception.reason()); + assertEquals("message", exception.getMessage()); + 
assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new DatastoreException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); - DatastoreException exception = new DatastoreException(Code.ABORTED, "bla"); - assertEquals(Code.ABORTED, exception.code()); } @Test public void testTranslateAndThrow() throws Exception { - for (Reason reason : Reason.values()) { - try { - DatastoreException.translateAndThrow(new DatastoreRpcException(reason)); - fail("Exception expected"); - } catch (DatastoreException ex) { - assertEquals(reason.name(), ex.code().name()); - } + DatastoreException cause = new DatastoreException(503, "message", "UNAVAILABLE"); + RetryHelper.RetryHelperException exceptionMock = + createMock(RetryHelper.RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + DatastoreException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); } } @@ -58,7 +100,7 @@ public void testThrowInvalidRequest() throws Exception { DatastoreException.throwInvalidRequest("message %s %d", "a", 1); fail("Exception expected"); } catch (DatastoreException ex) { - assertEquals(Code.FAILED_PRECONDITION, ex.code()); + assertEquals("FAILED_PRECONDITION", ex.reason()); assertEquals("message a 1", ex.getMessage()); } } diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java index 78536e3f45cb..1d188c7f4e94 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java 
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java @@ -22,16 +22,14 @@ import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; import com.google.gcloud.datastore.testing.LocalGcdHelper; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpcFactory; import org.easymock.EasyMock; import org.junit.Before; import org.junit.Test; -import java.io.IOException; - public class DatastoreOptionsTest { private static final String PROJECT_ID = "project_id"; @@ -41,7 +39,7 @@ public class DatastoreOptionsTest { private DatastoreOptions.Builder options; @Before - public void setUp() throws IOException, InterruptedException { + public void setUp() { datastoreRpcFactory = EasyMock.createMock(DatastoreRpcFactory.class); datastoreRpc = EasyMock.createMock(DatastoreRpc.class); options = DatastoreOptions.builder() diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java index f639ca3fdac0..e3829a2e71ce 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java @@ -27,17 +27,21 @@ import com.google.api.services.datastore.DatastoreV1; import com.google.api.services.datastore.DatastoreV1.EntityResult; +import com.google.api.services.datastore.DatastoreV1.QueryResultBatch; +import com.google.api.services.datastore.DatastoreV1.RunQueryRequest; +import com.google.api.services.datastore.DatastoreV1.RunQueryResponse; import com.google.common.collect.Iterators; +import com.google.common.collect.Lists; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.RetryParams; import 
com.google.gcloud.datastore.Query.ResultType; import com.google.gcloud.datastore.StructuredQuery.OrderBy; import com.google.gcloud.datastore.StructuredQuery.Projection; import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; +import com.google.gcloud.datastore.spi.DatastoreRpc; +import com.google.gcloud.datastore.spi.DatastoreRpcFactory; import com.google.gcloud.datastore.testing.LocalGcdHelper; -import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason; -import com.google.gcloud.spi.DatastoreRpcFactory; +import com.google.protobuf.ByteString; import org.easymock.EasyMock; import org.junit.AfterClass; @@ -87,8 +91,8 @@ public class DatastoreTest { FullEntity.builder(INCOMPLETE_KEY2).set("str", STR_VALUE).set("bool", BOOL_VALUE) .set("list", LIST_VALUE1).build(); private static final FullEntity PARTIAL_ENTITY2 = - FullEntity.builder(PARTIAL_ENTITY1).remove("str").set("bool", true). 
- set("list", LIST_VALUE1.get()).build(); + FullEntity.builder(PARTIAL_ENTITY1).remove("str").set("bool", true) + .set("list", LIST_VALUE1.get()).build(); private static final FullEntity PARTIAL_ENTITY3 = FullEntity.builder(PARTIAL_ENTITY1).key(IncompleteKey.builder(PROJECT_ID, KIND3).build()) .build(); @@ -116,15 +120,17 @@ public class DatastoreTest { @BeforeClass public static void beforeClass() throws IOException, InterruptedException { if (!LocalGcdHelper.isActive(PROJECT_ID, PORT)) { - gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT); + gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT, 1.0); } } @Before - public void setUp() throws IOException, InterruptedException { + public void setUp() { options = DatastoreOptions.builder() .projectId(PROJECT_ID) .host("http://localhost:" + PORT) + .authCredentials(AuthCredentials.noAuth()) + .retryParams(RetryParams.noRetries()) .build(); datastore = options.service(); StructuredQuery query = Query.keyQueryBuilder().build(); @@ -197,7 +203,7 @@ public void testTransactionWithRead() { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException expected) { - assertEquals(DatastoreException.Code.ABORTED, expected.code()); + assertEquals("ABORTED", expected.reason()); } } @@ -225,7 +231,7 @@ public void testTransactionWithQuery() { transaction.commit(); fail("Expecting a failure"); } catch (DatastoreException expected) { - assertEquals(DatastoreException.Code.ABORTED, expected.code()); + assertEquals("ABORTED", expected.reason()); } } @@ -461,6 +467,110 @@ public void testRunStructuredQuery() { assertFalse(results4.hasNext()); } + @Test + public void testQueryPaginationWithLimit() throws DatastoreException { + DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class); + DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) + .andReturn(rpcMock); + List responses = 
buildResponsesForQueryPaginationWithLimit(); + List endCursors = Lists.newArrayListWithCapacity(responses.size()); + for (RunQueryResponse response : responses) { + EasyMock.expect(rpcMock.runQuery(EasyMock.anyObject(RunQueryRequest.class))) + .andReturn(response); + if (response.getBatch().getMoreResults() != QueryResultBatch.MoreResultsType.NOT_FINISHED) { + endCursors.add(response.getBatch().getEndCursor()); + } + } + EasyMock.replay(rpcFactoryMock, rpcMock); + Datastore mockDatastore = options.toBuilder() + .retryParams(RetryParams.defaultInstance()) + .serviceRpcFactory(rpcFactoryMock) + .build() + .service(); + int limit = 2; + int totalCount = 0; + Iterator cursorIter = endCursors.iterator(); + StructuredQuery query = Query.entityQueryBuilder().limit(limit).build(); + while (true) { + QueryResults results = mockDatastore.run(query); + int resultCount = 0; + while (results.hasNext()) { + results.next(); + resultCount++; + totalCount++; + } + assertTrue(cursorIter.hasNext()); + Cursor expectedEndCursor = Cursor.copyFrom(cursorIter.next().toByteArray()); + assertEquals(expectedEndCursor, results.cursorAfter()); + if (resultCount < limit) { + break; + } + query = query.toBuilder().startCursor(results.cursorAfter()).build(); + } + assertEquals(5, totalCount); + EasyMock.verify(rpcFactoryMock, rpcMock); + } + + private List buildResponsesForQueryPaginationWithLimit() { + Entity entity4 = Entity.builder(KEY4).set("value", StringValue.of("value")).build(); + Entity entity5 = Entity.builder(KEY5).set("value", "value").build(); + datastore.add(ENTITY3, entity4, entity5); + DatastoreRpc datastoreRpc = datastore.options().rpc(); + List responses = new ArrayList<>(); + Query query = Query.entityQueryBuilder().build(); + RunQueryRequest.Builder requestPb = RunQueryRequest.newBuilder(); + query.populatePb(requestPb); + QueryResultBatch queryResultBatchPb = RunQueryResponse.newBuilder() + .mergeFrom(datastoreRpc.runQuery(requestPb.build())) + .getBatch(); + QueryResultBatch 
queryResultBatchPb1 = QueryResultBatch.newBuilder() + .mergeFrom(queryResultBatchPb) + .setMoreResults(QueryResultBatch.MoreResultsType.NOT_FINISHED) + .clearEntityResult() + .addAllEntityResult(queryResultBatchPb.getEntityResultList().subList(0, 1)) + .setEndCursor(ByteString.copyFromUtf8("a")) + .build(); + responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb1).build()); + QueryResultBatch queryResultBatchPb2 = QueryResultBatch.newBuilder() + .mergeFrom(queryResultBatchPb) + .setMoreResults(QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT) + .clearEntityResult() + .addAllEntityResult(queryResultBatchPb.getEntityResultList().subList(1, 2)) + .setEndCursor( + ByteString.copyFrom(new byte[] {(byte) 0x80})) // test invalid UTF-8 string + .build(); + responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb2).build()); + QueryResultBatch queryResultBatchPb3 = QueryResultBatch.newBuilder() + .mergeFrom(queryResultBatchPb) + .setMoreResults(QueryResultBatch.MoreResultsType.MORE_RESULTS_AFTER_LIMIT) + .clearEntityResult() + .addAllEntityResult(queryResultBatchPb.getEntityResultList().subList(2, 4)) + .setEndCursor(ByteString.copyFromUtf8("b")) + .build(); + responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb3).build()); + QueryResultBatch queryResultBatchPb4 = QueryResultBatch.newBuilder() + .mergeFrom(queryResultBatchPb) + .setMoreResults(QueryResultBatch.MoreResultsType.NO_MORE_RESULTS) + .clearEntityResult() + .addAllEntityResult(queryResultBatchPb.getEntityResultList().subList(4, 5)) + .setEndCursor(ByteString.copyFromUtf8("c")) + .build(); + responses.add(RunQueryResponse.newBuilder().setBatch(queryResultBatchPb4).build()); + return responses; + } + + @Test + public void testToUrlSafe() { + byte[][] invalidUtf8 = + new byte[][] {{(byte) 0xfe}, {(byte) 0xc1, (byte) 0xbf}, {(byte) 0xc0}, {(byte) 0x80}}; + for (byte[] bytes : invalidUtf8) { + assertFalse(ByteString.copyFrom(bytes).isValidUtf8()); + Cursor 
cursor = new Cursor(ByteString.copyFrom(bytes)); + assertEquals(cursor, Cursor.fromUrlSafe(cursor.toUrlSafe())); + } + } + @Test public void testAllocateId() { KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND1); @@ -552,7 +662,7 @@ public void testGetArrayNoDeferredResults() { assertFalse(result.hasNext()); } - public void testGetArrayDeferredResults() throws DatastoreRpcException { + public void testGetArrayDeferredResults() throws DatastoreException { Set requestedKeys = new HashSet<>(); requestedKeys.add(KEY1); requestedKeys.add(KEY2); @@ -567,7 +677,7 @@ public void testGetArrayDeferredResults() throws DatastoreRpcException { assertEquals(requestedKeys, keysOfFoundEntities); } - public void testFetchArrayDeferredResults() throws DatastoreRpcException { + public void testFetchArrayDeferredResults() throws DatastoreException { List foundEntities = createDatastoreForDeferredLookup().fetch(KEY1, KEY2, KEY3, KEY4, KEY5); assertEquals(foundEntities.get(0).key(), KEY1); @@ -578,7 +688,7 @@ public void testFetchArrayDeferredResults() throws DatastoreRpcException { assertEquals(foundEntities.size(), 5); } - private Datastore createDatastoreForDeferredLookup() throws DatastoreRpcException { + private Datastore createDatastoreForDeferredLookup() throws DatastoreException { List keysPb = new ArrayList<>(); keysPb.add(KEY1.toPb()); keysPb.add(KEY2.toPb()); @@ -625,7 +735,7 @@ private Datastore createDatastoreForDeferredLookup() throws DatastoreRpcExceptio EasyMock.replay(rpcFactoryMock, rpcMock); DatastoreOptions options = this.options.toBuilder() - .retryParams(RetryParams.getDefaultInstance()) + .retryParams(RetryParams.defaultInstance()) .serviceRpcFactory(rpcFactoryMock) .build(); return options.service(); @@ -734,11 +844,11 @@ public void testRetryableException() throws Exception { EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) .andReturn(rpcMock); EasyMock.expect(rpcMock.lookup(requestPb)) - .andThrow(new 
DatastoreRpc.DatastoreRpcException(Reason.UNAVAILABLE)) + .andThrow(new DatastoreException(503, "UNAVAILABLE", "UNAVAILABLE", null)) .andReturn(responsePb); EasyMock.replay(rpcFactoryMock, rpcMock); DatastoreOptions options = this.options.toBuilder() - .retryParams(RetryParams.getDefaultInstance()) + .retryParams(RetryParams.defaultInstance()) .serviceRpcFactory(rpcFactoryMock) .build(); Datastore datastore = options.service(); @@ -756,7 +866,8 @@ public void testNonRetryableException() throws Exception { EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class))) .andReturn(rpcMock); EasyMock.expect(rpcMock.lookup(requestPb)) - .andThrow(new DatastoreRpc.DatastoreRpcException(Reason.PERMISSION_DENIED)) + .andThrow( + new DatastoreException(DatastoreException.UNKNOWN_CODE, "denied", "PERMISSION_DENIED")) .times(1); EasyMock.replay(rpcFactoryMock, rpcMock); RetryParams retryParams = RetryParams.builder().retryMinAttempts(2).build(); @@ -766,7 +877,7 @@ public void testNonRetryableException() throws Exception { .build(); Datastore datastore = options.service(); thrown.expect(DatastoreException.class); - thrown.expectMessage(Reason.PERMISSION_DENIED.description()); + thrown.expectMessage("denied"); datastore.get(KEY1); EasyMock.verify(rpcFactoryMock, rpcMock); } @@ -784,7 +895,7 @@ public void testRuntimeException() throws Exception { .andThrow(new RuntimeException(exceptionMessage)); EasyMock.replay(rpcFactoryMock, rpcMock); DatastoreOptions options = this.options.toBuilder() - .retryParams(RetryParams.getDefaultInstance()) + .retryParams(RetryParams.defaultInstance()) .serviceRpcFactory(rpcFactoryMock) .build(); Datastore datastore = options.service(); diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/IncompleteKeyTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/IncompleteKeyTest.java index 7edbf133d330..acd1dfd3c9e3 100644 --- 
a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/IncompleteKeyTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/IncompleteKeyTest.java @@ -17,29 +17,47 @@ package com.google.gcloud.datastore; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import org.junit.Before; import org.junit.Test; public class IncompleteKeyTest { + private static IncompleteKey pk1, pk2; + private static Key parent1; + + @Before + public void setUp() { + pk1 = IncompleteKey.builder("ds", "kind1").build(); + parent1 = Key.builder("ds", "kind2", 10).namespace("ns").build(); + pk2 = IncompleteKey.builder(parent1, "kind3").build(); + } + @Test public void testBuilders() throws Exception { - IncompleteKey pk1 = IncompleteKey.builder("ds", "kind1").build(); assertEquals("ds", pk1.projectId()); assertEquals("kind1", pk1.kind()); assertTrue(pk1.ancestors().isEmpty()); - Key parent = Key.builder("ds", "kind2", 10).build(); - IncompleteKey pk2 = IncompleteKey.builder(parent, "kind3").build(); assertEquals("ds", pk2.projectId()); assertEquals("kind3", pk2.kind()); - assertEquals(parent.path(), pk2.ancestors()); + assertEquals(parent1.path(), pk2.ancestors()); assertEquals(pk2, IncompleteKey.builder(pk2).build()); IncompleteKey pk3 = IncompleteKey.builder(pk2).kind("kind4").build(); assertEquals("ds", pk3.projectId()); assertEquals("kind4", pk3.kind()); - assertEquals(parent.path(), pk3.ancestors()); + assertEquals(parent1.path(), pk3.ancestors()); + } + + @Test + public void testParent() { + assertNull(pk1.parent()); + assertEquals(parent1, pk2.parent()); + Key parent2 = Key.builder("ds", "kind3", "name").namespace("ns").build(); + IncompleteKey pk3 = IncompleteKey.builder(parent2, "kind3").build(); + assertEquals(parent2, pk3.parent()); } } diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java 
b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java index 40ea62c5a7e0..5d761a713506 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java @@ -49,7 +49,7 @@ public void testFindAvailablePort() { @Test public void testSendQuitRequest() throws IOException, InterruptedException { - LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT); + LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT, 0.75); assertTrue(LocalGcdHelper.sendQuitRequest(PORT)); long timeoutMillis = 30000; long startTime = System.currentTimeMillis(); @@ -64,7 +64,7 @@ public void testSendQuitRequest() throws IOException, InterruptedException { @Test public void testStartStop() throws IOException, InterruptedException { - LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT); + LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT, 0.75); assertFalse(LocalGcdHelper.isActive("wrong-project-id", PORT)); assertTrue(LocalGcdHelper.isActive(PROJECT_ID, PORT)); gcdHelper.stop(); diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java index 1ad690938ef5..3976be2cc383 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java @@ -143,8 +143,8 @@ public void testServiceOptions() throws Exception { options = options.toBuilder() .namespace("ns1") - .retryParams(RetryParams.getDefaultInstance()) - .authCredentials(AuthCredentials.noCredentials()) + .retryParams(RetryParams.defaultInstance()) + .authCredentials(null) .force(true) .build(); serializedCopy = serializeAndDeserialize(options); diff --git 
a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java new file mode 100644 index 000000000000..4b6589efd723 --- /dev/null +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java @@ -0,0 +1,172 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.datastore.Query.ResultType; +import com.google.gcloud.datastore.StructuredQuery.CompositeFilter; +import com.google.gcloud.datastore.StructuredQuery.Filter; +import com.google.gcloud.datastore.StructuredQuery.OrderBy; +import com.google.gcloud.datastore.StructuredQuery.Projection; +import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; + +import org.junit.Test; + +import java.util.List; + +public class StructuredQueryTest { + + private static final String NAMESPACE = "ns"; + private static final String KIND = "k"; + private static final Cursor START_CURSOR = Cursor.copyFrom(new byte[] {1, 2}); + private static final Cursor END_CURSOR = Cursor.copyFrom(new byte[] {10}); + private static final int OFFSET = 42; + private static final 
Integer LIMIT = 43; + private static final Filter FILTER = + CompositeFilter.and(PropertyFilter.gt("p1", 10), PropertyFilter.eq("a", "v")); + private static final OrderBy ORDER_BY_1 = OrderBy.asc("p2"); + private static final OrderBy ORDER_BY_2 = OrderBy.desc("p3"); + private static final List ORDER_BY = ImmutableList.of(ORDER_BY_1, ORDER_BY_2); + private static final Projection PROJECTION1 = Projection.property("p4"); + private static final Projection PROJECTION2 = Projection.property("p5"); + private static final List PROJECTION = ImmutableList.of(PROJECTION1, PROJECTION2); + private static final String GROUP_BY1 = "p6"; + private static final String GROUP_BY2 = "p7"; + private static final List GROUP_BY = ImmutableList.of(GROUP_BY1, GROUP_BY2); + private static final EntityQuery ENTITY_QUERY = Query.entityQueryBuilder() + .namespace(NAMESPACE) + .kind(KIND) + .startCursor(START_CURSOR) + .endCursor(END_CURSOR) + .offset(OFFSET) + .limit(LIMIT) + .filter(FILTER) + .orderBy(ORDER_BY_1, ORDER_BY_2) + .build(); + private static final KeyQuery KEY_QUERY = Query.keyQueryBuilder() + .namespace(NAMESPACE) + .kind(KIND) + .startCursor(START_CURSOR) + .endCursor(END_CURSOR) + .offset(OFFSET) + .limit(LIMIT) + .filter(FILTER) + .orderBy(ORDER_BY_1, ORDER_BY_2) + .build(); + private static final ProjectionEntityQuery PROJECTION_QUERY = + Query.projectionEntityQueryBuilder() + .namespace(NAMESPACE) + .kind(KIND) + .startCursor(START_CURSOR) + .endCursor(END_CURSOR) + .offset(OFFSET) + .limit(LIMIT) + .filter(FILTER) + .orderBy(ORDER_BY_1, ORDER_BY_2) + .projection(PROJECTION1, PROJECTION2) + .groupBy(GROUP_BY1, GROUP_BY2) + .build(); + + @Test + public void testEntityQueryBuilder() { + compareBaseBuilderFields(ENTITY_QUERY); + assertTrue(ENTITY_QUERY.projection().isEmpty()); + assertTrue(ENTITY_QUERY.groupBy().isEmpty()); + } + + @Test + public void testKeyQueryBuilder() { + compareBaseBuilderFields(KEY_QUERY); + assertEquals( + 
ImmutableList.of(Projection.property(StructuredQuery.KEY_PROPERTY_NAME)), + KEY_QUERY.projection()); + assertTrue(KEY_QUERY.groupBy().isEmpty()); + } + + @Test + public void testProjectionEntityQueryBuilder() { + compareBaseBuilderFields(PROJECTION_QUERY); + assertEquals(PROJECTION, PROJECTION_QUERY.projection()); + assertEquals(GROUP_BY, PROJECTION_QUERY.groupBy()); + } + + private void compareBaseBuilderFields(StructuredQuery query) { + assertEquals(NAMESPACE, query.namespace()); + assertEquals(KIND, query.kind()); + assertEquals(START_CURSOR, query.startCursor()); + assertEquals(END_CURSOR, query.endCursor()); + assertEquals(OFFSET, query.offset()); + assertEquals(LIMIT, query.limit()); + assertEquals(FILTER, query.filter()); + assertEquals(ORDER_BY, query.orderBy()); + } + + @Test + public void mergeFrom() { + compareMergedQuery( + ENTITY_QUERY, new EntityQuery.Builder().mergeFrom(ENTITY_QUERY.toPb()).build()); + compareMergedQuery(KEY_QUERY, new KeyQuery.Builder().mergeFrom(KEY_QUERY.toPb()).build()); + compareMergedQuery( + PROJECTION_QUERY, + new ProjectionEntityQuery.Builder().mergeFrom(PROJECTION_QUERY.toPb()).build()); + } + + private void compareMergedQuery(StructuredQuery expected, StructuredQuery actual) { + assertEquals(expected.kind(), actual.kind()); + assertEquals(expected.startCursor(), actual.startCursor()); + assertEquals(expected.endCursor(), actual.endCursor()); + assertEquals(expected.offset(), actual.offset()); + assertEquals(expected.limit(), actual.limit()); + assertEquals(expected.filter(), actual.filter()); + assertEquals(expected.orderBy(), actual.orderBy()); + assertEquals(expected.projection(), actual.projection()); + assertEquals(expected.groupBy(), actual.groupBy()); + } + + @Test + public void testToAndFromPb() { + assertEquals( + ENTITY_QUERY, + StructuredQuery.fromPb(ResultType.ENTITY, ENTITY_QUERY.namespace(), ENTITY_QUERY.toPb())); + assertEquals( + KEY_QUERY, StructuredQuery.fromPb(ResultType.KEY, KEY_QUERY.namespace(), 
KEY_QUERY.toPb())); + assertEquals( + PROJECTION_QUERY, + StructuredQuery.fromPb( + ResultType.PROJECTION_ENTITY, PROJECTION_QUERY.namespace(), PROJECTION_QUERY.toPb())); + } + + @Test + public void testToBuilder() { + List> queries = + ImmutableList.>of(ENTITY_QUERY, KEY_QUERY, PROJECTION_QUERY); + for (StructuredQuery query : queries) { + assertEquals(query, query.toBuilder().build()); + } + } + + @Test + public void testKeyOnly() { + assertTrue(KEY_QUERY.keyOnly()); + assertFalse(ENTITY_QUERY.keyOnly()); + assertFalse(PROJECTION_QUERY.keyOnly()); + } +} diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java index 199b3f90f442..891668990f66 100644 --- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java +++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java @@ -60,11 +60,13 @@ public class ValueTest { private ImmutableMap> typeToValue; + @SuppressWarnings("rawtypes") private class TestBuilder extends Value.BaseBuilder, TestBuilder> { TestBuilder() { super(ValueType.LIST); } + @SuppressWarnings({"unchecked"}) @Override public Value build() { return new Value(this) { @@ -197,6 +199,7 @@ public void testGet() throws Exception { @Test public void testToBuilder() throws Exception { Set content = Collections.singleton("bla"); + @SuppressWarnings("rawtypes") ValueBuilder builder = new TestBuilder(); builder.meaning(1).set(content).indexed(true); Value value = builder.build(); diff --git a/gcloud-java-examples/README.md b/gcloud-java-examples/README.md index 366acd5de929..7d54b8f3e1a9 100644 --- a/gcloud-java-examples/README.md +++ b/gcloud-java-examples/README.md @@ -6,49 +6,94 @@ Examples for gcloud-java (Java idiomatic client for [Google Cloud Platform][clou [![Build 
Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-examples.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-examples.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) - [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) - [Examples] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/examples/package-summary.html) Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml com.google.gcloud gcloud-java-examples - 0.0.10 + 0.1.5 ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-examples:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.1.5" +``` To run examples from your command line: 1. Login using gcloud SDK (`gcloud auth login` in command line) -2. Set your current project using `gcloud config set project PROJECT_ID` +2. Set your current project using `gcloud config set project PROJECT_ID`. This step is not necessary for `ResourceManagerExample`. 3. Compile using Maven (`mvn compile` in command line from your base project directory) 4. Run an example using Maven from command line. - Here's an example run of `DatastoreExample`. 
+ * Here's an example run of `BigQueryExample`. + + Before running the example, go to the [Google Developers Console][developers-console] to ensure + that BigQuery API is enabled. You can upload a CSV file `my_csv_file` to the `my_bucket` bucket + (replace `my_csv_file` and `my_bucket` with actual file and bucket names) using the GCS + [web browser](https://console.developers.google.com/storage/browser). The CSV file will be used to + load data into a BigQuery table and should look something like: + ```csv + value1 + value2 + value3 + ``` + Then you are ready to run the following example: + ``` + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample" -Dexec.args="create dataset new_dataset_id" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample" -Dexec.args="create table new_dataset_id new_table_id field_name:string" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample" -Dexec.args="list tables new_dataset_id" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample" -Dexec.args="load new_dataset_id new_table_id CSV gs://my_bucket/my_csv_file" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample" -Dexec.args="query 'select * from new_dataset_id.new_table_id'" + ``` + + * Here's an example run of `DatastoreExample`. - Note that you have to enable the Google Cloud Datastore API on the [Google Developers Console][developers-console] before running the following commands. - ``` - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name add my\ comment" - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name display" - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name delete" - ``` - - Here's an example run of `StorageExample`. 
- - Before running the example, go to the [Google Developers Console][developers-console] to ensure that Google Cloud Storage API is enabled and that you have a bucket. Also ensure that you have a test file (`test.txt` is chosen here) to upload to Cloud Storage stored locally on your machine. - ``` - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="upload /path/to/test.txt " - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="list " - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="download test.txt" - $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="delete test.txt" -``` + Be sure to change the placeholder project ID "your-project-id" with your own project ID. Also note that you have to enable the Google Cloud Datastore API on the [Google Developers Console][developers-console] before running the following commands. + ``` + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.datastore.DatastoreExample" -Dexec.args="your-project-id my_name add my\ comment" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.datastore.DatastoreExample" -Dexec.args="your-project-id my_name display" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.datastore.DatastoreExample" -Dexec.args="your-project-id my_name delete" + ``` + + * Here's an example run of `ResourceManagerExample`. + + Be sure to change the placeholder project ID "your-project-id" with your own globally unique project ID. 
+ ``` + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.resourcemanager.ResourceManagerExample" -Dexec.args="create your-project-id" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.resourcemanager.ResourceManagerExample" -Dexec.args="list" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.resourcemanager.ResourceManagerExample" -Dexec.args="get your-project-id" + ``` + + * Here's an example run of `StorageExample`. + + Before running the example, go to the [Google Developers Console][developers-console] to ensure that Google Cloud Storage API is enabled and that you have a bucket. Also ensure that you have a test file (`test.txt` is chosen here) to upload to Cloud Storage stored locally on your machine. + ``` + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.storage.StorageExample" -Dexec.args="upload /path/to/test.txt " + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.storage.StorageExample" -Dexec.args="list " + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.storage.StorageExample" -Dexec.args="download test.txt" + mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.storage.StorageExample" -Dexec.args="delete test.txt" + ``` + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). Java Versions ------------- @@ -69,7 +114,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. 
By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -78,6 +125,7 @@ Apache 2.0 - See [LICENSE] for more information. [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [cloud-platform]: https://cloud.google.com/ [developers-console]:https://console.developers.google.com/ diff --git a/gcloud-java-examples/pom.xml b/gcloud-java-examples/pom.xml index 55548e756be3..111308658c2e 100644 --- a/gcloud-java-examples/pom.xml +++ b/gcloud-java-examples/pom.xml @@ -1,7 +1,6 @@ 4.0.0 - com.google.gcloud gcloud-java-examples jar GCloud Java examples @@ -11,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-examples diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java new file mode 100644 index 000000000000..fe27ee3cf63b --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/BigQueryExample.java @@ -0,0 +1,792 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.examples.bigquery; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryError; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.CopyJobConfiguration; +import com.google.gcloud.bigquery.Dataset; +import com.google.gcloud.bigquery.DatasetId; +import com.google.gcloud.bigquery.DatasetInfo; +import com.google.gcloud.bigquery.ExternalTableDefinition; +import com.google.gcloud.bigquery.ExtractJobConfiguration; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.FormatOptions; +import com.google.gcloud.bigquery.Job; +import com.google.gcloud.bigquery.JobId; +import com.google.gcloud.bigquery.JobInfo; +import com.google.gcloud.bigquery.LoadJobConfiguration; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; +import com.google.gcloud.bigquery.ViewDefinition; +import com.google.gcloud.bigquery.WriteChannelConfiguration; +import com.google.gcloud.bigquery.spi.BigQueryRpc.Tuple; + +import java.nio.channels.FileChannel; +import java.nio.file.Paths; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * An example of using Google BigQuery. + * + *

This example demonstrates a simple/typical BigQuery usage. + * + *

Steps needed for running the example: + *

    + *
  1. login using gcloud SDK - {@code gcloud auth login}.
  2. + *
  3. compile using maven - {@code mvn compile}
  4. + *
  5. run using maven - + *
    {@code mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.bigquery.BigQueryExample"
    + *  -Dexec.args="[]
    + *  list datasets |
    + *  list tables  |
    + *  list jobs |
    + *  list data  
| + * info dataset | + * info table
| + * info job | + * create dataset | + * create table
(:)+ | + * create view
| + * create external-table
(:)+ | + * delete dataset | + * delete table
| + * cancel | + * copy | + * load
+ | + * extract
+ | + * query | + * load-file
"} + * + * + * + *

The first parameter is an optional {@code project_id} (logged-in project will be used if not + * supplied). Second parameter is a BigQuery operation and can be used to demonstrate its usage. For + * operations that apply to more than one entity (`list`, `create`, `info` and `delete`) the third + * parameter specifies the entity. {@code } indicates that only primitive types are + * supported by the {@code create table} and {@code create external-table} operations + * ({@code string}, {@code float}, {@code integer}, {@code timestamp}, {@code boolean}). + * {@code }, {@code } and {@code } parameters are URIs to + * Google Cloud Storage blobs, in the form {@code gs://bucket/path}. See each action's run method + * for the specific BigQuery interaction. + */ +public class BigQueryExample { + + private static final int CHUNK_SIZE = 8 * 256 * 1024; + private static final Map CREATE_ACTIONS = new HashMap<>(); + private static final Map INFO_ACTIONS = new HashMap<>(); + private static final Map LIST_ACTIONS = new HashMap<>(); + private static final Map DELETE_ACTIONS = new HashMap<>(); + private static final Map ACTIONS = new HashMap<>(); + + private abstract static class BigQueryAction { + + abstract void run(BigQuery bigquery, T request) throws Exception; + + abstract T parse(String... args) throws Exception; + + protected String params() { + return ""; + } + } + + private static class ParentAction extends BigQueryAction> { + + private final Map subActions; + + public ParentAction(Map subActions) { + this.subActions = ImmutableMap.copyOf(subActions); + } + + @Override + @SuppressWarnings("unchecked") + void run(BigQuery bigquery, Tuple subaction) throws Exception { + subaction.x().run(bigquery, subaction.y()); + } + + @Override + Tuple parse(String... 
args) throws Exception { + if (args.length >= 1) { + BigQueryAction action = subActions.get(args[0]); + if (action != null) { + Object actionArguments = action.parse(Arrays.copyOfRange(args, 1, args.length)); + return Tuple.of(action, actionArguments); + } else { + throw new IllegalArgumentException("Unrecognized entity '" + args[0] + "'."); + } + } + throw new IllegalArgumentException("Missing required entity."); + } + + @Override + public String params() { + StringBuilder builder = new StringBuilder(); + for (Map.Entry entry : subActions.entrySet()) { + builder.append('\n').append(entry.getKey()); + String param = entry.getValue().params(); + if (param != null && !param.isEmpty()) { + builder.append(' ').append(param); + } + } + return builder.toString(); + } + } + + private abstract static class NoArgsAction extends BigQueryAction { + @Override + Void parse(String... args) throws Exception { + if (args.length == 0) { + return null; + } + throw new IllegalArgumentException("This action takes no arguments."); + } + } + + /** + * This class demonstrates how to list BigQuery Datasets. + * + * @see Datasets: list + * + */ + private static class ListDatasetsAction extends NoArgsAction { + @Override + public void run(BigQuery bigquery, Void arg) { + Iterator datasetInfoIterator = bigquery.listDatasets().iterateAll(); + while (datasetInfoIterator.hasNext()) { + System.out.println(datasetInfoIterator.next()); + } + } + } + + private abstract static class DatasetAction extends BigQueryAction { + @Override + DatasetId parse(String... args) throws Exception { + String message; + if (args.length == 1) { + return DatasetId.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required dataset id."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return ""; + } + } + + /** + * This class demonstrates how to list BigQuery Tables in a Dataset. 
+ * + * @see Tables: list + */ + private static class ListTablesAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + Iterator

tableInfoIterator = bigquery.listTables(datasetId).iterateAll(); + while (tableInfoIterator.hasNext()) { + System.out.println(tableInfoIterator.next()); + } + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Dataset. + * + * @see Datasets: get + * + */ + private static class DatasetInfoAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + System.out.println("Dataset info: " + bigquery.getDataset(datasetId)); + } + } + + /** + * This class demonstrates how to create a BigQuery Dataset. + * + * @see Datasets: + * insert + */ + private static class CreateDatasetAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + bigquery.create(DatasetInfo.builder(datasetId).build()); + System.out.println("Created dataset " + datasetId); + } + } + + /** + * This class demonstrates how to delete a BigQuery Dataset. + * + * @see Datasets: + * delete + */ + private static class DeleteDatasetAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + if (bigquery.delete(datasetId)) { + System.out.println("Dataset " + datasetId + " was deleted"); + } else { + System.out.println("Dataset " + datasetId + " not found"); + } + } + } + + private abstract static class TableAction extends BigQueryAction { + @Override + TableId parse(String... args) throws Exception { + String message; + if (args.length == 2) { + return TableId.of(args[0], args[1]); + } else if (args.length < 2) { + message = "Missing required dataset and table id."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return "
"; + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Table. + * + * @see Tables: get + */ + private static class TableInfoAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + System.out.println("Table info: " + bigquery.getTable(tableId)); + } + } + + /** + * This class demonstrates how to delete a BigQuery Table. + * + * @see Tables: delete + * + */ + private static class DeleteTableAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + if (bigquery.delete(tableId)) { + System.out.println("Table " + tableId + " was deleted"); + } else { + System.out.println("Table " + tableId + " not found"); + } + } + } + + /** + * This class demonstrates how to list the rows in a BigQuery Table. + * + * @see Tabledata: + * list + */ + private static class ListTableDataAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + Iterator> iterator = bigquery.listTableData(tableId).iterateAll(); + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } + } + + private abstract static class JobAction extends BigQueryAction { + @Override + JobId parse(String... args) throws Exception { + String message; + if (args.length == 1) { + return JobId.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required query."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return ""; + } + } + + /** + * This class demonstrates how to list BigQuery Jobs. 
+ * + * @see Jobs: list + */ + private static class ListJobsAction extends NoArgsAction { + @Override + public void run(BigQuery bigquery, Void arg) { + Iterator datasetInfoIterator = bigquery.listJobs().iterateAll(); + while (datasetInfoIterator.hasNext()) { + System.out.println(datasetInfoIterator.next()); + } + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Job. + * + * @see Jobs: get + */ + private static class JobInfoAction extends JobAction { + @Override + public void run(BigQuery bigquery, JobId jobId) { + System.out.println("Job info: " + bigquery.getJob(jobId)); + } + } + + /** + * This class demonstrates how to cancel a BigQuery Job. + * + * @see Jobs: cancel + */ + private static class CancelJobAction extends JobAction { + @Override + public void run(BigQuery bigquery, JobId jobId) { + if (bigquery.cancel(jobId)) { + System.out.println("Requested cancel for job " + jobId); + } else { + System.out.println("Job " + jobId + " not found"); + } + } + } + + private abstract static class CreateTableAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, TableInfo table) throws Exception { + Table createTable = bigquery.create(table); + System.out.println("Created table:"); + System.out.println(createTable.toString()); + } + + static Schema parseSchema(String[] args, int start, int end) { + Schema.Builder builder = Schema.builder(); + for (int i = start; i < end; i++) { + String[] fieldsArray = args[i].split(":"); + if (fieldsArray.length != 2) { + throw new IllegalArgumentException("Unrecognized field definition '" + args[i] + "'."); + } + String fieldName = fieldsArray[0]; + String typeString = fieldsArray[1].toLowerCase(); + Field.Type fieldType; + switch (typeString) { + case "string": + fieldType = Field.Type.string(); + break; + case "integer": + fieldType = Field.Type.integer(); + break; + case "timestamp": + fieldType = Field.Type.timestamp(); + break; + case "float": + fieldType = 
Field.Type.floatingPoint(); + break; + case "boolean": + fieldType = Field.Type.bool(); + break; + default: + throw new IllegalArgumentException("Unrecognized field type '" + typeString + "'."); + } + builder.addField(Field.of(fieldName, fieldType)); + } + return builder.build(); + } + } + + /** + * This class demonstrates how to create a simple BigQuery Table (i.e. a table created from a + * {@link StandardTableDefinition}). + * + * @see Tables: insert + * + */ + private static class CreateSimpleTableAction extends CreateTableAction { + @Override + TableInfo parse(String... args) throws Exception { + if (args.length >= 3) { + String dataset = args[0]; + String table = args[1]; + TableId tableId = TableId.of(dataset, table); + return TableInfo.of(tableId, StandardTableDefinition.of(parseSchema(args, 2, args.length))); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
(:)+"; + } + } + + /** + * This class demonstrates how to create a BigQuery External Table (i.e. a table created from a + * {@link ExternalTableDefinition}). + * + * @see Tables: insert + * + */ + private static class CreateExternalTableAction extends CreateTableAction { + @Override + TableInfo parse(String... args) throws Exception { + if (args.length >= 5) { + String dataset = args[0]; + String table = args[1]; + TableId tableId = TableId.of(dataset, table); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.of(args[args.length - 1], + parseSchema(args, 3, args.length - 1), FormatOptions.of(args[2])); + return TableInfo.of(tableId, externalTableDefinition); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
(:)+ "; + } + } + + /** + * This class demonstrates how to create a BigQuery View Table (i.e. a table created from a + * {@link ViewDefinition}). + * + * @see Tables: insert + * + */ + private static class CreateViewAction extends CreateTableAction { + @Override + TableInfo parse(String... args) throws Exception { + String message; + if (args.length == 3) { + String dataset = args[0]; + String table = args[1]; + String query = args[2]; + TableId tableId = TableId.of(dataset, table); + return TableInfo.of(tableId, ViewDefinition.of(query)); + } else if (args.length < 3) { + message = "Missing required dataset id, table id or query."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return "
"; + } + } + + private abstract static class JobRunAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, JobInfo job) throws Exception { + System.out.println("Creating job"); + Job startedJob = bigquery.create(job); + while (!startedJob.isDone()) { + System.out.println("Waiting for job " + startedJob.jobId().job() + " to complete"); + Thread.sleep(1000L); + } + if (startedJob.status().error() == null) { + System.out.println("Job " + startedJob.jobId().job() + " succeeded"); + } else { + System.out.println("Job " + startedJob.jobId().job() + " failed"); + System.out.println("Error: " + startedJob.status().error()); + } + } + } + + /** + * This class demonstrates how to create a BigQuery Load Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class LoadAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + if (args.length >= 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + LoadJobConfiguration configuration = LoadJobConfiguration.of( + tableId, Arrays.asList(args).subList(3, args.length), FormatOptions.of(format)); + return JobInfo.of(configuration); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
+"; + } + } + + /** + * This class demonstrates how to create a BigQuery Extract Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class ExtractAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + if (args.length >= 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + ExtractJobConfiguration configuration = ExtractJobConfiguration.of( + tableId, Arrays.asList(args).subList(3, args.length), format); + return JobInfo.of(configuration); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
+"; + } + } + + /** + * This class demonstrates how to create a BigQuery Copy Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class CopyAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + String message; + if (args.length == 4) { + TableId sourceTableId = TableId.of(args[0], args[1]); + TableId destinationTableId = TableId.of(args[2], args[3]); + return JobInfo.of(CopyJobConfiguration.of(destinationTableId, sourceTableId)); + } else if (args.length < 3) { + message = "Missing required source or destination table."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return " "; + } + } + + /** + * This class demonstrates how to run a BigQuery SQL Query and wait for associated job to + * complete. Results or errors are shown. + * + * @see Jobs: query + */ + private static class QueryAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, QueryRequest queryRequest) throws Exception { + System.out.println("Running query"); + QueryResponse queryResponse = bigquery.query(queryRequest); + while (!queryResponse.jobCompleted()) { + System.out.println("Waiting for query job " + queryResponse.jobId() + " to complete"); + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); + } + if (!queryResponse.hasErrors()) { + System.out.println("Query succeeded. Results:"); + Iterator> iterator = queryResponse.result().iterateAll(); + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } else { + System.out.println("Query completed with errors. Errors:"); + for (BigQueryError err : queryResponse.executionErrors()) { + System.out.println(err); + } + } + } + + @Override + QueryRequest parse(String... 
args) throws Exception { + String message; + if (args.length == 1) { + return QueryRequest.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required query."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return ""; + } + } + + /** + * This class demonstrates how to load data into a BigQuery Table from a local file. + * + * @see Resumable + * Upload + */ + private static class LoadFileAction + extends BigQueryAction> { + @Override + void run(BigQuery bigquery, Tuple configuration) + throws Exception { + System.out.println("Running insert"); + try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()))) { + WriteChannel writeChannel = bigquery.writer(configuration.x()); + long position = 0; + long written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel); + while (written > 0) { + position += written; + written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel); + } + writeChannel.close(); + } + } + + @Override + Tuple parse(String... args) throws Exception { + if (args.length == 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + WriteChannelConfiguration configuration = + WriteChannelConfiguration.of(tableId, FormatOptions.of(format)); + return Tuple.of(configuration, args[3]); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
"; + } + } + + static { + CREATE_ACTIONS.put("dataset", new CreateDatasetAction()); + CREATE_ACTIONS.put("table", new CreateSimpleTableAction()); + CREATE_ACTIONS.put("view", new CreateViewAction()); + CREATE_ACTIONS.put("external-table", new CreateExternalTableAction()); + INFO_ACTIONS.put("dataset", new DatasetInfoAction()); + INFO_ACTIONS.put("table", new TableInfoAction()); + INFO_ACTIONS.put("job", new JobInfoAction()); + LIST_ACTIONS.put("datasets", new ListDatasetsAction()); + LIST_ACTIONS.put("tables", new ListTablesAction()); + LIST_ACTIONS.put("jobs", new ListJobsAction()); + LIST_ACTIONS.put("data", new ListTableDataAction()); + DELETE_ACTIONS.put("dataset", new DeleteDatasetAction()); + DELETE_ACTIONS.put("table", new DeleteTableAction()); + ACTIONS.put("create", new ParentAction(CREATE_ACTIONS)); + ACTIONS.put("info", new ParentAction(INFO_ACTIONS)); + ACTIONS.put("list", new ParentAction(LIST_ACTIONS)); + ACTIONS.put("delete", new ParentAction(DELETE_ACTIONS)); + ACTIONS.put("cancel", new CancelJobAction()); + ACTIONS.put("load", new LoadAction()); + ACTIONS.put("extract", new ExtractAction()); + ACTIONS.put("copy", new CopyAction()); + ACTIONS.put("query", new QueryAction()); + ACTIONS.put("load-file", new LoadFileAction()); + } + + private static void printUsage() { + StringBuilder actionAndParams = new StringBuilder(); + for (Map.Entry entry : ACTIONS.entrySet()) { + actionAndParams.append("\n\t").append(entry.getKey()); + + String param = entry.getValue().params(); + if (param != null && !param.isEmpty()) { + actionAndParams.append(' ').append(param.replace("\n", "\n\t\t")); + } + } + System.out.printf("Usage: %s [] operation [entity] *%s%n", + BigQueryExample.class.getSimpleName(), actionAndParams); + } + + @SuppressWarnings("unchecked") + public static void main(String... 
args) throws Exception { + if (args.length < 1) { + System.out.println("Missing required project id and action"); + printUsage(); + return; + } + BigQueryOptions.Builder optionsBuilder = BigQueryOptions.builder(); + BigQueryAction action; + String actionName; + if (args.length >= 2 && !ACTIONS.containsKey(args[0])) { + actionName = args[1]; + optionsBuilder.projectId(args[0]); + action = ACTIONS.get(args[1]); + args = Arrays.copyOfRange(args, 2, args.length); + } else { + actionName = args[0]; + action = ACTIONS.get(args[0]); + args = Arrays.copyOfRange(args, 1, args.length); + } + if (action == null) { + System.out.println("Unrecognized action."); + printUsage(); + return; + } + BigQuery bigquery = optionsBuilder.build().service(); + Object request; + try { + request = action.parse(args); + } catch (IllegalArgumentException ex) { + System.out.println("Invalid input for action '" + actionName + "'. " + ex.getMessage()); + System.out.println("Expected: " + action.params()); + return; + } catch (Exception ex) { + System.out.println("Failed to parse request."); + ex.printStackTrace(); + return; + } + action.run(bigquery, request); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java new file mode 100644 index 000000000000..857f6b43d013 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/CreateTableAndLoadData.java @@ -0,0 +1,64 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.bigquery.snippets; + +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FormatOptions; +import com.google.gcloud.bigquery.Job; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.Table; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; + +/** + * A snippet for Google Cloud BigQuery showing how to get a BigQuery table or create it if it does + * not exist. The snippet also starts a BigQuery job to load data into the table from a Cloud + * Storage blob and wait until the job completes. + */ +public class CreateTableAndLoadData { + + public static void main(String... 
args) throws InterruptedException { + BigQuery bigquery = BigQueryOptions.defaultInstance().service(); + TableId tableId = TableId.of("dataset", "table"); + Table table = bigquery.getTable(tableId); + if (table == null) { + System.out.println("Creating table " + tableId); + Field integerField = Field.of("fieldName", Field.Type.integer()); + Schema schema = Schema.of(integerField); + table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema))); + } + System.out.println("Loading data into table " + tableId); + Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path"); + while (!loadJob.isDone()) { + Thread.sleep(1000L); + } + if (loadJob.status().error() != null) { + System.out.println("Job completed with errors"); + } else { + System.out.println("Job succeeded"); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java new file mode 100644 index 000000000000..ba2d1291b229 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/bigquery/snippets/InsertDataAndQueryTable.java @@ -0,0 +1,102 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. 
Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.bigquery.snippets; + +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.DatasetInfo; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.InsertAllRequest; +import com.google.gcloud.bigquery.InsertAllResponse; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.StandardTableDefinition; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * A snippet for Google Cloud BigQuery showing how to create a BigQuery dataset and table. Once + * created, the snippet streams data into the table and then queries it. + */ +public class InsertDataAndQueryTable { + + public static void main(String... 
args) throws InterruptedException { + // Create a service instance + BigQuery bigquery = BigQueryOptions.defaultInstance().service(); + + // Create a dataset + String datasetId = "my_dataset_id"; + bigquery.create(DatasetInfo.builder(datasetId).build()); + + TableId tableId = TableId.of(datasetId, "my_table_id"); + // Table field definition + Field stringField = Field.of("StringField", Field.Type.string()); + // Table schema definition + Schema schema = Schema.of(stringField); + // Create a table + StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema); + bigquery.create(TableInfo.of(tableId, tableDefinition)); + + // Define rows to insert + Map firstRow = new HashMap<>(); + Map secondRow = new HashMap<>(); + firstRow.put("StringField", "value1"); + secondRow.put("StringField", "value2"); + // Create an insert request + InsertAllRequest insertRequest = InsertAllRequest.builder(tableId) + .addRow(firstRow) + .addRow(secondRow) + .build(); + // Insert rows + InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); + // Check if errors occurred + if (insertResponse.hasErrors()) { + System.out.println("Errors occurred while inserting rows"); + } + + // Create a query request + QueryRequest queryRequest = QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") + .maxWaitTime(60000L) + .pageSize(1000L) + .build(); + // Request query to be executed and wait for results + QueryResponse queryResponse = bigquery.query(queryRequest); + while (!queryResponse.jobCompleted()) { + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); + } + // Read rows + Iterator> rowIterator = queryResponse.result().iterateAll(); + System.out.println("Table rows:"); + while (rowIterator.hasNext()) { + System.out.println(rowIterator.next()); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java 
b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/DatastoreExample.java similarity index 84% rename from gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java rename to gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/DatastoreExample.java index 93bb1b5b93b4..cc4331734200 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/DatastoreExample.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.google.gcloud.examples; +package com.google.gcloud.examples.datastore; import com.google.gcloud.datastore.Datastore; import com.google.gcloud.datastore.DatastoreOptions; @@ -25,8 +25,8 @@ import com.google.gcloud.datastore.Key; import com.google.gcloud.datastore.KeyFactory; import com.google.gcloud.datastore.Query; -import com.google.gcloud.datastore.Query.ResultType; import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery; import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; import com.google.gcloud.datastore.Transaction; @@ -36,15 +36,15 @@ import java.util.TreeMap; /** - * An example of using the Google Cloud Datastore. - *

- * This example adds, display or clear comments for a given user. - *

- * Steps needed for running the example:

    + * An example of using Google Cloud Datastore. + * + *

    This example adds, display or clear comments for a given user. + * + *

    Steps needed for running the example:

      *
    1. login using gcloud SDK - {@code gcloud auth login}.
    2. *
    3. compile using maven - {@code mvn compile}
    4. *
    5. run using maven - {@code mvn exec:java - * -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" + * -Dexec.mainClass="com.google.gcloud.examples.datastore.DatastoreExample" * -Dexec.args="[projectId] [user] [delete|display|add comment]"}
    6. *
    */ @@ -58,6 +58,7 @@ public class DatastoreExample { private interface DatastoreAction { void run(Transaction tx, Key userKey, String... args); + String getRequiredParams(); } @@ -66,7 +67,7 @@ private static class DeleteAction implements DatastoreAction { public void run(Transaction tx, Key userKey, String... args) { Entity user = tx.get(userKey); if (user == null) { - System.out.println("Nothing to delete, user does not exists."); + System.out.println("Nothing to delete, user does not exist."); return; } Query query = Query.keyQueryBuilder() @@ -99,21 +100,31 @@ public void run(Transaction tx, Key userKey, String... args) { return; } System.out.printf("User '%s' has %d comment[s].%n", userKey.name(), user.getLong("count")); - // ORDER BY timestamp"; - String gql = "SELECT * FROM " + COMMENT_KIND + " WHERE __key__ HAS ANCESTOR @1"; - Query query = Query.gqlQueryBuilder(ResultType.ENTITY, gql) - .namespace(NAMESPACE) - .addBinding(userKey) - .build(); - QueryResults results = tx.run(query); - // We could have added "ORDER BY timestamp" to the query to avoid the sorting bellow - // but that would require adding an ancestor index for timestamp - // see: https://cloud.google.com/datastore/docs/tools/indexconfig + int limit = 200; Map sortedComments = new TreeMap<>(); - while (results.hasNext()) { - Entity result = results.next(); - sortedComments.put(result.getDateTime("timestamp"), result.getString("content")); + StructuredQuery query = + Query.entityQueryBuilder() + .namespace(NAMESPACE) + .kind(COMMENT_KIND) + .filter(PropertyFilter.hasAncestor(userKey)) + .limit(limit) + .build(); + while (true) { + QueryResults results = tx.run(query); + int resultCount = 0; + while (results.hasNext()) { + Entity result = results.next(); + sortedComments.put(result.getDateTime("timestamp"), result.getString("content")); + resultCount++; + } + if (resultCount < limit) { + break; + } + query = query.toBuilder().startCursor(results.cursorAfter()).build(); } + // We could have 
added "ORDER BY timestamp" to the query to avoid sorting, but that would + // require adding an ancestor index for timestamp. + // See: https://cloud.google.com/datastore/docs/tools/indexconfig for (Map.Entry entry : sortedComments.entrySet()) { System.out.printf("\t%s: %s%n", entry.getKey(), entry.getValue()); } diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/AddEntitiesAndRunQuery.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/AddEntitiesAndRunQuery.java new file mode 100644 index 000000000000..f1e844c79b24 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/AddEntitiesAndRunQuery.java @@ -0,0 +1,84 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. 
+ */ + +package com.google.gcloud.examples.datastore.snippets; + +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; +import com.google.gcloud.datastore.Query; +import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; + +/** + * A snippet for Google Cloud Datastore showing how to create and get entities. The snippet also + * shows how to run a query against Datastore. + */ +public class AddEntitiesAndRunQuery { + + public static void main(String... args) { + // Create datastore service object. + // By default, credentials are inferred from the runtime environment. + Datastore datastore = DatastoreOptions.defaultInstance().service(); + + // Add an entity to Datastore + KeyFactory keyFactory = datastore.newKeyFactory().kind("Person"); + Key key = keyFactory.newKey("john.doe@gmail.com"); + Entity entity = Entity.builder(key) + .set("name", "John Doe") + .set("age", 51) + .set("favorite_food", "pizza") + .build(); + datastore.put(entity); + + // Get an entity from Datastore + Entity johnEntity = datastore.get(key); + + // Add a couple more entities to make the query results more interesting + Key janeKey = keyFactory.newKey("jane.doe@gmail.com"); + Entity janeEntity = Entity.builder(janeKey) + .set("name", "Jane Doe") + .set("age", 44) + .set("favorite_food", "pizza") + .build(); + Key joeKey = keyFactory.newKey("joe.shmoe@gmail.com"); + Entity joeEntity = Entity.builder(joeKey) + .set("name", "Joe Shmoe") + .set("age", 27) + .set("favorite_food", "sushi") + .build(); + datastore.put(janeEntity, joeEntity); + + // Run a query + Query query = Query.entityQueryBuilder() + .kind("Person") + .filter(PropertyFilter.eq("favorite_food", "pizza")) + .build(); + QueryResults results = datastore.run(query); + while (results.hasNext()) { + 
Entity currentEntity = results.next(); + System.out.println(currentEntity.getString("name") + ", you're invited to a pizza party!"); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/CreateEntity.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/CreateEntity.java new file mode 100644 index 000000000000..3981162a2943 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/CreateEntity.java @@ -0,0 +1,48 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.datastore.snippets; + +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.DateTime; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; + +/** + * A snippet for Google Cloud Datastore showing how to create an entity. + */ +public class CreateEntity { + + public static void main(String... 
args) { + Datastore datastore = DatastoreOptions.defaultInstance().service(); + KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind"); + Key key = keyFactory.newKey("keyName"); + Entity entity = Entity.builder(key) + .set("name", "John Doe") + .set("age", 30) + .set("access_time", DateTime.now()) + .build(); + datastore.put(entity); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/UpdateEntity.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/UpdateEntity.java new file mode 100644 index 000000000000..cbc97f0784dd --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/datastore/snippets/UpdateEntity.java @@ -0,0 +1,50 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. 
+ */ + +package com.google.gcloud.examples.datastore.snippets; + +import com.google.gcloud.datastore.Datastore; +import com.google.gcloud.datastore.DatastoreOptions; +import com.google.gcloud.datastore.DateTime; +import com.google.gcloud.datastore.Entity; +import com.google.gcloud.datastore.Key; +import com.google.gcloud.datastore.KeyFactory; + +/** + * A snippet for Google Cloud Datastore showing how to get an entity and update it if it exists. + */ +public class UpdateEntity { + + public static void main(String... args) { + Datastore datastore = DatastoreOptions.defaultInstance().service(); + KeyFactory keyFactory = datastore.newKeyFactory().kind("keyKind"); + Key key = keyFactory.newKey("keyName"); + Entity entity = datastore.get(key); + if (entity != null) { + System.out.println("Updating access_time for " + entity.getString("name")); + entity = Entity.builder(entity) + .set("access_time", DateTime.now()) + .build(); + datastore.update(entity); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java new file mode 100644 index 000000000000..349c0eebe73d --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java @@ -0,0 +1,224 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.examples.resourcemanager; + +import com.google.common.base.Joiner; +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Scanner; + +/** + * An example of using Google Cloud Resource Manager. + * + *

    This example creates, deletes, gets, and lists projects. + * + *

    Steps needed for running the example:

      + *
    1. login using gcloud SDK - {@code gcloud auth login}.
    2. + *
    3. compile using maven - {@code mvn compile}
    4. + *
    5. run using maven - {@code mvn exec:java + * -Dexec.mainClass="com.google.gcloud.examples.resourcemanager.ResourceManagerExample" + * -Dexec.args="[list | [create | delete | get] projectId]"}
    6. + *
    + */ +public class ResourceManagerExample { + + private static final String DEFAULT_ACTION = "list"; + private static final Map ACTIONS = new HashMap<>(); + + private interface ResourceManagerAction { + void run(ResourceManager resourceManager, String... args); + + String[] getRequiredParams(); + + String[] getOptionalParams(); + } + + private static class CreateAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + Map labels = new HashMap<>(); + for (int i = 1; i < args.length; i += 2) { + if (i + 1 < args.length) { + labels.put(args[i], args[i + 1]); + } else { + labels.put(args[i], ""); + } + } + Project project = + resourceManager.create(ProjectInfo.builder(projectId).labels(labels).build()); + System.out.printf( + "Successfully created project '%s': %s.%n", projectId, projectDetails(project)); + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {"label-key-1", "label-value-1", "label-key-2", "label-value-2", "..."}; + } + } + + private static class DeleteAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + System.out.printf("Going to delete project \"%s\". 
Are you sure [y/N]: ", projectId); + Scanner scanner = new Scanner(System.in); + if (scanner.nextLine().toLowerCase().equals("y")) { + resourceManager.delete(projectId); + System.out.println("Successfully deleted project " + projectId + "."); + } else { + System.out.println("Will not delete project " + projectId + "."); + } + scanner.close(); + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + private static class GetAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + ProjectInfo project = resourceManager.get(projectId); + if (project != null) { + System.out.printf( + "Successfully got project '%s': %s.%n", projectId, projectDetails(project)); + } else { + System.out.printf("Could not find project '%s'.%n", projectId); + } + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + private static class ListAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... 
args) { + System.out.println("Projects you can view:"); + for (ProjectInfo project : resourceManager.list().values()) { + System.out.println(projectDetails(project)); + } + } + + @Override + public String[] getRequiredParams() { + return new String[] {}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + static { + ACTIONS.put("create", new CreateAction()); + ACTIONS.put("delete", new DeleteAction()); + ACTIONS.put("get", new GetAction()); + ACTIONS.put("list", new ListAction()); + } + + private static String projectDetails(ProjectInfo project) { + return new StringBuilder() + .append("{projectId:") + .append(project.projectId()) + .append(", projectNumber:") + .append(project.projectNumber()) + .append(", createTimeMillis:") + .append(project.createTimeMillis()) + .append(", state:") + .append(project.state()) + .append(", labels:") + .append(project.labels()) + .append("}") + .toString(); + } + + private static void addUsage( + String actionName, ResourceManagerAction action, StringBuilder usage) { + usage.append(actionName); + Joiner joiner = Joiner.on(" "); + String[] requiredParams = action.getRequiredParams(); + if (requiredParams.length > 0) { + usage.append(' '); + joiner.appendTo(usage, requiredParams); + } + String[] optionalParams = action.getOptionalParams(); + if (optionalParams.length > 0) { + usage.append(" ["); + joiner.appendTo(usage, optionalParams); + usage.append(']'); + } + } + + public static void main(String... args) { + String actionName = args.length > 0 ? 
args[0].toLowerCase() : DEFAULT_ACTION; + ResourceManagerAction action = ACTIONS.get(actionName); + if (action == null) { + StringBuilder actionAndParams = new StringBuilder(); + for (Map.Entry entry : ACTIONS.entrySet()) { + addUsage(entry.getKey(), entry.getValue(), actionAndParams); + actionAndParams.append('|'); + } + actionAndParams.setLength(actionAndParams.length() - 1); + System.out.printf( + "Usage: %s [%s]%n", ResourceManagerExample.class.getSimpleName(), actionAndParams); + return; + } + + // If you want to access a local Resource Manager emulator (after creating and starting the + // LocalResourceManagerHelper), use the following code instead: + // ResourceManager resourceManager = LocalResourceManagerHelper.options().service(); + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + args = args.length > 1 ? Arrays.copyOfRange(args, 1, args.length) : new String[] {}; + if (args.length < action.getRequiredParams().length) { + StringBuilder usage = new StringBuilder(); + usage.append("Usage: "); + addUsage(actionName, action, usage); + System.out.println(usage); + } else { + action.run(resourceManager, args); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/GetOrCreateProject.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/GetOrCreateProject.java new file mode 100644 index 000000000000..5a298107cc60 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/GetOrCreateProject.java @@ -0,0 +1,49 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.resourcemanager.snippets; + +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +/** + * A snippet for Google Cloud Resource Manager showing how to create a project if it does not exist. + */ +public class GetOrCreateProject { + + public static void main(String... args) { + // Create Resource Manager service object. + // By default, credentials are inferred from the runtime environment. + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + + String projectId = "my-globally-unique-project-id"; // Change to a unique project ID. + // Get a project from the server. + Project project = resourceManager.get(projectId); + if (project == null) { + // Create a project. 
+ project = resourceManager.create(ProjectInfo.builder(projectId).build()); + } + System.out.println("Got project " + project.projectId() + " from the server."); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java new file mode 100644 index 000000000000..b194de0815d5 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java @@ -0,0 +1,62 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.resourcemanager.snippets; + +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.util.Iterator; + +/** + * A snippet for Google Cloud Resource Manager showing how to update a project and list all projects + * the user has permission to view. + */ +public class UpdateAndListProjects { + + public static void main(String... 
args) { + // Create Resource Manager service object + // By default, credentials are inferred from the runtime environment. + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + + // Get a project from the server + Project project = resourceManager.get("some-project-id"); // Use an existing project's ID + + // Update a project + if (project != null) { + Project newProject = project.toBuilder() + .addLabel("launch-status", "in-development") + .build() + .replace(); + System.out.println("Updated the labels of project " + newProject.projectId() + + " to be " + newProject.labels()); + } + + // List all the projects you have permission to view. + Iterator projectIterator = resourceManager.list().iterateAll(); + System.out.println("Projects I can view:"); + while (projectIterator.hasNext()) { + System.out.println(projectIterator.next().projectId()); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java similarity index 86% rename from gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java rename to gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java index fb207023203f..a7260134202d 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java @@ -14,25 +14,23 @@ * limitations under the License. 
*/ -package com.google.gcloud.examples; +package com.google.gcloud.examples.storage; import com.google.gcloud.AuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; -import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BlobReadChannel; -import com.google.gcloud.storage.CopyWriter; -import com.google.gcloud.storage.BlobWriteChannel; import com.google.gcloud.storage.Bucket; -import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.CopyWriter; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.Storage.ComposeRequest; import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; import com.google.gcloud.storage.StorageOptions; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.FileOutputStream; import java.io.IOException; @@ -52,33 +50,39 @@ import java.security.cert.CertificateException; import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; /** - * An example of using the Google Cloud Storage. - *

    - * This example demonstrates a simple/typical storage usage. - *

    - * Steps needed for running the example: + * An example of using Google Cloud Storage. + * + *

    This example demonstrates a simple/typical storage usage. + * + *

    Steps needed for running the example: *

      *
    1. login using gcloud SDK - {@code gcloud auth login}.
    2. *
    3. compile using maven - {@code mvn compile}
    4. *
    5. run using maven - - * {@code mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" - * -Dexec.args="[] list []| info [ []]| - * download [local_file]| upload []| - * delete +| cp | - * compose + | update_metadata [key=value]*| - * sign_url "} + *
      {@code mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.storage.StorageExample"
      + *  -Dexec.args="[]
      + *  list [] |
      + *  info [ []] |
      + *  download   [local_file] |
      + *  upload   [] |
      + *  delete  + |
      + *  cp     |
      + *  compose  +  |
      + *  update_metadata   [key=value]* |
      + *  sign_url    "}
      *
    6. *
    * - * The first parameter is an optional project_id (logged-in project will be used if not supplied). - * Second parameter is a Storage operation (list, delete, compose,...) to demonstrate the its - * usage. Any other arguments are specific to the operation. - * See each action's run method for the specific Storage interaction. + *

    The first parameter is an optional {@code project_id} (logged-in project will be used if not + * supplied). Second parameter is a Storage operation (list, delete, compose,...) and can be used to + * demonstrate its usage. Any other arguments are specific to the operation. See each action's run + * method for the specific Storage interaction. */ public class StorageExample { @@ -128,27 +132,27 @@ public void run(Storage storage, BlobId... blobIds) { if (blobIds.length == 1) { if (blobIds[0].name().isEmpty()) { // get Bucket - Bucket bucket = Bucket.load(storage, blobIds[0].bucket()); + Bucket bucket = storage.get(blobIds[0].bucket()); if (bucket == null) { System.out.println("No such bucket"); return; } - System.out.println("Bucket info: " + bucket.info()); + System.out.println("Bucket info: " + bucket); } else { // get Blob - Blob blob = Blob.load(storage, blobIds[0]); + Blob blob = storage.get(blobIds[0]); if (blob == null) { System.out.println("No such object"); return; } - System.out.println("Blob info: " + blob.info()); + System.out.println("Blob info: " + blob); } } else { // use batch to get multiple blobs. - List blobs = Blob.get(storage, blobIds); + List blobs = storage.get(blobIds); for (Blob blob : blobs) { if (blob != null) { - System.out.println(blob.info()); + System.out.println(blob); } } } @@ -179,7 +183,7 @@ private static class DeleteAction extends BlobsAction { @Override public void run(Storage storage, BlobId... blobIds) { // use batch operation - List deleteResults = Blob.delete(storage, blobIds); + List deleteResults = storage.delete(blobIds); int index = 0; for (Boolean deleted : deleteResults) { if (deleted) { @@ -213,18 +217,20 @@ String parse(String... 
args) { public void run(Storage storage, String bucketName) { if (bucketName == null) { // list buckets - for (BucketInfo b : storage.list()) { - System.out.println(b); + Iterator bucketIterator = storage.list().iterateAll(); + while (bucketIterator.hasNext()) { + System.out.println(bucketIterator.next()); } } else { // list a bucket's blobs - Bucket bucket = Bucket.load(storage, bucketName); + Bucket bucket = storage.get(bucketName); if (bucket == null) { System.out.println("No such bucket"); return; } - for (Blob b : bucket.list()) { - System.out.println(b.info()); + Iterator blobIterator = bucket.list().iterateAll(); + while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); } } } @@ -250,8 +256,7 @@ private void run(Storage storage, Path uploadFrom, BlobInfo blobInfo) throws IOE if (Files.size(uploadFrom) > 1_000_000) { // When content is not available or large (1MB or more) it is recommended // to write it in chunks via the blob's channel writer. - Blob blob = new Blob(storage, blobInfo); - try (BlobWriteChannel writer = blob.writer()) { + try (WriteChannel writer = storage.writer(blobInfo)) { byte[] buffer = new byte[1024]; try (InputStream input = Files.newInputStream(uploadFrom)) { int limit; @@ -304,7 +309,7 @@ public void run(Storage storage, Tuple tuple) throws IOException { } private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOException { - Blob blob = Blob.load(storage, blobId); + Blob blob = storage.get(blobId); if (blob == null) { System.out.println("No such object"); return; @@ -313,13 +318,13 @@ private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOExcep if (downloadTo != null) { writeTo = new PrintStream(new FileOutputStream(downloadTo.toFile())); } - if (blob.info().size() < 1_000_000) { + if (blob.size() < 1_000_000) { // Blob is small read all its content in one request byte[] content = blob.content(); writeTo.write(content); } else { // When Blob size is big or unknown use the 
blob's channel reader. - try (BlobReadChannel reader = blob.reader()) { + try (ReadChannel reader = blob.reader()) { WritableByteChannel channel = Channels.newChannel(writeTo); ByteBuffer bytes = ByteBuffer.allocate(64 * 1024); while (reader.read(bytes) > 0) { @@ -431,13 +436,13 @@ public void run(Storage storage, Tuple> tuple) } private void run(Storage storage, BlobId blobId, Map metadata) { - Blob blob = Blob.load(storage, blobId); + Blob blob = storage.get(blobId); if (blob == null) { System.out.println("No such object"); return; } - Blob updateBlob = blob.update(blob.info().toBuilder().metadata(metadata).build()); - System.out.println("Updated " + updateBlob.info()); + Blob updateBlob = blob.toBuilder().metadata(metadata).build().update(); + System.out.println("Updated " + updateBlob); } @Override @@ -481,9 +486,8 @@ public void run(Storage storage, Tuple run(storage, tuple.x(), tuple.y()); } - private void run(Storage storage, ServiceAccountAuthCredentials cred, BlobInfo blobInfo) - throws IOException { - Blob blob = new Blob(storage, blobInfo); + private void run(Storage storage, ServiceAccountAuthCredentials cred, BlobInfo blobInfo) { + Blob blob = storage.get(blobInfo.blobId()); System.out.println("Signed URL: " + blob.signUrl(1, TimeUnit.DAYS, SignUrlOption.serviceAccount(cred))); } @@ -520,7 +524,7 @@ public String params() { ACTIONS.put("sign_url", new SignUrlAction()); } - public static void printUsage() { + private static void printUsage() { StringBuilder actionAndParams = new StringBuilder(); for (Map.Entry entry : ACTIONS.entrySet()) { actionAndParams.append("\n\t").append(entry.getKey()); @@ -541,8 +545,7 @@ public static void main(String... 
args) throws Exception { printUsage(); return; } - StorageOptions.Builder optionsBuilder = - StorageOptions.builder().retryParams(RetryParams.getDefaultInstance()); + StorageOptions.Builder optionsBuilder = StorageOptions.builder(); StorageAction action; String actionName; if (args.length >= 2 && !ACTIONS.containsKey(args[0])) { diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateAndListBucketsAndBlobs.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateAndListBucketsAndBlobs.java new file mode 100644 index 000000000000..435cc90b03d8 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateAndListBucketsAndBlobs.java @@ -0,0 +1,70 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. 
+ */ + +package com.google.gcloud.examples.storage.snippets; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.Bucket; +import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +import java.util.Iterator; + +/** + * A snippet for Google Cloud Storage showing how to create a bucket and a blob in it. The snippet + * also shows how to get a blob's content, list buckets and list blobs. + */ +public class CreateAndListBucketsAndBlobs { + + public static void main(String... args) { + // Create a service object + // Credentials are inferred from the environment. + Storage storage = StorageOptions.defaultInstance().service(); + + // Create a bucket + String bucketName = "my_unique_bucket"; // Change this to something unique + Bucket bucket = storage.create(BucketInfo.of(bucketName)); + + // Upload a blob to the newly created bucket + Blob blob = bucket.create("my_blob_name", "a simple blob".getBytes(UTF_8), "text/plain"); + + // Read the blob content from the server + String blobContent = new String(blob.content(), UTF_8); + + // List all your buckets + Iterator bucketIterator = storage.list().iterateAll(); + System.out.println("My buckets:"); + while (bucketIterator.hasNext()) { + System.out.println(bucketIterator.next()); + } + + // List the blobs in a particular bucket + Iterator blobIterator = bucket.list().iterateAll(); + System.out.println("My blobs:"); + while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateBlob.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateBlob.java new file mode 100644 index 000000000000..2c1304a478ab --- /dev/null +++ 
b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateBlob.java @@ -0,0 +1,44 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. + */ + +package com.google.gcloud.examples.storage.snippets; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +/** + * A snippet for Google Cloud Storage showing how to create a blob. + */ +public class CreateBlob { + + public static void main(String... 
args) { + Storage storage = StorageOptions.defaultInstance().service(); + BlobId blobId = BlobId.of("bucket", "blob_name"); + BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build(); + Blob blob = storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8)); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/UpdateBlob.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/UpdateBlob.java new file mode 100644 index 000000000000..13290b201787 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/UpdateBlob.java @@ -0,0 +1,53 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * EDITING INSTRUCTIONS + * This file is referenced in READMEs and javadoc. Any change to this file should be reflected in + * the project's READMEs and package-info.java. 
+ */ + +package com.google.gcloud.examples.storage.snippets; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; + +/** + * A snippet for Google Cloud Storage showing how to update the blob's content if the blob exists. + */ +public class UpdateBlob { + + public static void main(String... args) throws IOException { + Storage storage = StorageOptions.defaultInstance().service(); + BlobId blobId = BlobId.of("bucket", "blob_name"); + Blob blob = storage.get(blobId); + if (blob != null) { + byte[] prevContent = blob.content(); + System.out.println(new String(prevContent, UTF_8)); + WritableByteChannel channel = blob.writer(); + channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); + channel.close(); + } + } +} diff --git a/gcloud-java-resourcemanager/README.md b/gcloud-java-resourcemanager/README.md new file mode 100644 index 000000000000..94037e27a709 --- /dev/null +++ b/gcloud-java-resourcemanager/README.md @@ -0,0 +1,218 @@ +Google Cloud Java Client for Resource Manager (Alpha) +============================================= + +Java idiomatic client for [Google Cloud Resource Manager] (https://cloud.google.com/resource-manager/). 
+ +[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) +[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) +[![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-resourcemanager.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-resourcemanager.svg) +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) + +- [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) +- [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html) + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. + +Quickstart +---------- +If you are using Maven, add this to your pom.xml file +```xml + + com.google.gcloud + gcloud-java-resourcemanager + 0.1.5 + +``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-resourcemanager:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-resourcemanager" % "0.1.5" +``` + +Example Application +-------------------- +[`ResourceManagerExample`](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/ResourceManagerExample.java) is a simple command line interface for the Cloud Resource Manager. 
Read more about using the application on the [`ResourceManagerExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/resourcemanager/ResourceManagerExample.html). + +Authentication +-------------- + +Unlike other `gcloud-java` service libraries, `gcloud-java-resourcemanager` only accepts Google Cloud SDK credentials at this time. If you are having trouble authenticating, it may be that you have other types of credentials that override your Google Cloud SDK credentials. See more about Google Cloud SDK credentials and credential precedence in the global README's [Authentication section](https://github.com/GoogleCloudPlatform/gcloud-java#authentication). + +About Google Cloud Resource Manager +----------------------------------- + +Google [Cloud Resource Manager][cloud-resourcemanager] provides a programmatic way to manage your Google Cloud Platform projects. With this API, you can do the following: + +* Get a list of all projects associated with an account. +* Create new projects. +* Update existing projects. +* Delete projects. +* Undelete projects that you don't want to delete. + +Google Cloud Resource Manager is currently in beta and may occasionally make backwards incompatible changes. + +Be sure to activate the Google Cloud Resource Manager API on the Developer's Console to use Resource Manager from your project. + +See the ``gcloud-java`` API [Resource Manager documentation][resourcemanager-api] to learn how to interact +with the Cloud Resource Manager using this client Library. + +Getting Started +--------------- +#### Prerequisites +You will need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following command in command line: `gcloud auth login`. + +> Note: You don't need a project ID to use this service. 
If you have a project ID set in the Google Cloud SDK, you can unset it by typing `gcloud config unset project` in command line. + +#### Installation and setup +You'll need to obtain the `gcloud-java-resourcemanager` library. See the [Quickstart](#quickstart) section to add `gcloud-java-resourcemanager` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Resource Manager, you must create a service object +with Google Cloud SDK credentials. You can then make API calls by calling methods on the Resource +Manager service object. The simplest way to authenticate is to use +[Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). +These credentials are automatically inferred from your environment, so you only need the following +code to create your service object: + +```java +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); +``` + +#### Getting a specific project +You can load a project if you know its project ID and have read permissions to the project. +To get a project, add the following import at the top of your file: + +```java +import com.google.gcloud.resourcemanager.Project; +``` + +Then use the following code to get the project: + +```java +String projectId = "my-globally-unique-project-id"; // Change to a unique project ID +Project project = resourceManager.get(projectId); +``` + +#### Creating a project +All you need to create a project is a globally unique project ID. You can also optionally attach a +non-unique name and labels to your project. Read more about naming guidelines for project IDs, +names, and labels [here](https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects).
+To create a project, add the following imports at the top of your file: + +```java +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +``` + +Then add the following code to create a project (be sure to change `projectId` to your own unique +project ID). + +```java +String projectId = "my-globally-unique-project-id"; // Change to a unique project ID +Project project = resourceManager.create(ProjectInfo.builder(projectId).build()); +``` + +Note that the return value from `create` is a `Project` that includes additional read-only +information, like creation time, project number, and lifecycle state. Read more about these fields +on the [Projects page](https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects). +`Project`, a subclass of `ProjectInfo`, adds a layer of service-related functionality over +`ProjectInfo`. + +#### Editing a project +To edit a project, create a new `ProjectInfo` object and pass it in to the `Project.replace` method. +For example, to add a label to a project to denote that its launch status is "in development", add +the following code: + +```java +Project newProject = project.toBuilder() + .addLabel("launch-status", "in-development") + .build() + .replace(); +``` + +Note that the values of the project you pass in to `replace` overwrite the server's values for +non-read-only fields, namely `projectName` and `labels`. For example, if you create a project with +`projectName` "some-project-name" and subsequently call replace using a `ProjectInfo` object that +didn't set the `projectName`, then the server will unset the project's name. The server ignores any +attempted changes to the read-only fields `projectNumber`, `lifecycleState`, and `createTime`. +The `projectId` cannot change. + +#### Listing all projects +Suppose that we want a list of all projects for which we have read permissions.
Add the following +import: + +```java +import java.util.Iterator; +``` + +Then add the following code to print a list of projects you can view: + +```java +Iterator projectIterator = resourceManager.list().iterateAll(); +System.out.println("Projects I can view:"); +while (projectIterator.hasNext()) { + System.out.println(projectIterator.next().projectId()); +} +``` + +#### Complete source code + +We put together all the code shown above into two programs. Both programs assume that you are +running from your own desktop and used the Google Cloud SDK to authenticate yourself. + +The first program creates a project if it does not exist. Complete source code can be found at +[GetOrCreateProject.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/GetOrCreateProject.java). + +The second program updates a project if it exists and lists all projects the user has permission to +view. Complete source code can be found at +[UpdateAndListProjects.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/resourcemanager/snippets/UpdateAndListProjects.java). + +Java Versions +------------- + +Java 7 or above is required for using this client. + +Versioning +---------- + +This library follows [Semantic Versioning] (http://semver.org/). + +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Testing +------- + +This library has tools to help write tests for code that uses Resource Manager. + +See [TESTING] to read more about testing. + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING] for more information on how to get started. + +License +------- + +Apache 2.0 - See [LICENSE] for more information. 
+ + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-resource-manager +[cloud-platform]: https://cloud.google.com/ +[cloud-resourcemanager]: https://cloud.google.com/resource-manager/docs +[resourcemanager-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html + diff --git a/gcloud-java-resourcemanager/pom.xml b/gcloud-java-resourcemanager/pom.xml new file mode 100644 index 000000000000..c10691d3b07d --- /dev/null +++ b/gcloud-java-resourcemanager/pom.xml @@ -0,0 +1,49 @@ + + + 4.0.0 + gcloud-java-resourcemanager + jar + GCloud Java resource manager + + Java idiomatic client for Google Cloud Resource Manager. + + + com.google.gcloud + gcloud-java-pom + 0.1.6-SNAPSHOT + + + gcloud-java-resourcemanager + + + + ${project.groupId} + gcloud-java-core + ${project.version} + + + com.google.apis + google-api-services-cloudresourcemanager + v1beta1-rev10-1.21.0 + compile + + + com.google.guava + guava-jdk5 + + + + + junit + junit + 4.12 + test + + + org.easymock + easymock + 3.4 + test + + + diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java new file mode 100644 index 000000000000..72d62d7fc224 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java @@ -0,0 +1,72 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for Resource Manager operation options. + */ +class Option implements Serializable { + + private static final long serialVersionUID = 2655177550880762967L; + + private final ResourceManagerRpc.Option rpcOption; + private final Object value; + + Option(ResourceManagerRpc.Option rpcOption, Object value) { + this.rpcOption = checkNotNull(rpcOption); + this.value = value; + } + + ResourceManagerRpc.Option rpcOption() { + return rpcOption; + } + + Object value() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Option)) { + return false; + } + Option other = (Option) obj; + return Objects.equals(rpcOption, other.rpcOption) + && Objects.equals(value, other.value); + } + + @Override + public int hashCode() { + return Objects.hash(rpcOption, value); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", rpcOption.value()) + .add("value", value) + .toString(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java new file mode 100644 index 000000000000..46330e19fa59 --- /dev/null +++ 
b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Policy.java @@ -0,0 +1,226 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.gcloud.IamPolicy; +import com.google.gcloud.Identity; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * An Identity and Access Management (IAM) policy for a project. IAM policies are used to specify + * access settings for Cloud Platform resources. A policy is a map of bindings. A binding assigns + * a set of identities to a role, where the identities can be user accounts, Google groups, Google + * domains, and service accounts. A role is a named list of permissions defined by IAM. Policies set + * at the project level control access both to the project and to resources associated with the + * project. + * + * @see Policy + */ +public class Policy extends IamPolicy { + + private static final long serialVersionUID = -5573557282693961850L; + + /** + * Represents legacy roles in an IAM Policy. 
+ */ + public static class Role implements Serializable { + + /** + * The recognized roles in a Project's IAM policy. + */ + public enum Type { + + /** + * Permissions for read-only actions that preserve state. + */ + VIEWER, + + /** + * All viewer permissions and permissions for actions that modify state. + */ + EDITOR, + + /** + * All editor permissions and permissions for the following actions: + *

      + *
    • Manage access control for a resource. + *
    • Set up billing (for a project). + *
    + */ + OWNER + } + + private static final long serialVersionUID = 2421978909244287488L; + + private final String value; + private final Type type; + + private Role(String value, Type type) { + this.value = value; + this.type = type; + } + + String value() { + return value; + } + + /** + * Returns the type of role (editor, owner, or viewer). Returns {@code null} if the role type + * is unrecognized. + */ + public Type type() { + return type; + } + + /** + * Returns a {@code Role} of type {@link Type#VIEWER VIEWER}. + */ + public static Role viewer() { + return new Role("roles/viewer", Type.VIEWER); + } + + /** + * Returns a {@code Role} of type {@link Type#EDITOR EDITOR}. + */ + public static Role editor() { + return new Role("roles/editor", Type.EDITOR); + } + + /** + * Returns a {@code Role} of type {@link Type#OWNER OWNER}. + */ + public static Role owner() { + return new Role("roles/owner", Type.OWNER); + } + + static Role rawRole(String roleStr) { + return new Role(roleStr, null); + } + + static Role fromStr(String roleStr) { + try { + Type type = Type.valueOf(roleStr.split("/")[1].toUpperCase()); + return new Role(roleStr, type); + } catch (Exception ex) { + return new Role(roleStr, null); + } + } + + @Override + public final int hashCode() { + return Objects.hash(value, type); + } + + @Override + public final boolean equals(Object obj) { + if (!(obj instanceof Role)) { + return false; + } + Role other = (Role) obj; + return Objects.equals(value, other.value()) && Objects.equals(type, other.type()); + } + } + + /** + * Builder for an IAM Policy. 
+ */ + public static class Builder extends IamPolicy.Builder { + + private Builder() {} + + @VisibleForTesting + Builder(Map> bindings, String etag, Integer version) { + bindings(bindings).etag(etag).version(version); + } + + @Override + public Policy build() { + return new Policy(this); + } + } + + private Policy(Builder builder) { + super(builder); + } + + public static Builder builder() { + return new Builder(); + } + + @Override + public Builder toBuilder() { + return new Builder(bindings(), etag(), version()); + } + + com.google.api.services.cloudresourcemanager.model.Policy toPb() { + com.google.api.services.cloudresourcemanager.model.Policy policyPb = + new com.google.api.services.cloudresourcemanager.model.Policy(); + List bindingPbList = + new LinkedList<>(); + for (Map.Entry> binding : bindings().entrySet()) { + com.google.api.services.cloudresourcemanager.model.Binding bindingPb = + new com.google.api.services.cloudresourcemanager.model.Binding(); + bindingPb.setRole(binding.getKey().value()); + bindingPb.setMembers( + Lists.transform( + new ArrayList<>(binding.getValue()), + new Function() { + @Override + public String apply(Identity identity) { + return identity.strValue(); + } + })); + bindingPbList.add(bindingPb); + } + policyPb.setBindings(bindingPbList); + policyPb.setEtag(etag()); + policyPb.setVersion(version()); + return policyPb; + } + + static Policy fromPb( + com.google.api.services.cloudresourcemanager.model.Policy policyPb) { + Map> bindings = new HashMap<>(); + for (com.google.api.services.cloudresourcemanager.model.Binding bindingPb : + policyPb.getBindings()) { + bindings.put( + Role.fromStr(bindingPb.getRole()), + ImmutableSet.copyOf( + Lists.transform( + bindingPb.getMembers(), + new Function() { + @Override + public Identity apply(String identityPb) { + return Identity.valueOf(identityPb); + } + }))); + } + return new Policy.Builder(bindings, policyPb.getEtag(), policyPb.getVersion()).build(); + } +} diff --git 
a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java new file mode 100644 index 000000000000..46b142c5aa53 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java @@ -0,0 +1,227 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkNotNull; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.Map; +import java.util.Objects; + +/** + * A Google Cloud Resource Manager project object. + * + *

    A Project is a high-level Google Cloud Platform entity. It is a container for ACLs, APIs, + * AppEngine Apps, VMs, and other Google Cloud Platform resources. This class' member variables are + * immutable. Methods that change or update the underlying Project information return a new Project + * instance. {@code Project} adds a layer of service-related functionality over {@link ProjectInfo}. + */ +public class Project extends ProjectInfo { + + private static final long serialVersionUID = 6767630161335155133L; + + private final ResourceManagerOptions options; + private transient ResourceManager resourceManager; + + /** + * Builder for {@code Project}. + */ + public static class Builder extends ProjectInfo.Builder { + private final ResourceManager resourceManager; + private final ProjectInfo.BuilderImpl infoBuilder; + + Builder(Project project) { + this.resourceManager = project.resourceManager; + this.infoBuilder = new ProjectInfo.BuilderImpl(project); + } + + @Override + public Builder name(String name) { + infoBuilder.name(name); + return this; + } + + @Override + public Builder projectId(String projectId) { + infoBuilder.projectId(projectId); + return this; + } + + @Override + public Builder addLabel(String key, String value) { + infoBuilder.addLabel(key, value); + return this; + } + + @Override + public Builder removeLabel(String key) { + infoBuilder.removeLabel(key); + return this; + } + + @Override + public Builder clearLabels() { + infoBuilder.clearLabels(); + return this; + } + + @Override + public Builder labels(Map labels) { + infoBuilder.labels(labels); + return this; + } + + @Override + Builder projectNumber(Long projectNumber) { + infoBuilder.projectNumber(projectNumber); + return this; + } + + @Override + Builder state(State state) { + infoBuilder.state(state); + return this; + } + + @Override + Builder createTimeMillis(Long createTimeMillis) { + infoBuilder.createTimeMillis(createTimeMillis); + return this; + } + + @Override + Builder 
parent(ResourceId parent) { + infoBuilder.parent(parent); + return this; + } + + @Override + public Project build() { + return new Project(resourceManager, infoBuilder); + } + } + + Project(ResourceManager resourceManager, ProjectInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.resourceManager = checkNotNull(resourceManager); + this.options = resourceManager.options(); + } + + /** + * Returns the {@link ResourceManager} service object associated with this Project. + */ + public ResourceManager resourceManager() { + return resourceManager; + } + + /** + * Fetches the project's latest information. Returns {@code null} if the project does not + * exist. + * + * @return Project containing the project's updated metadata or {@code null} if not found + * @throws ResourceManagerException upon failure + */ + public Project reload() { + return resourceManager.get(projectId()); + } + + /** + * Marks the project identified by the specified project ID for deletion. + * + *

    This method will only affect the project if the following criteria are met: + *

      + *
    • The project does not have a billing account associated with it. + *
    • The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}. + *
    + * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to + * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * the deletion completes, you can check the lifecycle state checked by retrieving the project + * with {@link ResourceManager#get}, and the project remains visible to + * {@link ResourceManager#list}. However, you cannot update the project. After the deletion + * completes, the project is not retrievable by the {@link ResourceManager#get} and + * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. + * + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager delete + */ + public void delete() { + resourceManager.delete(projectId()); + } + + /** + * Restores the project identified by the specified project ID. + * + *

    You can only use this method for a project that has a lifecycle state of + * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * caller must have modify permissions for this project. + * + * @throws ResourceManagerException upon failure (including when the project can't be restored) + * @see Cloud + * Resource Manager undelete + */ + public void undelete() { + resourceManager.undelete(projectId()); + } + + /** + * Replaces the attributes of the project with the attributes of this project. + * + *

    The caller must have modify permissions for this project. + * + * @return the Project representing the new project metadata + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager update + */ + public Project replace() { + return resourceManager.replace(this); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Project && Objects.equals(toPb(), ((Project) obj).toPb()) + && Objects.equals(options, ((Project) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.resourceManager = options.service(); + } + + static Project fromPb(ResourceManager resourceManager, + com.google.api.services.cloudresourcemanager.model.Project answer) { + ProjectInfo info = ProjectInfo.fromPb(answer); + return new Project(resourceManager, new ProjectInfo.BuilderImpl(info)); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java new file mode 100644 index 000000000000..260e8a8e2f26 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java @@ -0,0 +1,394 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
See the License for the specific language governing permissions and limitations under + * the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +import org.joda.time.DateTime; +import org.joda.time.format.ISODateTimeFormat; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * A Google Cloud Resource Manager project metadata object. + * A Project is a high-level Google Cloud Platform entity. It is a container for ACLs, APIs, + * AppEngine Apps, VMs, and other Google Cloud Platform resources. + */ +public class ProjectInfo implements Serializable { + + private static final long serialVersionUID = 9148970963697734236L; + private final String name; + private final String projectId; + private final Map labels; + private final Long projectNumber; + private final State state; + private final Long createTimeMillis; + private final ResourceId parent; + + /** + * The project lifecycle states. + */ + public enum State { + /** + * Only used/useful for distinguishing unset values. + */ + LIFECYCLE_STATE_UNSPECIFIED, + + /** + * The normal and active state. + */ + ACTIVE, + + /** + * The project has been marked for deletion by the user or by the system (Google Cloud + * Platform). This can generally be reversed by calling {@link ResourceManager#undelete}. + */ + DELETE_REQUESTED, + + /** + * The process of deleting the project has begun. Reversing the deletion is no longer possible. 
+ */ + DELETE_IN_PROGRESS + } + + static class ResourceId implements Serializable { + + private static final long serialVersionUID = -325199985993344726L; + + private final String id; + private final String type; + + ResourceId(String id, String type) { + this.id = checkNotNull(id); + this.type = checkNotNull(type); + } + + String id() { + return id; + } + + String type() { + return type; + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ResourceId && Objects.equals(toPb(), ((ResourceId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(id, type); + } + + com.google.api.services.cloudresourcemanager.model.ResourceId toPb() { + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb = + new com.google.api.services.cloudresourcemanager.model.ResourceId(); + resourceIdPb.setId(id); + resourceIdPb.setType(type.toLowerCase()); + return resourceIdPb; + } + + static ResourceId fromPb( + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb) { + return new ResourceId(resourceIdPb.getId(), resourceIdPb.getType()); + } + } + + /** + * Builder for {@code ProjectInfo}. + */ + public abstract static class Builder { + + /** + * Set the user-assigned name of the project. + * + *

    This field is optional and can remain unset. Allowed characters are: lowercase and + * uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point. + * This field can be changed after project creation. + */ + public abstract Builder name(String name); + + /** + * Set the unique, user-assigned ID of the project. + * + *

    The ID must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. + * Trailing hyphens are prohibited. This field cannot be changed after the server creates the + * project. + */ + public abstract Builder projectId(String projectId); + + /** + * Add a label associated with this project. + * + *

    See {@link #labels} for label restrictions. + */ + public abstract Builder addLabel(String key, String value); + + /** + * Remove a label associated with this project. + */ + public abstract Builder removeLabel(String key); + + /** + * Clear the labels associated with this project. + */ + public abstract Builder clearLabels(); + + /** + * Set the labels associated with this project. + * + *

    Label keys must be between 1 and 63 characters long and must conform to the following + * regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 + * characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No + * more than 256 labels can be associated with a given resource. This field can be changed after + * project creation. + */ + public abstract Builder labels(Map labels); + + abstract Builder projectNumber(Long projectNumber); + + abstract Builder state(State state); + + abstract Builder createTimeMillis(Long createTimeMillis); + + abstract Builder parent(ResourceId parent); + + public abstract ProjectInfo build(); + } + + static class BuilderImpl extends Builder { + + private String name; + private String projectId; + private Map labels = new HashMap<>(); + private Long projectNumber; + private State state; + private Long createTimeMillis; + private ResourceId parent; + + BuilderImpl(String projectId) { + this.projectId = projectId; + } + + BuilderImpl(ProjectInfo info) { + this.name = info.name; + this.projectId = info.projectId; + this.labels.putAll(info.labels); + this.projectNumber = info.projectNumber; + this.state = info.state; + this.createTimeMillis = info.createTimeMillis; + this.parent = info.parent; + } + + @Override + public Builder name(String name) { + this.name = firstNonNull(name, Data.nullOf(String.class)); + return this; + } + + @Override + public Builder projectId(String projectId) { + this.projectId = checkNotNull(projectId); + return this; + } + + @Override + public Builder addLabel(String key, String value) { + this.labels.put(key, value); + return this; + } + + @Override + public Builder removeLabel(String key) { + this.labels.remove(key); + return this; + } + + @Override + public Builder clearLabels() { + this.labels.clear(); + return this; + } + + @Override + public Builder labels(Map labels) { + this.labels = Maps.newHashMap(checkNotNull(labels)); + return this; + } 
+ + @Override + Builder projectNumber(Long projectNumber) { + this.projectNumber = projectNumber; + return this; + } + + @Override + Builder state(State state) { + this.state = state; + return this; + } + + @Override + Builder createTimeMillis(Long createTimeMillis) { + this.createTimeMillis = createTimeMillis; + return this; + } + + @Override + Builder parent(ResourceId parent) { + this.parent = parent; + return this; + } + + @Override + public ProjectInfo build() { + return new ProjectInfo(this); + } + } + + ProjectInfo(BuilderImpl builder) { + this.name = builder.name; + this.projectId = builder.projectId; + this.labels = ImmutableMap.copyOf(builder.labels); + this.projectNumber = builder.projectNumber; + this.state = builder.state; + this.createTimeMillis = builder.createTimeMillis; + this.parent = builder.parent; + } + + /** + * Get the unique, user-assigned ID of the project. + * + *

    This field cannot be changed after the server creates the project. + */ + public String projectId() { + return projectId; + } + + /** + * Get the user-assigned name of the project. + * + *

    This field is optional, can remain unset, and can be changed after project creation. + */ + public String name() { + return Data.isNull(name) ? null : name; + } + + /** + * Get number uniquely identifying the project. + * + *

    This field is set by the server and is read-only. + */ + public Long projectNumber() { + return projectNumber; + } + + /** + * Get the immutable map of labels associated with this project. + */ + public Map labels() { + return labels; + } + + /** + * Get the project's lifecycle state. + * + *

    This is a read-only field. To change the lifecycle state of your project, use the + * {@code delete} or {@code undelete} method. + */ + public State state() { + return state; + } + + ResourceId parent() { + return parent; + } + + /** + * Get the project's creation time (in milliseconds). + * + *

    This field is set by the server and is read-only. + */ + public Long createTimeMillis() { + return createTimeMillis; + } + + @Override + public boolean equals(Object obj) { + return obj != null && obj.getClass().equals(ProjectInfo.class) + && Objects.equals(toPb(), ((ProjectInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(name, projectId, labels, projectNumber, state, createTimeMillis, parent); + } + + public static Builder builder(String id) { + return new BuilderImpl(id); + } + + public Builder toBuilder() { + return new BuilderImpl(this); + } + + com.google.api.services.cloudresourcemanager.model.Project toPb() { + com.google.api.services.cloudresourcemanager.model.Project projectPb = + new com.google.api.services.cloudresourcemanager.model.Project(); + projectPb.setName(name); + projectPb.setProjectId(projectId); + projectPb.setLabels(labels); + projectPb.setProjectNumber(projectNumber); + if (state != null) { + projectPb.setLifecycleState(state.toString()); + } + if (createTimeMillis != null) { + projectPb.setCreateTime(ISODateTimeFormat.dateTime().withZoneUTC().print(createTimeMillis)); + } + if (parent != null) { + projectPb.setParent(parent.toPb()); + } + return projectPb; + } + + static ProjectInfo fromPb(com.google.api.services.cloudresourcemanager.model.Project projectPb) { + Builder builder = builder(projectPb.getProjectId()).projectNumber(projectPb.getProjectNumber()); + if (projectPb.getName() != null && !projectPb.getName().equals("Unnamed")) { + builder.name(projectPb.getName()); + } + if (projectPb.getLabels() != null) { + builder.labels(projectPb.getLabels()); + } + if (projectPb.getLifecycleState() != null) { + builder.state(State.valueOf(projectPb.getLifecycleState())); + } + if (projectPb.getCreateTime() != null) { + builder.createTimeMillis(DateTime.parse(projectPb.getCreateTime()).getMillis()); + } + if (projectPb.getParent() != null) { + builder.parent(ResourceId.fromPb(projectPb.getParent())); + } + 
return builder.build(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java new file mode 100644 index 000000000000..f14d47f2a676 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java @@ -0,0 +1,387 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.base.Joiner; +import com.google.common.collect.Sets; +import com.google.gcloud.IamPolicy; +import com.google.gcloud.Page; +import com.google.gcloud.Service; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; + +import java.util.List; +import java.util.Set; + +/** + * An interface for Google Cloud Resource Manager. + * + * @see Google Cloud Resource Manager + */ +public interface ResourceManager extends Service { + + String DEFAULT_CONTENT_TYPE = "application/octet-stream"; + + /** + * The fields of a project. + * + *

    These values can be used to specify the fields to include in a partial response when calling + * {@link ResourceManager#get} or {@link ResourceManager#list}. Project ID is always returned, + * even if not specified. + */ + enum ProjectField { + PROJECT_ID("projectId"), + NAME("name"), + LABELS("labels"), + PROJECT_NUMBER("projectNumber"), + STATE("lifecycleState"), + CREATE_TIME("createTime"); + + private final String selector; + + ProjectField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(ProjectField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(PROJECT_ID.selector()); + for (ProjectField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying project get options. + */ + class ProjectGetOption extends Option { + + private static final long serialVersionUID = 270185129961146874L; + + private ProjectGetOption(ResourceManagerRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the project's fields to be returned by the RPC call. + * + *

    If this option is not provided all project fields are returned. + * {@code ProjectGetOption.fields} can be used to specify only the fields of interest. Project + * ID is always returned, even if not specified. {@link ProjectField} provides a list of fields + * that can be used. + */ + public static ProjectGetOption fields(ProjectField... fields) { + return new ProjectGetOption(ResourceManagerRpc.Option.FIELDS, ProjectField.selector(fields)); + } + } + + /** + * Class for specifying project list options. + */ + class ProjectListOption extends Option { + + private static final long serialVersionUID = 7888768979702012328L; + + private ProjectListOption(ResourceManagerRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify a filter. + * + *

    Filter rules are case insensitive. The fields eligible for filtering are: + *

      + *
    • name + *
    • project ID + *
    • labels.key, where key is the name of a label + *
    + * + *

    You can specify multiple filters by adding a space between each filter. Multiple filters + * are composed using "and". + * + *

    Some examples of filters: + *

      + *
    • name:* The project has a name. + *
    • name:Howl The project's name is Howl or howl. + *
    • name:HOWL Equivalent to above. + *
    • NAME:howl Equivalent to above. + *
    • labels.color:* The project has the label color. + *
    • labels.color:red The project's label color has the value red. + *
    • labels.color:red label.size:big The project's label color has the value red and its + * label size has the value big. + *
    + */ + public static ProjectListOption filter(String filter) { + return new ProjectListOption(ResourceManagerRpc.Option.FILTER, filter); + } + + /** + * Returns an option to specify a page token. + * + *

    The page token (returned from a previous call to list) indicates from where listing should + * continue. + */ + public static ProjectListOption pageToken(String pageToken) { + return new ProjectListOption(ResourceManagerRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * The maximum number of projects to return per RPC. + * + *

    The server can return fewer projects than requested. When there are more results than the + * page size, the server will return a page token that can be used to fetch other results. + */ + public static ProjectListOption pageSize(int pageSize) { + return new ProjectListOption(ResourceManagerRpc.Option.PAGE_SIZE, pageSize); + } + + /** + * Returns an option to specify the project's fields to be returned by the RPC call. + * + *

    If this option is not provided all project fields are returned. + * {@code ProjectListOption.fields} can be used to specify only the fields of interest. Project + * ID is always returned, even if not specified. {@link ProjectField} provides a list of fields + * that can be used. + */ + public static ProjectListOption fields(ProjectField... fields) { + StringBuilder builder = new StringBuilder(); + builder.append("projects(").append(ProjectField.selector(fields)).append("),nextPageToken"); + return new ProjectListOption(ResourceManagerRpc.Option.FIELDS, builder.toString()); + } + } + + /** + * The permissions associated with a Google Cloud project. These values can be used when calling + * {@link #testPermissions}. + * + * @see + * Project-level roles + */ + enum Permission { + DELETE("delete"), + GET("get"), + GET_POLICY("getIamPolicy"), + REPLACE("update"), + REPLACE_POLICY("setIamPolicy"), + UNDELETE("undelete"); + + private static final String PREFIX = "resourcemanager.projects."; + + private final String value; + + Permission(String suffix) { + this.value = PREFIX + suffix; + } + + /** + * Returns the string representation of the permission. + */ + public String value() { + return value; + } + } + + /** + * Creates a new project. + * + *

    Initially, the project resource is owned by its creator exclusively. The creator can later + * grant permission to others to read or update the project. Several APIs are activated + * automatically for the project, including Google Cloud Storage. + * + * @return Project object representing the new project's metadata. The returned object will + * include the following read-only fields supplied by the server: project number, lifecycle + * state, and creation time. + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager create + */ + Project create(ProjectInfo project); + + /** + * Marks the project identified by the specified project ID for deletion. + * + *

    This method will only affect the project if the following criteria are met: + *

      + *
    • The project does not have a billing account associated with it. + *
    • The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}. + *
    + * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to + * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * the deletion completes, you can check the lifecycle state checked by retrieving the project + * with {@link ResourceManager#get}, and the project remains visible to + * {@link ResourceManager#list}. However, you cannot update the project. After the deletion + * completes, the project is not retrievable by the {@link ResourceManager#get} and + * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. + * + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager delete + */ + void delete(String projectId); + + /** + * Retrieves the project identified by the specified project ID. + * + *

    Returns {@code null} if the project is not found or if the user doesn't have read + * permissions for the project. + * + * @throws ResourceManagerException upon failure + * @see + * Cloud Resource Manager get + */ + Project get(String projectId, ProjectGetOption... options); + + /** + * Lists the projects visible to the current user. + * + *

    This method returns projects in an unspecified order. New projects do not necessarily appear + * at the end of the list. Use {@link ProjectListOption} to filter this list, set page size, and + * set page tokens. + * + * @return {@code Page}, a page of projects + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager list + */ + Page list(ProjectListOption... options); + + /** + * Replaces the attributes of the project. + * + *

    The caller must have modify permissions for this project. + * + * @return the Project representing the new project metadata + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager update + */ + Project replace(ProjectInfo newProject); + + /** + * Restores the project identified by the specified project ID. + * + *

    You can only use this method for a project that has a lifecycle state of + * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * caller must have modify permissions for this project. + * + * @throws ResourceManagerException upon failure + * @see Cloud + * Resource Manager undelete + */ + void undelete(String projectId); + + /** + * Returns the IAM access control policy for the specified project. Returns {@code null} if the + * resource does not exist or if you do not have adequate permission to view the project or get + * the policy. + * + * @throws ResourceManagerException upon failure + * @see + * Resource Manager getIamPolicy + */ + Policy getPolicy(String projectId); + + /** + * Sets the IAM access control policy for the specified project. Replaces any existing policy. The + * following constraints apply: + *

      + *
    • Projects currently support only user:{emailid} and serviceAccount:{emailid} + * members in a binding of a policy. + *
    • To be added as an owner, a user must be invited via Cloud Platform console and must accept + * the invitation. + *
    • Members cannot be added to more than one role in the same policy. + *
    • There must be at least one owner who has accepted the Terms of Service (ToS) agreement in + * the policy. An attempt to set a policy that removes the last ToS-accepted owner from the + * policy will fail. + *
    • Calling this method requires enabling the App Engine Admin API. + *
    + * Note: Removing service accounts from policies or changing their roles can render services + * completely inoperable. It is important to understand how the service account is being used + * before removing or updating its roles. + * + *

    It is recommended that you use the read-modify-write pattern. This pattern entails reading + * the project's current policy, updating it locally, and then sending the modified policy for + * writing. Cloud IAM solves the problem of conflicting processes simultaneously attempting to + * modify a policy by using the {@link IamPolicy#etag etag} property. This property is used to + * verify whether the policy has changed since the last request. When you make a request to Cloud + * IAM with an etag value, Cloud IAM compares the etag value in the request with the existing etag + * value associated with the policy. It writes the policy only if the etag values match. If the + * etags don't match, a {@code ResourceManagerException} is thrown, denoting that the server + * aborted update. If an etag is not provided, the policy is overwritten blindly. + * + *

    An example of using the read-write-modify pattern is as follows: + *

     {@code
    +   * Policy currentPolicy = resourceManager.getPolicy("my-project-id");
    +   * Policy modifiedPolicy =
    +   *     current.toBuilder().removeIdentity(Role.VIEWER, Identity.user("user@gmail.com"));
    +   * Policy newPolicy = resourceManager.replacePolicy("my-project-id", modified);
    +   * }
    +   * 
    + * + * @throws ResourceManagerException upon failure + * @see + * Resource Manager setIamPolicy + */ + Policy replacePolicy(String projectId, Policy newPolicy); + + /** + * Returns the permissions that a caller has on the specified project. You typically don't call + * this method if you're using Google Cloud Platform directly to manage permissions. This method + * is intended for integration with your proprietary software, such as a customized graphical user + * interface. For example, the Cloud Platform Console tests IAM permissions internally to + * determine which UI should be available to the logged-in user. + * + * @return A list of booleans representing whether the caller has the permissions specified (in + * the order of the given permissions) + * @throws ResourceManagerException upon failure + * @see + * Resource Manager testIamPermissions + */ + List testPermissions(String projectId, List permissions); + + /** + * Returns the permissions that a caller has on the specified project. You typically don't call + * this method if you're using Google Cloud Platform directly to manage permissions. This method + * is intended for integration with your proprietary software, such as a customized graphical user + * interface. For example, the Cloud Platform Console tests IAM permissions internally to + * determine which UI should be available to the logged-in user. + * + * @return A list of booleans representing whether the caller has the permissions specified (in + * the order of the given permissions) + * @throws ResourceManagerException upon failure + * @see + * Resource Manager testIamPermissions + */ + List testPermissions(String projectId, Permission first, Permission... 
others); +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java new file mode 100644 index 000000000000..32a2998791c9 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java @@ -0,0 +1,75 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +import java.io.IOException; +import java.util.Set; + +/** + * Resource Manager service exception. 
+ * + * @see Google Cloud + * Resource Manager error codes + */ +public class ResourceManagerException extends BaseServiceException { + + // see https://cloud.google.com/resource-manager/v1/errors/core_errors + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(503, null), + new Error(500, null), + new Error(429, null), + new Error(403, "concurrentLimitExceeded"), + new Error(403, "limitExceeded"), + new Error(403, "rateLimitExceeded"), + new Error(403, "rateLimitExceededUnreg"), + new Error(403, "servingLimitExceeded"), + new Error(403, "userRateLimitExceeded"), + new Error(403, "userRateLimitExceededUnreg"), + new Error(403, "variableTermLimitExceeded")); + private static final long serialVersionUID = -9207194488966554136L; + + public ResourceManagerException(int code, String message) { + super(code, message, null, true); + } + + public ResourceManagerException(IOException exception) { + super(exception, true); + } + + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; + } + + /** + * Translate RetryHelperException to the ResourceManagerException that caused the error. This + * method will always throw an exception. 
+ * + * @throws ResourceManagerException when {@code ex} was caused by a {@code + * ResourceManagerException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} + */ + static ResourceManagerException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new ResourceManagerException(UNKNOWN_CODE, ex.getMessage()); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java new file mode 100644 index 000000000000..256fc321e4e1 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java @@ -0,0 +1,25 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.gcloud.ServiceFactory; + +/** + * An interface for ResourceManager factories. 
+ */ +public interface ResourceManagerFactory + extends ServiceFactory {} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java new file mode 100644 index 000000000000..d9911b911f0b --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java @@ -0,0 +1,252 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gcloud.BaseService; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Tuple; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; + +final class ResourceManagerImpl + extends BaseService implements ResourceManager { + + private final ResourceManagerRpc resourceManagerRpc; + + ResourceManagerImpl(ResourceManagerOptions options) { + super(options); + resourceManagerRpc = options.rpc(); + } + + @Override + public Project create(final ProjectInfo project) { + try { + return Project.fromPb(this, runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.create(project.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public void delete(final String projectId) { + try { + runWithRetries(new Callable() { + @Override + public Void call() { + resourceManagerRpc.delete(projectId); + return null; + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Project get(final String projectId, 
ProjectGetOption... options) { + final Map optionsMap = optionMap(options); + try { + com.google.api.services.cloudresourcemanager.model.Project answer = runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.get(projectId, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Project.fromPb(this, answer); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + private static class ProjectPageFetcher implements NextPageFetcher { + + private static final long serialVersionUID = 2158209410430566961L; + private final Map requestOptions; + private final ResourceManagerOptions serviceOptions; + + ProjectPageFetcher(ResourceManagerOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(ResourceManagerRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + } + + @Override + public Page nextPage() { + return listProjects(serviceOptions, requestOptions); + } + } + + @Override + public Page list(ProjectListOption... options) { + return listProjects(options(), optionMap(options)); + } + + private static Page listProjects(final ResourceManagerOptions serviceOptions, + final Map optionsMap) { + try { + Tuple> result = + runWithRetries(new Callable>>() { + @Override + public Tuple> call() { + return serviceOptions.rpc().list(optionsMap); + } + }, + serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable projects = + result.y() == null + ? 
ImmutableList.of() : Iterables.transform( + result.y(), + new Function() { + @Override + public Project apply( + com.google.api.services.cloudresourcemanager.model.Project projectPb) { + return new Project( + serviceOptions.service(), + new ProjectInfo.BuilderImpl(ProjectInfo.fromPb(projectPb))); + } + }); + return new PageImpl<>( + new ProjectPageFetcher(serviceOptions, cursor, optionsMap), cursor, projects); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Project replace(final ProjectInfo newProject) { + try { + return Project.fromPb(this, runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.replace(newProject.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public void undelete(final String projectId) { + try { + runWithRetries(new Callable() { + @Override + public Void call() { + resourceManagerRpc.undelete(projectId); + return null; + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Policy getPolicy(final String projectId) { + try { + com.google.api.services.cloudresourcemanager.model.Policy answer = + runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Policy call() { + return resourceManagerRpc.getPolicy(projectId); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? 
null : Policy.fromPb(answer); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public Policy replacePolicy(final String projectId, final Policy newPolicy) { + try { + return Policy.fromPb(runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Policy call() { + return resourceManagerRpc.replacePolicy(projectId, newPolicy.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public List testPermissions(final String projectId, final List permissions) { + try { + return runWithRetries( + new Callable>() { + @Override + public List call() { + return resourceManagerRpc.testPermissions(projectId, + Lists.transform(permissions, new Function() { + @Override + public String apply(Permission permission) { + return permission.value(); + } + })); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException ex) { + throw ResourceManagerException.translateAndThrow(ex); + } + } + + @Override + public List testPermissions(String projectId, Permission first, Permission... others) { + return testPermissions(projectId, Lists.asList(first, others)); + } + + private Map optionMap(Option... 
options) { + Map temp = Maps.newEnumMap(ResourceManagerRpc.Option.class); + for (Option option : options) { + Object prev = temp.put(option.rpcOption(), option.value()); + checkArgument(prev == null, "Duplicate option %s", option); + } + return ImmutableMap.copyOf(temp); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java new file mode 100644 index 000000000000..c744864147c2 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java @@ -0,0 +1,123 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.ServiceOptions; +import com.google.gcloud.resourcemanager.spi.DefaultResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpcFactory; + +import java.util.Set; + +public class ResourceManagerOptions + extends ServiceOptions { + + private static final long serialVersionUID = 538303101192527452L; + private static final String GCRM_SCOPE = "https://www.googleapis.com/auth/cloud-platform"; + private static final Set SCOPES = ImmutableSet.of(GCRM_SCOPE); + private static final String DEFAULT_HOST = "https://cloudresourcemanager.googleapis.com"; + + public static class DefaultResourceManagerFactory implements ResourceManagerFactory { + private static final ResourceManagerFactory INSTANCE = new DefaultResourceManagerFactory(); + + @Override + public ResourceManager create(ResourceManagerOptions options) { + return new ResourceManagerImpl(options); + } + } + + /** + * Returns a default {@code ResourceManagerOptions} instance. 
+ */ + public static ResourceManagerOptions defaultInstance() { + return builder().build(); + } + + public static class DefaultResourceManagerRpcFactory implements ResourceManagerRpcFactory { + private static final ResourceManagerRpcFactory INSTANCE = + new DefaultResourceManagerRpcFactory(); + + @Override + public ResourceManagerRpc create(ResourceManagerOptions options) { + return new DefaultResourceManagerRpc(options); + } + } + + @Override + protected String defaultHost() { + return DEFAULT_HOST; + } + + public static class Builder extends ServiceOptions.Builder { + + private Builder() {} + + private Builder(ResourceManagerOptions options) { + super(options); + } + + @Override + public ResourceManagerOptions build() { + return new ResourceManagerOptions(this); + } + } + + private ResourceManagerOptions(Builder builder) { + super(ResourceManagerFactory.class, ResourceManagerRpcFactory.class, builder); + } + + @Override + protected boolean projectIdRequired() { + return false; + } + + @Override + protected ResourceManagerFactory defaultServiceFactory() { + return DefaultResourceManagerFactory.INSTANCE; + } + + @Override + protected ResourceManagerRpcFactory defaultRpcFactory() { + return DefaultResourceManagerRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return SCOPES; + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ResourceManagerOptions && baseEquals((ResourceManagerOptions) obj); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java new file mode 100644 index 000000000000..d1794447e9fb --- /dev/null +++ 
b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java @@ -0,0 +1,61 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to Google Cloud Resource Manager. + * + *

    Here's a simple usage example for using gcloud-java from App/Compute Engine. This example + * creates a project if it does not exist. For the complete source code see + * + * GetOrCreateProject.java. + *

     {@code
    + * ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    + * String projectId = "my-globally-unique-project-id"; // Change to a unique project ID.
    + * Project project = resourceManager.get(projectId);
    + * if (project == null) {
    + *   project = resourceManager.create(ProjectInfo.builder(projectId).build());
    + * }
    + * System.out.println("Got project " + project.projectId() + " from the server.");
    + * }
    + *

    + * This second example shows how to update a project if it exists and list all projects the user has + * permission to view. For the complete source code see + * + * UpdateAndListProjects.java. + *

     {@code
    + * ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    + * Project project = resourceManager.get("some-project-id"); // Use an existing project's ID
    + * if (project != null) {
    + *   Project newProject = project.toBuilder()
    + *       .addLabel("launch-status", "in-development")
    + *       .build()
    + *       .replace();
    + *   System.out.println("Updated the labels of project " + newProject.projectId()
    + *       + " to be " + newProject.labels());
    + * }
    + * Iterator projectIterator = resourceManager.list().iterateAll();
    + * System.out.println("Projects I can view:");
    + * while (projectIterator.hasNext()) {
    + *   System.out.println(projectIterator.next().projectId());
    + * }}
    + *

    Remember that you must authenticate using the Google Cloud SDK. See more about + * providing + * credentials here. + * + * @see Google Cloud Resource Manager + */ + +package com.google.gcloud.resourcemanager; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java new file mode 100644 index 000000000000..9f92ff545874 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/DefaultResourceManagerRpc.java @@ -0,0 +1,167 @@ +package com.google.gcloud.resourcemanager.spi; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.FIELDS; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.FILTER; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.PAGE_SIZE; +import static com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Option.PAGE_TOKEN; +import static java.net.HttpURLConnection.HTTP_FORBIDDEN; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; + +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.json.jackson.JacksonFactory; +import com.google.api.services.cloudresourcemanager.Cloudresourcemanager; +import com.google.api.services.cloudresourcemanager.model.GetIamPolicyRequest; +import com.google.api.services.cloudresourcemanager.model.ListProjectsResponse; +import com.google.api.services.cloudresourcemanager.model.Policy; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.api.services.cloudresourcemanager.model.SetIamPolicyRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsRequest; +import 
com.google.api.services.cloudresourcemanager.model.TestIamPermissionsResponse; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.resourcemanager.ResourceManagerException; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class DefaultResourceManagerRpc implements ResourceManagerRpc { + + private final Cloudresourcemanager resourceManager; + + public DefaultResourceManagerRpc(ResourceManagerOptions options) { + HttpTransport transport = options.httpTransportFactory().create(); + HttpRequestInitializer initializer = options.httpRequestInitializer(); + resourceManager = + new Cloudresourcemanager.Builder(transport, new JacksonFactory(), initializer) + .setRootUrl(options.host()) + .setApplicationName(options.applicationName()) + .build(); + } + + private static ResourceManagerException translate(IOException exception) { + return new ResourceManagerException(exception); + } + + @Override + public Project create(Project project) { + try { + return resourceManager.projects().create(project).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void delete(String projectId) { + try { + resourceManager.projects().delete(projectId).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Project get(String projectId, Map options) { + try { + return resourceManager.projects() + .get(projectId) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + ResourceManagerException translated = translate(ex); + if (translated.code() == HTTP_FORBIDDEN || translated.code() == HTTP_NOT_FOUND) { + // Service can return either 403 or 404 to signify that the project doesn't exist. 
+ return null; + } else { + throw translated; + } + } + } + + @Override + public Tuple> list(Map options) { + try { + ListProjectsResponse response = resourceManager.projects() + .list() + .setFields(FIELDS.getString(options)) + .setFilter(FILTER.getString(options)) + .setPageSize(PAGE_SIZE.getInt(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .execute(); + return Tuple.>of( + response.getNextPageToken(), response.getProjects()); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void undelete(String projectId) { + try { + resourceManager.projects().undelete(projectId).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Project replace(Project project) { + try { + return resourceManager.projects().update(project.getProjectId(), project).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Policy getPolicy(String projectId) throws ResourceManagerException { + try { + return resourceManager.projects() + .getIamPolicy(projectId, new GetIamPolicyRequest()) + .execute(); + } catch (IOException ex) { + ResourceManagerException translated = translate(ex); + if (translated.code() == HTTP_FORBIDDEN) { + // Service returns permission denied if policy doesn't exist. 
+ return null; + } else { + throw translated; + } + } + } + + @Override + public Policy replacePolicy(String projectId, Policy newPolicy) throws ResourceManagerException { + try { + return resourceManager.projects() + .setIamPolicy(projectId, new SetIamPolicyRequest().setPolicy(newPolicy)).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public List testPermissions(String projectId, List permissions) + throws ResourceManagerException { + try { + TestIamPermissionsResponse response = resourceManager.projects() + .testIamPermissions( + projectId, new TestIamPermissionsRequest().setPermissions(permissions)) + .execute(); + Set permissionsOwned = + ImmutableSet.copyOf(firstNonNull(response.getPermissions(), ImmutableList.of())); + ImmutableList.Builder answer = ImmutableList.builder(); + for (String p : permissions) { + answer.add(permissionsOwned.contains(p)); + } + return answer.build(); + } catch (IOException ex) { + throw translate(ex); + } + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java new file mode 100644 index 000000000000..d6ec068a92a3 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpc.java @@ -0,0 +1,149 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager.spi; + +import com.google.api.services.cloudresourcemanager.model.Policy; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.gcloud.resourcemanager.ResourceManagerException; + +import java.util.List; +import java.util.Map; + +public interface ResourceManagerRpc { + + enum Option { + FILTER("filter"), + FIELDS("fields"), + PAGE_SIZE("pageSize"), + PAGE_TOKEN("pageToken"); + + private final String value; + + Option(String value) { + this.value = value; + } + + public String value() { + return value; + } + + @SuppressWarnings("unchecked") + T get(Map options) { + return (T) options.get(this); + } + + String getString(Map options) { + return get(options); + } + + Integer getInt(Map options) { + return get(options); + } + } + + class Tuple { + private final X x; + private final Y y; + + private Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + + public static Tuple of(X x, Y y) { + return new Tuple<>(x, y); + } + + public X x() { + return x; + } + + public Y y() { + return y; + } + } + + /** + * Creates a new project. + * + * @throws ResourceManagerException upon failure + */ + Project create(Project project); + + /** + * Marks the project identified by the specified project ID for deletion. + * + * @throws ResourceManagerException upon failure + */ + void delete(String projectId); + + /** + * Retrieves the project identified by the specified project ID. Returns {@code null} if the + * project is not found or if the user doesn't have read permissions for the project. + * + * @throws ResourceManagerException upon failure + */ + Project get(String projectId, Map options); + + /** + * Lists the projects visible to the current user. 
+ * + * @throws ResourceManagerException upon failure + */ + Tuple> list(Map options); + + /** + * Restores the project identified by the specified project ID. Undelete will only succeed if the + * project has a lifecycle state of {@code DELETE_REQUESTED} state. The caller must have modify + * permissions for this project. + * + * @throws ResourceManagerException upon failure + */ + void undelete(String projectId); + + /** + * Replaces the attributes of the project. The caller must have modify permissions for this + * project. + * + * @throws ResourceManagerException upon failure + */ + Project replace(Project project); + + /** + * Returns the IAM policy associated with a project. + * + * @throws ResourceManagerException upon failure + */ + Policy getPolicy(String projectId); + + /** + * Replaces the IAM policy associated with the given project. + * + * @throws ResourceManagerException upon failure + */ + Policy replacePolicy(String projectId, Policy newPolicy); + + /** + * Tests whether the caller has the given permissions. Returns a list of booleans corresponding to + * whether or not the user has the permission in the same position of input list. + * + * @throws ResourceManagerException upon failure + */ + List testPermissions(String projectId, List permissions); + + // TODO(ajaykannan): implement "Organization" functionality when available (issue #319) +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java new file mode 100644 index 000000000000..4dbd1a00d4c7 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/spi/ResourceManagerRpcFactory.java @@ -0,0 +1,28 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager.spi; + +import com.google.gcloud.resourcemanager.ResourceManagerOptions; +import com.google.gcloud.spi.ServiceRpcFactory; + +/** + * An interface for Resource Manager RPC factory. + * Implementation will be loaded via {@link java.util.ServiceLoader}. + */ +public interface ResourceManagerRpcFactory + extends ServiceRpcFactory { +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java new file mode 100644 index 000000000000..8ddca18b6261 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java @@ -0,0 +1,725 @@ +package com.google.gcloud.resourcemanager.testing; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; +import static java.net.HttpURLConnection.HTTP_OK; + +import com.google.api.client.json.JsonFactory; +import com.google.api.services.cloudresourcemanager.model.Binding; +import com.google.api.services.cloudresourcemanager.model.Policy; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.api.services.cloudresourcemanager.model.SetIamPolicyRequest; +import 
com.google.api.services.cloudresourcemanager.model.TestIamPermissionsRequest; +import com.google.api.services.cloudresourcemanager.model.TestIamPermissionsResponse; +import com.google.common.base.Joiner; +import com.google.common.base.Objects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.io.ByteStreams; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.resourcemanager.ResourceManager.Permission; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import org.joda.time.format.ISODateTimeFormat; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentSkipListMap; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; + +/** + * Utility to create a local Resource Manager mock for testing. + * + *

    The mock runs in a separate thread, listening for HTTP requests on the local machine at an + * ephemeral port. While this mock attempts to simulate the Cloud Resource Manager, there are some + * divergences in behavior. The following is a non-exhaustive list of some of those behavioral + * differences: + * + *

      + *
    • This mock assumes you have adequate permissions for any action. Related to this, + * testIamPermissions always indicates that the caller has all permissions listed in the + * request. + *
    • IAM policies are set to an empty policy with version 0 (only legacy roles supported) upon + * project creation. The actual service will not have an empty list of bindings and may also + * set your version to 1. + *
    • There is no input validation for the policy provided when replacing a policy. + *
    • In this mock, projects never move from the DELETE_REQUESTED lifecycle state to + * DELETE_IN_PROGRESS without an explicit call to the utility method + * {@link #changeLifecycleState}. Similarly, a project is never completely removed without an + * explicit call to the utility method {@link #removeProject}. + *
    • The messages in the error responses given by this mock do not necessarily match the messages + * given by the actual service. + *
    + */ +@SuppressWarnings("restriction") +public class LocalResourceManagerHelper { + private static final Logger log = Logger.getLogger(LocalResourceManagerHelper.class.getName()); + private static final JsonFactory jsonFactory = + new com.google.api.client.json.jackson.JacksonFactory(); + private static final Random PROJECT_NUMBER_GENERATOR = new Random(); + private static final String VERSION = "v1beta1"; + private static final String CONTEXT = "/" + VERSION + "/projects"; + private static final URI BASE_CONTEXT; + private static final Set SUPPORTED_COMPRESSION_ENCODINGS = + ImmutableSet.of("gzip", "x-gzip"); + private static final Pattern LIST_FIELDS_PATTERN = + Pattern.compile("(.*?)projects\\((.*?)\\)(.*?)"); + private static final String[] NO_FIELDS = {}; + private static final Set PERMISSIONS = new HashSet<>(); + + static { + for (Permission permission : Permission.values()) { + PERMISSIONS.add(permission.value()); + } + try { + BASE_CONTEXT = new URI(CONTEXT); + } catch (URISyntaxException e) { + throw new RuntimeException( + "Could not initialize LocalResourceManagerHelper due to URISyntaxException.", e); + } + } + + // see https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects + private static final Set PERMISSIBLE_PROJECT_NAME_PUNCTUATION = + ImmutableSet.of('-', '\'', '"', ' ', '!'); + + private final HttpServer server; + private final ConcurrentSkipListMap projects = new ConcurrentSkipListMap<>(); + private final Map policies = new HashMap<>(); + private final int port; + + private static class Response { + private final int code; + private final String body; + + Response(int code, String body) { + this.code = code; + this.body = body; + } + + int code() { + return code; + } + + String body() { + return body; + } + } + + private enum Error { + ABORTED(409, "global", "aborted", "ABORTED"), + ALREADY_EXISTS(409, "global", "alreadyExists", "ALREADY_EXISTS"), + PERMISSION_DENIED(403, "global", "forbidden", "PERMISSION_DENIED"), + 
FAILED_PRECONDITION(400, "global", "failedPrecondition", "FAILED_PRECONDITION"), + INVALID_ARGUMENT(400, "global", "badRequest", "INVALID_ARGUMENT"), + BAD_REQUEST(400, "global", "badRequest", "BAD_REQUEST"), + INTERNAL_ERROR(500, "global", "internalError", "INTERNAL_ERROR"); + + private final int code; + private final String domain; + private final String reason; + private final String status; + + Error(int code, String domain, String reason, String status) { + this.code = code; + this.domain = domain; + this.reason = reason; + this.status = status; + } + + Response response(String message) { + try { + return new Response(code, toJson(message)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when generating JSON error response"); + } + } + + private String toJson(String message) throws IOException { + Map errors = new HashMap<>(); + errors.put("domain", domain); + errors.put("message", message); + errors.put("reason", reason); + Map args = new HashMap<>(); + args.put("errors", ImmutableList.of(errors)); + args.put("code", code); + args.put("message", message); + args.put("status", status); + return jsonFactory.toString(ImmutableMap.of("error", args)); + } + } + + private class RequestHandler implements HttpHandler { + @Override + public void handle(HttpExchange exchange) { + // see https://cloud.google.com/resource-manager/reference/rest/ + Response response; + String path = BASE_CONTEXT.relativize(exchange.getRequestURI()).getPath(); + String requestMethod = exchange.getRequestMethod(); + try { + switch (requestMethod) { + case "POST": + response = handlePost(exchange, path); + break; + case "DELETE": + response = delete(projectIdFromUri(path)); + break; + case "GET": + if (!path.isEmpty()) { + response = + get(projectIdFromUri(path), parseFields(exchange.getRequestURI().getQuery())); + } else { + response = list(parseListOptions(exchange.getRequestURI().getQuery())); + } + break; + case "PUT": + String requestBody = + 
decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); + response = + replace(projectIdFromUri(path), jsonFactory.fromString(requestBody, Project.class)); + break; + default: + response = Error.BAD_REQUEST.response( + "The server could not understand the following request URI: " + requestMethod + " " + + path); + } + } catch (IOException e) { + response = Error.BAD_REQUEST.response(e.getMessage()); + } + writeResponse(exchange, response); + } + } + + private Response handlePost(HttpExchange exchange, String path) throws IOException { + String requestBody = decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); + if (!path.contains(":")) { + return create(jsonFactory.fromString(requestBody, Project.class)); + } else { + switch (path.split(":", 2)[1]) { + case "undelete": + return undelete(projectIdFromUri(path)); + case "getIamPolicy": + return getPolicy(projectIdFromUri(path)); + case "setIamPolicy": + return replacePolicy(projectIdFromUri(path), + jsonFactory.fromString(requestBody, SetIamPolicyRequest.class).getPolicy()); + case "testIamPermissions": + return testPermissions(projectIdFromUri(path), + jsonFactory.fromString(requestBody, TestIamPermissionsRequest.class) + .getPermissions()); + default: + return Error.BAD_REQUEST.response( + "The server could not understand the following request URI: POST " + path); + } + } + } + + private static void writeResponse(HttpExchange exchange, Response response) { + exchange.getResponseHeaders().set("Content-type", "application/json; charset=UTF-8"); + OutputStream outputStream = exchange.getResponseBody(); + try { + exchange.getResponseHeaders().add("Connection", "close"); + exchange.sendResponseHeaders(response.code(), response.body().length()); + outputStream.write(response.body().getBytes(StandardCharsets.UTF_8)); + outputStream.close(); + } catch (IOException e) { + log.log(Level.WARNING, "IOException encountered when sending response.", e); + } + } + + private static String 
decodeContent(Headers headers, InputStream inputStream) throws IOException { + List contentEncoding = headers.get("Content-encoding"); + InputStream input = inputStream; + try { + if (contentEncoding != null && !contentEncoding.isEmpty()) { + String encoding = contentEncoding.get(0); + if (SUPPORTED_COMPRESSION_ENCODINGS.contains(encoding)) { + input = new GZIPInputStream(inputStream); + } else if (!encoding.equals("identity")) { + throw new IOException( + "The request has the following unsupported HTTP content encoding: " + encoding); + } + } + return new String(ByteStreams.toByteArray(input), StandardCharsets.UTF_8); + } catch (IOException e) { + throw new IOException("Exception encountered when decoding request content.", e); + } + } + + private static String projectIdFromUri(String path) throws IOException { + if (path.isEmpty()) { + throw new IOException("The URI path '" + path + "' doesn't have a project ID."); + } + return path.split(":")[0]; + } + + private static String[] parseFields(String query) { + if (query != null && !query.isEmpty()) { + String[] querySplit = query.split("="); + return querySplit.length > 1 ? 
querySplit[1].split(",") : null; + } + return null; + } + + private static Map parseListOptions(String query) throws IOException { + Map options = new HashMap<>(); + if (query != null) { + String[] args = query.split("&"); + for (String arg : args) { + String[] argEntry = arg.split("="); + switch (argEntry[0]) { + case "fields": + // List fields are in the form "projects(field1, field2, ...),nextPageToken" + Matcher matcher = LIST_FIELDS_PATTERN.matcher(argEntry[1]); + if (matcher.matches()) { + options.put("projectFields", matcher.group(2).split(",")); + options.put("listFields", (matcher.group(1) + matcher.group(3)).split(",")); + } else { + options.put("projectFields", NO_FIELDS); + options.put("listFields", argEntry[1].split(",")); + } + break; + case "filter": + options.put("filter", argEntry[1].split(" ")); + break; + case "pageToken": + options.put("pageToken", argEntry[1]); + break; + case "pageSize": + int pageSize = Integer.parseInt(argEntry[1]); + if (pageSize < 1) { + throw new IOException("Page size must be greater than 0."); + } + options.put("pageSize", pageSize); + break; + } + } + } + return options; + } + + private static String checkForProjectErrors(Project project) { + if (project.getProjectId() == null) { + return "Project ID cannot be empty."; + } + if (!isValidIdOrLabel(project.getProjectId(), 6, 30)) { + return "Project " + project.getProjectId() + " has an invalid ID." + + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + "/projects" + + " for more information."; + } + if (project.getName() != null) { + for (char c : project.getName().toCharArray()) { + if (!PERMISSIBLE_PROJECT_NAME_PUNCTUATION.contains(c) && !Character.isLetterOrDigit(c)) { + return "Project " + project.getProjectId() + " has an invalid name." 
+ + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + + "/projects for more information."; + } + } + } + if (project.getLabels() != null) { + if (project.getLabels().size() > 256) { + return "Project " + project.getProjectId() + " exceeds the limit of 256 labels."; + } + for (Map.Entry entry : project.getLabels().entrySet()) { + if (!isValidIdOrLabel(entry.getKey(), 1, 63) + || !isValidIdOrLabel(entry.getValue(), 0, 63)) { + return "Project " + project.getProjectId() + " has an invalid label entry." + + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + + "/projects for more information."; + } + } + } + return null; + } + + private static boolean isValidIdOrLabel(String value, int minLength, int maxLength) { + for (char c : value.toCharArray()) { + if (c != '-' && !Character.isDigit(c) && !Character.isLowerCase(c)) { + return false; + } + } + if (!value.isEmpty() && (!Character.isLetter(value.charAt(0)) || value.endsWith("-"))) { + return false; + } + return value.length() >= minLength && value.length() <= maxLength; + } + + synchronized Response create(Project project) { + String customErrorMessage = checkForProjectErrors(project); + if (customErrorMessage != null) { + return Error.INVALID_ARGUMENT.response(customErrorMessage); + } else { + project.setLifecycleState("ACTIVE"); + project.setProjectNumber(Math.abs(PROJECT_NUMBER_GENERATOR.nextLong() % Long.MAX_VALUE)); + project.setCreateTime(ISODateTimeFormat.dateTime().print(System.currentTimeMillis())); + if (projects.putIfAbsent(project.getProjectId(), project) != null) { + return Error.ALREADY_EXISTS.response( + "A project with the same project ID (" + project.getProjectId() + ") already exists."); + } + Policy emptyPolicy = new Policy() + .setBindings(Collections.emptyList()) + .setEtag(UUID.randomUUID().toString()) + .setVersion(0); + policies.put(project.getProjectId(), emptyPolicy); + try { + String createdProjectStr = jsonFactory.toString(project); + 
return new Response(HTTP_OK, createdProjectStr); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error serializing project " + project.getProjectId()); + } + } + } + + synchronized Response delete(String projectId) { + Project project = projects.get(projectId); + if (project == null) { + return Error.PERMISSION_DENIED.response( + "Error when deleting " + projectId + " because the project was not found."); + } + if (!project.getLifecycleState().equals("ACTIVE")) { + return Error.FAILED_PRECONDITION.response( + "Error when deleting " + projectId + " because the lifecycle state was not ACTIVE."); + } else { + project.setLifecycleState("DELETE_REQUESTED"); + return new Response(HTTP_OK, "{}"); + } + } + + Response get(String projectId, String[] fields) { + Project project = projects.get(projectId); + if (project != null) { + try { + return new Response(HTTP_OK, jsonFactory.toString(extractFields(project, fields))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing project " + project.getProjectId()); + } + } else { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + } + + Response list(Map options) { + List projectsSerialized = new ArrayList<>(); + String[] filters = (String[]) options.get("filter"); + if (filters != null && !isValidFilter(filters)) { + return Error.INVALID_ARGUMENT.response("Could not parse the filter."); + } + String[] projectFields = (String[]) options.get("projectFields"); + int count = 0; + String pageToken = (String) options.get("pageToken"); + Integer pageSize = (Integer) options.get("pageSize"); + String nextPageToken = null; + Map projectsToScan = projects; + if (pageToken != null) { + projectsToScan = projects.tailMap(pageToken); + } + for (Project p : projectsToScan.values()) { + if (pageSize != null && count >= pageSize) { + nextPageToken = p.getProjectId(); + break; + } + boolean includeProject = includeProject(p, filters); + if 
(includeProject) { + count++; + try { + projectsSerialized.add(jsonFactory.toString(extractFields(p, projectFields))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing project " + p.getProjectId()); + } + } + } + String[] listFields = (String[]) options.get("listFields"); + StringBuilder responseBody = new StringBuilder(); + responseBody.append('{'); + // If fields parameter is set but no project field is selected we must return no projects. + if (!(projectFields != null && projectFields.length == 0)) { + responseBody.append("\"projects\": ["); + Joiner.on(",").appendTo(responseBody, projectsSerialized); + responseBody.append(']'); + } + if (nextPageToken != null && (listFields == null + || ImmutableSet.copyOf(listFields).contains("nextPageToken"))) { + if (responseBody.length() > 1) { + responseBody.append(','); + } + responseBody.append("\"nextPageToken\": \""); + responseBody.append(nextPageToken); + responseBody.append('"'); + } + responseBody.append('}'); + return new Response(HTTP_OK, responseBody.toString()); + } + + private static boolean isValidFilter(String[] filters) { + for (String filter : filters) { + String field = filter.toLowerCase().split(":")[0]; + if (!("id".equals(field) || "name".equals(field) || field.startsWith("labels."))) { + return false; + } + } + return true; + } + + private static boolean includeProject(Project project, String[] filters) { + if (filters == null) { + return true; + } + for (String filter : filters) { + String[] filterEntry = filter.toLowerCase().split(":"); + String filterType = filterEntry[0]; + if ("id".equals(filterType)) { + if (!satisfiesFilter(project.getProjectId(), filterEntry[1])) { + return false; + } + } else if ("name".equals(filterType)) { + if (!satisfiesFilter(project.getName(), filterEntry[1])) { + return false; + } + } else if (filterType.startsWith("labels.")) { + String labelKey = filterType.substring("labels.".length()); + if (project.getLabels() != null) 
{ + String labelValue = project.getLabels().get(labelKey); + if (!satisfiesFilter(labelValue, filterEntry[1])) { + return false; + } + } + } + } + return true; + } + + private static boolean satisfiesFilter(String projectValue, String filterValue) { + if (projectValue == null) { + return false; + } + return "*".equals(filterValue) || filterValue.equals(projectValue.toLowerCase()); + } + + private static Project extractFields(Project fullProject, String[] fields) { + if (fields == null) { + return fullProject; + } + Project project = new Project(); + for (String field : fields) { + switch (field) { + case "createTime": + project.setCreateTime(fullProject.getCreateTime()); + break; + case "labels": + project.setLabels(fullProject.getLabels()); + break; + case "lifecycleState": + project.setLifecycleState(fullProject.getLifecycleState()); + break; + case "name": + project.setName(fullProject.getName()); + break; + case "parent": + project.setParent(fullProject.getParent()); + break; + case "projectId": + project.setProjectId(fullProject.getProjectId()); + break; + case "projectNumber": + project.setProjectNumber(fullProject.getProjectNumber()); + break; + } + } + return project; + } + + synchronized Response replace(String projectId, Project project) { + Project originalProject = projects.get(projectId); + if (originalProject == null) { + return Error.PERMISSION_DENIED.response( + "Error when replacing " + projectId + " because the project was not found."); + } else if (!originalProject.getLifecycleState().equals("ACTIVE")) { + return Error.FAILED_PRECONDITION.response( + "Error when replacing " + projectId + " because the lifecycle state was not ACTIVE."); + } else if (!Objects.equal(originalProject.getParent(), project.getParent())) { + return Error.INVALID_ARGUMENT.response( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it."); + } + project.setProjectId(projectId); + 
project.setLifecycleState(originalProject.getLifecycleState()); + project.setCreateTime(originalProject.getCreateTime()); + project.setProjectNumber(originalProject.getProjectNumber()); + // replace cannot fail because both this method and removeProject are synchronized + projects.replace(projectId, project); + try { + return new Response(HTTP_OK, jsonFactory.toString(project)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when serializing project " + projectId); + } + } + + synchronized Response undelete(String projectId) { + Project project = projects.get(projectId); + Response response; + if (project == null) { + response = Error.PERMISSION_DENIED.response( + "Error when undeleting " + projectId + " because the project was not found."); + } else if (!project.getLifecycleState().equals("DELETE_REQUESTED")) { + response = Error.FAILED_PRECONDITION.response("Error when undeleting " + projectId + + " because the lifecycle state was not DELETE_REQUESTED."); + } else { + project.setLifecycleState("ACTIVE"); + response = new Response(HTTP_OK, "{}"); + } + return response; + } + + synchronized Response getPolicy(String projectId) { + Policy policy = policies.get(projectId); + if (policy == null) { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + try { + return new Response(HTTP_OK, jsonFactory.toString(policy)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing the IAM policy for " + projectId); + } + } + + synchronized Response replacePolicy(String projectId, Policy policy) { + Policy originalPolicy = policies.get(projectId); + if (originalPolicy == null) { + return Error.PERMISSION_DENIED.response("Error when replacing the policy for " + projectId + + " because the project was not found."); + } + String etag = policy.getEtag(); + if (etag != null && !originalPolicy.getEtag().equals(etag)) { + return Error.ABORTED.response("Policy etag mismatch when 
replacing the policy for project " + + projectId + ", please retry the read."); + } + policy.setEtag(UUID.randomUUID().toString()); + policy.setVersion(originalPolicy.getVersion()); + policies.put(projectId, policy); + try { + return new Response(HTTP_OK, jsonFactory.toString(policy)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing the policy for project " + projectId); + } + } + + synchronized Response testPermissions(String projectId, List permissions) { + if (!projects.containsKey(projectId)) { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + for (String p : permissions) { + if (!PERMISSIONS.contains(p)) { + return Error.INVALID_ARGUMENT.response("Invalid permission: " + p); + } + } + try { + return new Response(HTTP_OK, + jsonFactory.toString(new TestIamPermissionsResponse().setPermissions(permissions))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when serializing permissions " + permissions); + } + } + + private LocalResourceManagerHelper() { + try { + server = HttpServer.create(new InetSocketAddress(0), 0); + port = server.getAddress().getPort(); + server.createContext(CONTEXT, new RequestHandler()); + } catch (IOException e) { + throw new RuntimeException("Could not bind the mock Resource Manager server.", e); + } + } + + /** + * Creates a {@code LocalResourceManagerHelper} object that listens to requests on the local + * machine. + */ + public static LocalResourceManagerHelper create() { + return new LocalResourceManagerHelper(); + } + + /** + * Returns a {@link ResourceManagerOptions} instance that sets the host to use the mock server. + */ + public ResourceManagerOptions options() { + return ResourceManagerOptions.builder() + .host("http://localhost:" + port) + .authCredentials(AuthCredentials.noAuth()) + .build(); + } + + /** + * Starts the thread that runs the Resource Manager server. 
+ */ + public void start() { + server.start(); + } + + /** + * Stops the thread that runs the mock Resource Manager server. + */ + public void stop() { + server.stop(1); + } + + /** + * Utility method to change the lifecycle state of the specified project. + * + * @return true if the lifecycle state was successfully updated, false otherwise + */ + public synchronized boolean changeLifecycleState(String projectId, String lifecycleState) { + checkArgument( + "ACTIVE".equals(lifecycleState) || "DELETE_REQUESTED".equals(lifecycleState) + || "DELETE_IN_PROGRESS".equals(lifecycleState), + "Lifecycle state must be ACTIVE, DELETE_REQUESTED, or DELETE_IN_PROGRESS"); + Project project = projects.get(checkNotNull(projectId)); + if (project != null) { + project.setLifecycleState(lifecycleState); + return true; + } + return false; + } + + /** + * Utility method to remove the specified project. + * + *

    This method can be used to fully remove a project (to mimic when the server completely + * deletes a project). + * + * @return true if the project was successfully deleted, false if the project didn't exist + */ + public synchronized boolean removeProject(String projectId) { + // Because this method is synchronized, any code that relies on non-atomic read/write operations + // should not fail if that code is also synchronized. + policies.remove(checkNotNull(projectId)); + return projects.remove(projectId) != null; + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java new file mode 100644 index 000000000000..7e5519f7d085 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java @@ -0,0 +1,32 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A testing helper for Google Cloud Resource Manager. + * + *

    A simple usage example: + * Before the test: + *

     {@code
    + * LocalResourceManagerHelper resourceManagerHelper = LocalResourceManagerHelper.create();
    + * ResourceManager resourceManager = resourceManagerHelper.options().service();
    + * } 
    + * + *

    After the test: + *

     {@code
    + * resourceManagerHelper.stop();
    + * } 
    + */ +package com.google.gcloud.resourcemanager.testing; diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java new file mode 100644 index 000000000000..829094816664 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java @@ -0,0 +1,727 @@ +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.api.services.cloudresourcemanager.model.Binding; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.resourcemanager.spi.DefaultResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc.Tuple; +import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; + +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public class LocalResourceManagerHelperTest { + + private static final String DEFAULT_PARENT_ID = "12345"; + private static final String DEFAULT_PARENT_TYPE = "organization"; + private static final com.google.api.services.cloudresourcemanager.model.ResourceId PARENT = + new com.google.api.services.cloudresourcemanager.model.ResourceId() + .setId(DEFAULT_PARENT_ID) + .setType(DEFAULT_PARENT_TYPE); + private static final Map 
EMPTY_RPC_OPTIONS = ImmutableMap.of(); + private static final LocalResourceManagerHelper RESOURCE_MANAGER_HELPER = + LocalResourceManagerHelper.create(); + private static final ResourceManagerRpc rpc = + new DefaultResourceManagerRpc(RESOURCE_MANAGER_HELPER.options()); + private static final com.google.api.services.cloudresourcemanager.model.Project PARTIAL_PROJECT = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "partial-project"); + private static final com.google.api.services.cloudresourcemanager.model.Project COMPLETE_PROJECT = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("complete-project") + .setName("full project") + .setLabels(ImmutableMap.of("k1", "v1", "k2", "v2")); + private static final com.google.api.services.cloudresourcemanager.model.Project + PROJECT_WITH_PARENT = + copyFrom(COMPLETE_PROJECT).setProjectId("project-with-parent-id").setParent(PARENT); + private static final List BINDINGS = ImmutableList.of( + new Binding().setRole("roles/owner").setMembers(ImmutableList.of("user:me@gmail.com")), + new Binding().setRole("roles/viewer").setMembers(ImmutableList.of("group:group@gmail.com"))); + private static final com.google.api.services.cloudresourcemanager.model.Policy POLICY = + new com.google.api.services.cloudresourcemanager.model.Policy().setBindings(BINDINGS); + + @BeforeClass + public static void beforeClass() { + RESOURCE_MANAGER_HELPER.start(); + } + + private static com.google.api.services.cloudresourcemanager.model.Project copyFrom( + com.google.api.services.cloudresourcemanager.model.Project from) { + return new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(from.getProjectId()) + .setName(from.getName()) + .setLabels(from.getLabels() != null ? 
ImmutableMap.copyOf(from.getLabels()) : null) + .setProjectNumber(from.getProjectNumber()) + .setCreateTime(from.getCreateTime()) + .setLifecycleState(from.getLifecycleState()) + .setParent(from.getParent() != null ? from.getParent().clone() : null); + } + + private void clearProjects() { + for (com.google.api.services.cloudresourcemanager.model.Project project : + rpc.list(EMPTY_RPC_OPTIONS).y()) { + RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + } + } + + @Before + public void setUp() { + clearProjects(); + } + + @AfterClass + public static void afterClass() { + RESOURCE_MANAGER_HELPER.stop(); + } + + @Test + public void testCreate() { + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.create(PARTIAL_PROJECT); + compareReadWriteFields(PARTIAL_PROJECT, returnedProject); + assertEquals("ACTIVE", returnedProject.getLifecycleState()); + assertNull(returnedProject.getLabels()); + assertNull(returnedProject.getName()); + assertNull(returnedProject.getParent()); + assertNotNull(returnedProject.getProjectNumber()); + assertNotNull(returnedProject.getCreateTime()); + com.google.api.services.cloudresourcemanager.model.Policy policy = + rpc.getPolicy(PARTIAL_PROJECT.getProjectId()); + assertEquals(Collections.emptyList(), policy.getBindings()); + assertNotNull(policy.getEtag()); + assertEquals(0, policy.getVersion().intValue()); + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); + assertEquals(POLICY.getBindings(), rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getBindings()); + try { + rpc.create(PARTIAL_PROJECT); + fail("Should fail, project already exists."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("A project with the same project ID") + && e.getMessage().endsWith("already exists.")); + assertEquals( + POLICY.getBindings(), rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getBindings()); + } + returnedProject = rpc.create(PROJECT_WITH_PARENT); 
+ compareReadWriteFields(PROJECT_WITH_PARENT, returnedProject); + assertEquals("ACTIVE", returnedProject.getLifecycleState()); + assertNotNull(returnedProject.getProjectNumber()); + assertNotNull(returnedProject.getCreateTime()); + } + + @Test + public void testIsInvalidProjectId() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project(); + String invalidIDMessageSubstring = "invalid ID"; + expectInvalidArgumentException(project, "Project ID cannot be empty."); + project.setProjectId("abcde"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("this-project-id-is-more-than-thirty-characters-long"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("project-id-with-invalid-character-?"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("-invalid-start-character"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("invalid-ending-character-"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("some-valid-project-id-12345"); + rpc.create(project); + assertNotNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS)); + } + + private void expectInvalidArgumentException( + com.google.api.services.cloudresourcemanager.model.Project project, + String errorMessageSubstring) { + try { + rpc.create(project); + fail("Should fail because of an invalid argument."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains(errorMessageSubstring)); + } + } + + @Test + public void testIsInvalidProjectName() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "some-project-id"); + rpc.create(project); + 
assertNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS).getName()); + RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + project.setName("This is a valid name-'\"!"); + rpc.create(project); + assertEquals(project.getName(), rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS).getName()); + RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + project.setName("invalid-character-,"); + try { + rpc.create(project); + fail("Should fail because of invalid project name."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("invalid name")); + } + } + + @Test + public void testIsInvalidProjectLabels() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "some-valid-project-id"); + String invalidLabelMessageSubstring = "invalid label entry"; + project.setLabels(ImmutableMap.of("", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of( + "this-project-label-is-more-than-sixty-three-characters-long-so-it-should-fail", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of( + "k1", "this-project-label-is-more-than-sixty-three-characters-long-so-it-should-fail")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1?", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "v1*")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("-k1", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "-v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1-", 
"v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "v1-")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + Map tooManyLabels = new HashMap<>(); + for (int i = 0; i < 257; i++) { + tooManyLabels.put("k" + Integer.toString(i), "v" + Integer.toString(i)); + } + project.setLabels(tooManyLabels); + expectInvalidArgumentException(project, "exceeds the limit of 256 labels"); + project.setLabels(ImmutableMap.of("k-1", "")); + rpc.create(project); + assertNotNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS)); + assertTrue(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS) + .getLabels() + .get("k-1") + .isEmpty()); + } + + @Test + public void testDelete() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + assertEquals( + "DELETE_REQUESTED", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + try { + rpc.delete("some-nonexistant-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("not found.")); + } + } + + @Test + public void testDeleteWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + try { + rpc.delete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testDeleteWhenDeleteRequested() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_REQUESTED"); + try { + rpc.delete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not ACTIVE."); + } catch 
(ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testGet() { + rpc.create(COMPLETE_PROJECT); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId()); + assertNull(rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS)); + } + + @Test + public void testGetWithOptions() { + com.google.api.services.cloudresourcemanager.model.Project originalProject = + rpc.create(COMPLETE_PROJECT); + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "projectId,name,createTime"); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), rpcOptions); + assertFalse(COMPLETE_PROJECT.equals(returnedProject)); + assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); + assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); + assertEquals(originalProject.getCreateTime(), returnedProject.getCreateTime()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getLabels()); + } + + @Test + public void testList() { + Tuple> projects = + rpc.list(EMPTY_RPC_OPTIONS); + assertNull(projects.x()); + assertFalse(projects.y().iterator().hasNext()); + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_REQUESTED"); + rpc.create(PROJECT_WITH_PARENT); + projects = rpc.list(EMPTY_RPC_OPTIONS); + for (com.google.api.services.cloudresourcemanager.model.Project p : projects.y()) { + if (p.getProjectId().equals(COMPLETE_PROJECT.getProjectId())) { + 
compareReadWriteFields(COMPLETE_PROJECT, p); + } else if (p.getProjectId().equals(PROJECT_WITH_PARENT.getProjectId())) { + compareReadWriteFields(PROJECT_WITH_PARENT, p); + } else { + fail("Unexpected project in list."); + } + } + } + + @Test + public void testInvalidListPaging() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, -1); + try { + rpc.list(rpcOptions); + } catch (ResourceManagerException e) { + assertEquals("Page size must be greater than 0.", e.getMessage()); + } + } + + @Test + public void testListPaging() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, 1); + rpc.create(PARTIAL_PROJECT); + rpc.create(COMPLETE_PROJECT); + Tuple> projects = + rpc.list(rpcOptions); + assertNotNull(projects.x()); + Iterator iterator = + projects.y().iterator(); + compareReadWriteFields(COMPLETE_PROJECT, iterator.next()); + assertFalse(iterator.hasNext()); + rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_TOKEN, projects.x()); + projects = rpc.list(rpcOptions); + iterator = projects.y().iterator(); + compareReadWriteFields(PARTIAL_PROJECT, iterator.next()); + assertFalse(iterator.hasNext()); + assertNull(projects.x()); + } + + @Test + public void testListFieldOptions() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, + "projects(projectId,name,labels),nextPageToken"); + rpc.create(PROJECT_WITH_PARENT); + Tuple> projects = + rpc.list(rpcOptions); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + projects.y().iterator().next(); + assertFalse(PROJECT_WITH_PARENT.equals(returnedProject)); + assertEquals(PROJECT_WITH_PARENT.getProjectId(), returnedProject.getProjectId()); + assertEquals(PROJECT_WITH_PARENT.getName(), returnedProject.getName()); + assertEquals(PROJECT_WITH_PARENT.getLabels(), returnedProject.getLabels()); + assertNull(returnedProject.getParent()); + 
assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getCreateTime()); + } + + @Test + public void testListPageTokenFieldOptions() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, 1); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "nextPageToken,projects(projectId,name)"); + rpc.create(PARTIAL_PROJECT); + rpc.create(COMPLETE_PROJECT); + Tuple> projects = + rpc.list(rpcOptions); + assertNotNull(projects.x()); + Iterator iterator = + projects.y().iterator(); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = iterator.next(); + assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); + assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); + assertNull(returnedProject.getLabels()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getCreateTime()); + assertFalse(iterator.hasNext()); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_TOKEN, projects.x()); + projects = rpc.list(rpcOptions); + iterator = projects.y().iterator(); + returnedProject = iterator.next(); + assertEquals(PARTIAL_PROJECT.getProjectId(), returnedProject.getProjectId()); + assertEquals(PARTIAL_PROJECT.getName(), returnedProject.getName()); + assertNull(returnedProject.getLabels()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getCreateTime()); + assertNull(projects.x()); + } + + @Test + public void testListNoPageTokenFieldOptions() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, 1); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "projects(projectId,name)"); + rpc.create(PARTIAL_PROJECT); + rpc.create(COMPLETE_PROJECT); + 
Tuple> projects = + rpc.list(rpcOptions); + assertNull(projects.x()); + Iterator iterator = + projects.y().iterator(); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = iterator.next(); + assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); + assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); + assertNull(returnedProject.getLabels()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getCreateTime()); + assertFalse(iterator.hasNext()); + } + + @Test + public void testListPageTokenNoFieldsOptions() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, 1); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "nextPageToken"); + rpc.create(PARTIAL_PROJECT); + rpc.create(COMPLETE_PROJECT); + Tuple> projects = + rpc.list(rpcOptions); + assertNotNull(projects.x()); + assertNull(projects.y()); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_TOKEN, projects.x()); + projects = rpc.list(rpcOptions); + assertNull(projects.x()); + assertNull(projects.y()); + } + + @Test + public void testListFilterOptions() { + Map rpcFilterOptions = new HashMap<>(); + rpcFilterOptions.put( + ResourceManagerRpc.Option.FILTER, "id:* name:myProject labels.color:blue LABELS.SIZE:*"); + com.google.api.services.cloudresourcemanager.model.Project matchingProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("matching-project") + .setName("MyProject") + .setLabels(ImmutableMap.of("color", "blue", "size", "big")); + com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject1 = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("non-matching-project1") + .setName("myProject"); + nonMatchingProject1.setLabels(ImmutableMap.of("color", "blue")); + 
com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject2 = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("non-matching-project2") + .setName("myProj") + .setLabels(ImmutableMap.of("color", "blue", "size", "big")); + com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject3 = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "non-matching-project3"); + rpc.create(matchingProject); + rpc.create(nonMatchingProject1); + rpc.create(nonMatchingProject2); + rpc.create(nonMatchingProject3); + for (com.google.api.services.cloudresourcemanager.model.Project p : + rpc.list(rpcFilterOptions).y()) { + assertFalse(p.equals(nonMatchingProject1)); + assertFalse(p.equals(nonMatchingProject2)); + compareReadWriteFields(matchingProject, p); + } + } + + @Test + public void testReplace() { + com.google.api.services.cloudresourcemanager.model.Project createdProject = + rpc.create(COMPLETE_PROJECT); + String newName = "new name"; + Map newLabels = ImmutableMap.of("new k1", "new v1"); + com.google.api.services.cloudresourcemanager.model.Project anotherCompleteProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(COMPLETE_PROJECT.getProjectId()) + .setName(newName) + .setLabels(newLabels) + .setProjectNumber(987654321L) + .setCreateTime("2000-01-01T00:00:00.001Z") + .setLifecycleState("DELETE_REQUESTED"); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.replace(anotherCompleteProject); + compareReadWriteFields(anotherCompleteProject, returnedProject); + assertEquals(createdProject.getProjectNumber(), returnedProject.getProjectNumber()); + assertEquals(createdProject.getCreateTime(), returnedProject.getCreateTime()); + assertEquals(createdProject.getLifecycleState(), returnedProject.getLifecycleState()); + com.google.api.services.cloudresourcemanager.model.Project nonexistantProject = + new 
com.google.api.services.cloudresourcemanager.model.Project(); + nonexistantProject.setProjectId("some-project-id-that-does-not-exist"); + try { + rpc.replace(nonexistantProject); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testReplaceWhenDeleteRequested() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + COMPLETE_PROJECT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testReplaceWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + COMPLETE_PROJECT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testReplaceAddingParent() { + rpc.create(COMPLETE_PROJECT); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(COMPLETE_PROJECT.getProjectId()) + .setParent(PARENT); + try { + rpc.replace(anotherProject); + fail("Should fail because the project's 
parent was modified after creation."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertEquals( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it.", + e.getMessage()); + } + } + + @Test + public void testReplaceRemovingParent() { + rpc.create(PROJECT_WITH_PARENT); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + PROJECT_WITH_PARENT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project's parent was unset."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertEquals( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it.", + e.getMessage()); + } + } + + @Test + public void testUndelete() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + assertEquals( + "DELETE_REQUESTED", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + com.google.api.services.cloudresourcemanager.model.Project revivedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); + compareReadWriteFields(COMPLETE_PROJECT, revivedProject); + assertEquals("ACTIVE", revivedProject.getLifecycleState()); + try { + rpc.undelete("invalid-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testUndeleteWhenActive() { + rpc.create(COMPLETE_PROJECT); + try { + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not deleted."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("lifecycle state 
was not DELETE_REQUESTED")); + } + } + + @Test + public void testUndeleteWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + try { + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is in the process of being deleted."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("lifecycle state was not DELETE_REQUESTED")); + } + } + + @Test + public void testGetPolicy() { + assertNull(rpc.getPolicy("nonexistent-project")); + rpc.create(PARTIAL_PROJECT); + com.google.api.services.cloudresourcemanager.model.Policy policy = + rpc.getPolicy(PARTIAL_PROJECT.getProjectId()); + assertEquals(Collections.emptyList(), policy.getBindings()); + assertNotNull(policy.getEtag()); + } + + @Test + public void testReplacePolicy() { + try { + rpc.replacePolicy("nonexistent-project", POLICY); + fail("Project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("project was not found")); + } + rpc.create(PARTIAL_PROJECT); + com.google.api.services.cloudresourcemanager.model.Policy invalidPolicy = + new com.google.api.services.cloudresourcemanager.model.Policy().setEtag("wrong-etag"); + try { + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), invalidPolicy); + fail("Invalid etag."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("Policy etag mismatch")); + } + String originalEtag = rpc.getPolicy(PARTIAL_PROJECT.getProjectId()).getEtag(); + com.google.api.services.cloudresourcemanager.model.Policy newPolicy = + rpc.replacePolicy(PARTIAL_PROJECT.getProjectId(), POLICY); + assertEquals(POLICY.getBindings(), newPolicy.getBindings()); + assertNotNull(newPolicy.getEtag()); + assertNotEquals(originalEtag, newPolicy.getEtag()); + } + + @Test + public void 
testTestPermissions() { + List permissions = ImmutableList.of("resourcemanager.projects.get"); + try { + rpc.testPermissions("nonexistent-project", permissions); + fail("Nonexistent project."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertEquals("Project nonexistent-project not found.", e.getMessage()); + } + rpc.create(PARTIAL_PROJECT); + try { + rpc.testPermissions(PARTIAL_PROJECT.getProjectId(), ImmutableList.of("get")); + fail("Invalid permission."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertEquals("Invalid permission: get", e.getMessage()); + } + assertEquals(ImmutableList.of(true), + rpc.testPermissions(PARTIAL_PROJECT.getProjectId(), permissions)); + } + + @Test + public void testChangeLifecycleStatus() { + assertFalse(RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS")); + rpc.create(COMPLETE_PROJECT); + assertTrue(RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS")); + assertEquals( + "DELETE_IN_PROGRESS", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + try { + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "INVALID_STATE"); + fail("Should fail because of an invalid lifecycle state"); + } catch (IllegalArgumentException e) { + // ignore + } + } + + @Test + public void testRemoveProject() { + assertFalse(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); + rpc.create(COMPLETE_PROJECT); + assertNotNull(rpc.getPolicy(COMPLETE_PROJECT.getProjectId())); + assertTrue(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); + assertNull(rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS)); + assertNull(rpc.getPolicy(COMPLETE_PROJECT.getProjectId())); + } + + private void compareReadWriteFields( + com.google.api.services.cloudresourcemanager.model.Project expected, + 
com.google.api.services.cloudresourcemanager.model.Project actual) { + assertEquals(expected.getProjectId(), actual.getProjectId()); + assertEquals(expected.getName(), actual.getName()); + assertEquals(expected.getLabels(), actual.getLabels()); + assertEquals(expected.getParent(), actual.getParent()); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java new file mode 100644 index 000000000000..e6d0105838b7 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/PolicyTest.java @@ -0,0 +1,79 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.Identity; +import com.google.gcloud.resourcemanager.Policy.Role; +import com.google.gcloud.resourcemanager.Policy.Role.Type; + +import org.junit.Test; + +public class PolicyTest { + + private static final Identity ALL_USERS = Identity.allUsers(); + private static final Identity ALL_AUTH_USERS = Identity.allAuthenticatedUsers(); + private static final Identity USER = Identity.user("abc@gmail.com"); + private static final Identity SERVICE_ACCOUNT = + Identity.serviceAccount("service-account@gmail.com"); + private static final Identity GROUP = Identity.group("group@gmail.com"); + private static final Identity DOMAIN = Identity.domain("google.com"); + private static final Policy SIMPLE_POLICY = Policy.builder() + .addBinding(Role.owner(), ImmutableSet.of(USER)) + .addBinding(Role.viewer(), ImmutableSet.of(ALL_USERS)) + .addBinding(Role.editor(), ImmutableSet.of(ALL_AUTH_USERS, DOMAIN)) + .addBinding(Role.rawRole("some-role"), ImmutableSet.of(SERVICE_ACCOUNT, GROUP)) + .build(); + private static final Policy FULL_POLICY = + new Policy.Builder(SIMPLE_POLICY.bindings(), "etag", 1).build(); + + @Test + public void testIamPolicyToBuilder() { + assertEquals(FULL_POLICY, FULL_POLICY.toBuilder().build()); + assertEquals(SIMPLE_POLICY, SIMPLE_POLICY.toBuilder().build()); + } + + @Test + public void testPolicyToAndFromPb() { + assertEquals(FULL_POLICY, Policy.fromPb(FULL_POLICY.toPb())); + assertEquals(SIMPLE_POLICY, Policy.fromPb(SIMPLE_POLICY.toPb())); + } + + @Test + public void testRoleType() { + assertEquals(Type.OWNER, Role.owner().type()); + assertEquals(Type.EDITOR, Role.editor().type()); + assertEquals(Type.VIEWER, Role.viewer().type()); + assertNull(Role.rawRole("raw-role").type()); + } + + @Test + public 
void testEquals() { + Policy copy = Policy.builder() + .addBinding(Role.owner(), ImmutableSet.of(USER)) + .addBinding(Role.viewer(), ImmutableSet.of(ALL_USERS)) + .addBinding(Role.editor(), ImmutableSet.of(ALL_AUTH_USERS, DOMAIN)) + .addBinding(Role.rawRole("some-role"), ImmutableSet.of(SERVICE_ACCOUNT, GROUP)) + .build(); + assertEquals(SIMPLE_POLICY, copy); + assertNotEquals(SIMPLE_POLICY, FULL_POLICY); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java new file mode 100644 index 000000000000..3aaef8047322 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.Map; + +public class ProjectInfoTest { + + private static final String PROJECT_ID = "project-id"; + private static final String NAME = "myProj"; + private static final Map LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); + private static final Long PROJECT_NUMBER = 123L; + private static final Long CREATE_TIME_MILLIS = 123456789L; + private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; + private static final ProjectInfo.ResourceId PARENT = + new ProjectInfo.ResourceId("id", "organization"); + private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build(); + private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID).build(); + private static final ProjectInfo UNNAMED_PROJECT_FROM_LIST = + PARTIAL_PROJECT_INFO.toBuilder().name("Unnamed").build(); + + @Test + public void testBuilder() { + assertEquals(PROJECT_ID, FULL_PROJECT_INFO.projectId()); + assertEquals(NAME, FULL_PROJECT_INFO.name()); + assertEquals(LABELS, FULL_PROJECT_INFO.labels()); + assertEquals(PROJECT_NUMBER, FULL_PROJECT_INFO.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, FULL_PROJECT_INFO.createTimeMillis()); + assertEquals(STATE, FULL_PROJECT_INFO.state()); + + assertEquals(PROJECT_ID, PARTIAL_PROJECT_INFO.projectId()); + assertEquals(null, PARTIAL_PROJECT_INFO.name()); + assertTrue(PARTIAL_PROJECT_INFO.labels().isEmpty()); + assertEquals(null, PARTIAL_PROJECT_INFO.projectNumber()); + assertEquals(null, PARTIAL_PROJECT_INFO.createTimeMillis()); + assertEquals(null, 
PARTIAL_PROJECT_INFO.state()); + } + + @Test + public void testToBuilder() { + compareProjects(FULL_PROJECT_INFO, FULL_PROJECT_INFO.toBuilder().build()); + compareProjects(PARTIAL_PROJECT_INFO, PARTIAL_PROJECT_INFO.toBuilder().build()); + } + + @Test + public void testToAndFromPb() { + assertTrue(FULL_PROJECT_INFO.toPb().getCreateTime().endsWith("Z")); + compareProjects(FULL_PROJECT_INFO, ProjectInfo.fromPb(FULL_PROJECT_INFO.toPb())); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(PARTIAL_PROJECT_INFO.toPb())); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(UNNAMED_PROJECT_FROM_LIST.toPb())); + } + + @Test + public void testEquals() { + compareProjects( + FULL_PROJECT_INFO, + ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build()); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.builder(PROJECT_ID).build()); + assertNotEquals(FULL_PROJECT_INFO, PARTIAL_PROJECT_INFO); + } + + private void compareProjects(ProjectInfo expected, ProjectInfo value) { + assertEquals(expected, value); + assertEquals(expected.projectId(), value.projectId()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.labels(), value.labels()); + assertEquals(expected.projectNumber(), value.projectNumber()); + assertEquals(expected.createTimeMillis(), value.createTimeMillis()); + assertEquals(expected.state(), value.state()); + assertEquals(expected.parent(), value.parent()); + } +} + diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java new file mode 100644 index 000000000000..882ec77197f3 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java @@ -0,0 +1,223 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.Map; + +public class ProjectTest { + private static final String PROJECT_ID = "project-id"; + private static final String NAME = "myProj"; + private static final Map LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); + private static final Long PROJECT_NUMBER = 123L; + private static final Long CREATE_TIME_MILLIS = 123456789L; + private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; + private static final ProjectInfo PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .build(); + + private ResourceManager serviceMockReturnsOptions = createStrictMock(ResourceManager.class); + private ResourceManagerOptions mockOptions = 
createMock(ResourceManagerOptions.class); + private ResourceManager resourceManager; + private Project expectedProject; + private Project project; + + @Before + public void setUp() { + resourceManager = createStrictMock(ResourceManager.class); + } + + @After + public void tearDown() throws Exception { + verify(resourceManager); + } + + private void initializeExpectedProject(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); + expectedProject = + new Project(serviceMockReturnsOptions, new ProjectInfo.BuilderImpl(PROJECT_INFO)); + } + + private void initializeProject() { + project = new Project(resourceManager, new ProjectInfo.BuilderImpl(PROJECT_INFO)); + } + + @Test + public void testToBuilder() { + initializeExpectedProject(4); + replay(resourceManager); + compareProjects(expectedProject, expectedProject.toBuilder().build()); + } + + @Test + public void testBuilder() { + expect(resourceManager.options()).andReturn(mockOptions).times(7); + replay(resourceManager); + Project.Builder builder = + new Project.Builder(new Project(resourceManager, new ProjectInfo.BuilderImpl("wrong-id"))); + Project project = builder.projectId(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .build(); + assertEquals(PROJECT_ID, project.projectId()); + assertEquals(NAME, project.name()); + assertEquals(LABELS, project.labels()); + assertEquals(PROJECT_NUMBER, project.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, project.createTimeMillis()); + assertEquals(STATE, project.state()); + assertEquals(resourceManager.options(), project.resourceManager().options()); + assertNull(project.parent()); + ResourceId parent = new ResourceId("id", "type"); + project = project.toBuilder() + .clearLabels() + .addLabel("k3", "v3") + .addLabel("k4", "v4") + .removeLabel("k4") + .parent(parent) + .build(); + 
assertEquals(PROJECT_ID, project.projectId()); + assertEquals(NAME, project.name()); + assertEquals(ImmutableMap.of("k3", "v3"), project.labels()); + assertEquals(PROJECT_NUMBER, project.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, project.createTimeMillis()); + assertEquals(STATE, project.state()); + assertEquals(resourceManager.options(), project.resourceManager().options()); + assertEquals(parent, project.parent()); + } + + @Test + public void testGet() { + initializeExpectedProject(1); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(expectedProject); + replay(resourceManager); + Project loadedProject = resourceManager.get(PROJECT_INFO.projectId()); + assertEquals(expectedProject, loadedProject); + } + + @Test + public void testReload() { + initializeExpectedProject(2); + ProjectInfo newInfo = PROJECT_INFO.toBuilder().addLabel("k3", "v3").build(); + Project expectedProject = + new Project(serviceMockReturnsOptions, new ProjectInfo.BuilderImpl(newInfo)); + expect(resourceManager.options()).andReturn(mockOptions); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(expectedProject); + replay(resourceManager); + initializeProject(); + Project newProject = project.reload(); + assertEquals(expectedProject, newProject); + } + + @Test + public void testLoadNull() { + initializeExpectedProject(1); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + replay(resourceManager); + assertNull(resourceManager.get(PROJECT_INFO.projectId())); + } + + @Test + public void testReloadNull() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + replay(resourceManager); + Project reloadedProject = + new Project(resourceManager, new ProjectInfo.BuilderImpl(PROJECT_INFO)).reload(); + assertNull(reloadedProject); + } + + @Test + public void testResourceManager() { + initializeExpectedProject(1); + 
replay(resourceManager); + assertEquals(serviceMockReturnsOptions, expectedProject.resourceManager()); + } + + @Test + public void testDelete() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + resourceManager.delete(PROJECT_INFO.projectId()); + replay(resourceManager); + initializeProject(); + project.delete(); + } + + @Test + public void testUndelete() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + resourceManager.undelete(PROJECT_INFO.projectId()); + replay(resourceManager); + initializeProject(); + project.undelete(); + } + + @Test + public void testReplace() { + initializeExpectedProject(2); + Project expectedReplacedProject = expectedProject.toBuilder().addLabel("k3", "v3").build(); + expect(resourceManager.options()).andReturn(mockOptions).times(2); + expect(resourceManager.replace(anyObject(Project.class))).andReturn(expectedReplacedProject); + replay(resourceManager); + initializeProject(); + Project newProject = + new Project(resourceManager, new ProjectInfo.BuilderImpl(expectedReplacedProject)); + Project actualReplacedProject = newProject.replace(); + compareProjectInfos(expectedReplacedProject, actualReplacedProject); + } + + private void compareProjects(Project expected, Project value) { + assertEquals(expected, value); + compareProjectInfos(expected, value); + assertEquals(expected.resourceManager().options(), value.resourceManager().options()); + } + + private void compareProjectInfos(ProjectInfo expected, ProjectInfo value) { + assertEquals(expected.projectId(), value.projectId()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.labels(), value.labels()); + assertEquals(expected.projectNumber(), value.projectNumber()); + assertEquals(expected.createTimeMillis(), value.createTimeMillis()); + assertEquals(expected.state(), value.state()); + assertEquals(expected.parent(), value.parent()); + } +} diff --git 
a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java new file mode 100644 index 000000000000..388f38f31c35 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class ResourceManagerExceptionTest { + + @Test + public void testResourceManagerException() { + ResourceManagerException exception = new ResourceManagerException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(429, "message"); + assertEquals(429, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(403, "message"); + assertEquals(403, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new ResourceManagerException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + 
assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + ResourceManagerException cause = new ResourceManagerException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + ResourceManagerException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java new file mode 100644 index 000000000000..a69880c5d064 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -0,0 +1,446 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Identity; +import com.google.gcloud.Page; +import com.google.gcloud.resourcemanager.Policy.Role; +import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; +import com.google.gcloud.resourcemanager.ResourceManager.Permission; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectField; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectGetOption; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpc; +import com.google.gcloud.resourcemanager.spi.ResourceManagerRpcFactory; +import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; + +import org.easymock.EasyMock; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public class ResourceManagerImplTest { + + private static final LocalResourceManagerHelper RESOURCE_MANAGER_HELPER = + LocalResourceManagerHelper.create(); + private static final ResourceManager RESOURCE_MANAGER = + RESOURCE_MANAGER_HELPER.options().service(); + private static final ProjectGetOption GET_FIELDS = + ProjectGetOption.fields(ProjectField.NAME, ProjectField.CREATE_TIME); + private static final ProjectListOption LIST_FIELDS = + ProjectListOption.fields(ProjectField.NAME, 
ProjectField.LABELS); + private static final ProjectListOption LIST_FILTER = + ProjectListOption.filter("id:* name:myProject labels.color:blue LABELS.SIZE:*"); + private static final ProjectInfo PARTIAL_PROJECT = ProjectInfo.builder("partial-project").build(); + private static final ResourceId PARENT = new ResourceId("id", "type"); + private static final ProjectInfo COMPLETE_PROJECT = ProjectInfo.builder("complete-project") + .name("name") + .labels(ImmutableMap.of("k1", "v1")) + .parent(PARENT) + .build(); + private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + private static final Policy POLICY = Policy.builder() + .addBinding(Role.owner(), Identity.user("me@gmail.com")) + .addBinding(Role.editor(), Identity.serviceAccount("serviceaccount@gmail.com")) + .build(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @BeforeClass + public static void beforeClass() { + RESOURCE_MANAGER_HELPER.start(); + } + + @Before + public void setUp() { + clearProjects(); + } + + private void clearProjects() { + for (Project project : RESOURCE_MANAGER.list().values()) { + RESOURCE_MANAGER_HELPER.removeProject(project.projectId()); + } + } + + @AfterClass + public static void afterClass() { + RESOURCE_MANAGER_HELPER.stop(); + } + + private void compareReadWriteFields(ProjectInfo expected, ProjectInfo actual) { + assertEquals(expected.projectId(), actual.projectId()); + assertEquals(expected.name(), actual.name()); + assertEquals(expected.labels(), actual.labels()); + assertEquals(expected.parent(), actual.parent()); + } + + @Test + public void testCreate() { + Project returnedProject = RESOURCE_MANAGER.create(PARTIAL_PROJECT); + compareReadWriteFields(PARTIAL_PROJECT, returnedProject); + assertEquals(ProjectInfo.State.ACTIVE, returnedProject.state()); + assertNull(returnedProject.name()); + assertNull(returnedProject.parent()); + assertNotNull(returnedProject.projectNumber()); + assertNotNull(returnedProject.createTimeMillis()); + 
assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + try { + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + fail("Should fail, project already exists."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("A project with the same project ID") + && e.getMessage().endsWith("already exists.")); + } + returnedProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + assertEquals(ProjectInfo.State.ACTIVE, returnedProject.state()); + assertNotNull(returnedProject.projectNumber()); + assertNotNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testDelete() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); + assertEquals(ProjectInfo.State.DELETE_REQUESTED, + RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); + try { + RESOURCE_MANAGER.delete("some-nonexistant-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("not found.")); + } + } + + @Test + public void testGet() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.projectId()); + assertNull(RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId())); + } + + @Test + public void testGetWithOptions() { + Project originalProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId(), GET_FIELDS); + assertFalse(COMPLETE_PROJECT.equals(returnedProject)); + assertEquals(COMPLETE_PROJECT.projectId(), 
returnedProject.projectId()); + assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); + assertEquals(originalProject.createTimeMillis(), returnedProject.createTimeMillis()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertTrue(returnedProject.labels().isEmpty()); + assertEquals(RESOURCE_MANAGER, originalProject.resourceManager()); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testList() { + Page projects = RESOURCE_MANAGER.list(); + assertFalse(projects.values().iterator().hasNext()); + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + for (Project p : RESOURCE_MANAGER.list().values()) { + if (p.projectId().equals(PARTIAL_PROJECT.projectId())) { + compareReadWriteFields(PARTIAL_PROJECT, p); + } else if (p.projectId().equals(COMPLETE_PROJECT.projectId())) { + compareReadWriteFields(COMPLETE_PROJECT, p); + } else { + fail("Some unexpected project returned by list."); + } + assertSame(RESOURCE_MANAGER, p.resourceManager()); + } + } + + @Test + public void testListPaging() { + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Page page = RESOURCE_MANAGER.list(ProjectListOption.pageSize(1)); + assertNotNull(page.nextPageCursor()); + Iterator iterator = page.values().iterator(); + compareReadWriteFields(COMPLETE_PROJECT, iterator.next()); + assertFalse(iterator.hasNext()); + page = page.nextPage(); + iterator = page.values().iterator(); + compareReadWriteFields(PARTIAL_PROJECT, iterator.next()); + assertFalse(iterator.hasNext()); + assertNull(page.nextPageCursor()); + } + + @Test + public void testListFieldOptions() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Page projects = RESOURCE_MANAGER.list(LIST_FIELDS); + Project returnedProject = projects.iterateAll().next(); + assertEquals(COMPLETE_PROJECT.projectId(), returnedProject.projectId()); + 
assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); + assertEquals(COMPLETE_PROJECT.labels(), returnedProject.labels()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testListPagingWithFieldOptions() { + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Page projects = RESOURCE_MANAGER.list(LIST_FIELDS, ProjectListOption.pageSize(1)); + assertNotNull(projects.nextPageCursor()); + Iterator iterator = projects.values().iterator(); + Project returnedProject = iterator.next(); + assertEquals(COMPLETE_PROJECT.projectId(), returnedProject.projectId()); + assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); + assertEquals(COMPLETE_PROJECT.labels(), returnedProject.labels()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + assertFalse(iterator.hasNext()); + projects = projects.nextPage(); + iterator = projects.values().iterator(); + returnedProject = iterator.next(); + assertEquals(PARTIAL_PROJECT.projectId(), returnedProject.projectId()); + assertEquals(PARTIAL_PROJECT.name(), returnedProject.name()); + assertEquals(PARTIAL_PROJECT.labels(), returnedProject.labels()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + assertFalse(iterator.hasNext()); + assertNull(projects.nextPageCursor()); + } + + @Test + public void testListFilterOptions() { + ProjectInfo matchingProject = 
ProjectInfo.builder("matching-project") + .name("MyProject") + .labels(ImmutableMap.of("color", "blue", "size", "big")) + .build(); + ProjectInfo nonMatchingProject1 = ProjectInfo.builder("non-matching-project1") + .name("myProject") + .labels(ImmutableMap.of("color", "blue")) + .build(); + ProjectInfo nonMatchingProject2 = ProjectInfo.builder("non-matching-project2") + .name("myProj") + .labels(ImmutableMap.of("color", "blue", "size", "big")) + .build(); + ProjectInfo nonMatchingProject3 = ProjectInfo.builder("non-matching-project3").build(); + RESOURCE_MANAGER.create(matchingProject); + RESOURCE_MANAGER.create(nonMatchingProject1); + RESOURCE_MANAGER.create(nonMatchingProject2); + RESOURCE_MANAGER.create(nonMatchingProject3); + for (Project p : RESOURCE_MANAGER.list(LIST_FILTER).values()) { + assertFalse(p.equals(nonMatchingProject1)); + assertFalse(p.equals(nonMatchingProject2)); + compareReadWriteFields(matchingProject, p); + assertSame(RESOURCE_MANAGER, p.resourceManager()); + } + } + + @Test + public void testReplace() { + ProjectInfo createdProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Map newLabels = ImmutableMap.of("new k1", "new v1"); + ProjectInfo anotherCompleteProject = ProjectInfo.builder(COMPLETE_PROJECT.projectId()) + .labels(newLabels) + .projectNumber(987654321L) + .createTimeMillis(230682061315L) + .state(ProjectInfo.State.DELETE_REQUESTED) + .parent(createdProject.parent()) + .build(); + Project returnedProject = RESOURCE_MANAGER.replace(anotherCompleteProject); + compareReadWriteFields(anotherCompleteProject, returnedProject); + assertEquals(createdProject.projectNumber(), returnedProject.projectNumber()); + assertEquals(createdProject.createTimeMillis(), returnedProject.createTimeMillis()); + assertEquals(createdProject.state(), returnedProject.state()); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + ProjectInfo nonexistantProject = + ProjectInfo.builder("some-project-id-that-does-not-exist").build(); + try { + 
RESOURCE_MANAGER.replace(nonexistantProject); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testUndelete() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); + assertEquals( + ProjectInfo.State.DELETE_REQUESTED, + RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); + RESOURCE_MANAGER.undelete(COMPLETE_PROJECT.projectId()); + ProjectInfo revivedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + compareReadWriteFields(COMPLETE_PROJECT, revivedProject); + assertEquals(ProjectInfo.State.ACTIVE, revivedProject.state()); + try { + RESOURCE_MANAGER.undelete("invalid-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testGetPolicy() { + assertNull(RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.projectId())); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.replacePolicy(COMPLETE_PROJECT.projectId(), POLICY); + Policy retrieved = RESOURCE_MANAGER.getPolicy(COMPLETE_PROJECT.projectId()); + assertEquals(POLICY.bindings(), retrieved.bindings()); + assertNotNull(retrieved.etag()); + assertEquals(0, retrieved.version().intValue()); + } + + @Test + public void testReplacePolicy() { + try { + RESOURCE_MANAGER.replacePolicy("nonexistent-project", POLICY); + fail("Project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().endsWith("project was not found.")); + } + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + Policy oldPolicy = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.projectId()); + RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), POLICY); + try { + 
RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), oldPolicy); + fail("Policy with an invalid etag didn't cause error."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().contains("Policy etag mismatch")); + } + String originalEtag = RESOURCE_MANAGER.getPolicy(PARTIAL_PROJECT.projectId()).etag(); + Policy newPolicy = RESOURCE_MANAGER.replacePolicy(PARTIAL_PROJECT.projectId(), POLICY); + assertEquals(POLICY.bindings(), newPolicy.bindings()); + assertNotNull(newPolicy.etag()); + assertNotEquals(originalEtag, newPolicy.etag()); + } + + @Test + public void testTestPermissions() { + List permissions = ImmutableList.of(Permission.GET); + try { + RESOURCE_MANAGER.testPermissions("nonexistent-project", permissions); + fail("Nonexistent project"); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertEquals("Project nonexistent-project not found.", e.getMessage()); + } + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + assertEquals(ImmutableList.of(true), + RESOURCE_MANAGER.testPermissions(PARTIAL_PROJECT.projectId(), permissions)); + assertEquals(ImmutableList.of(true, true), RESOURCE_MANAGER.testPermissions( + PARTIAL_PROJECT.projectId(), Permission.DELETE, Permission.GET)); + } + + @Test + public void testRetryableException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = ResourceManagerOptions.builder() + .serviceRpcFactory(rpcFactoryMock) + .build() + .service(); + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new ResourceManagerException(500, "Internal Error")) + 
.andReturn(PARTIAL_PROJECT.toPb()); + EasyMock.replay(resourceManagerRpcMock); + Project returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + assertEquals( + new Project(resourceManagerMock, new ProjectInfo.BuilderImpl(PARTIAL_PROJECT)), + returnedProject); + } + + @Test + public void testNonRetryableException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = ResourceManagerOptions.builder() + .serviceRpcFactory(rpcFactoryMock) + .build() + .service(); + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new ResourceManagerException( + 403, "Project " + PARTIAL_PROJECT.projectId() + " not found.")) + .once(); + EasyMock.replay(resourceManagerRpcMock); + thrown.expect(ResourceManagerException.class); + thrown.expectMessage("Project " + PARTIAL_PROJECT.projectId() + " not found."); + resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + } + + @Test + public void testRuntimeException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = + ResourceManagerOptions.builder().serviceRpcFactory(rpcFactoryMock).build().service(); + String exceptionMessage = "Artificial runtime exception"; + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new 
RuntimeException(exceptionMessage)); + EasyMock.replay(resourceManagerRpcMock); + thrown.expect(ResourceManagerException.class); + thrown.expectMessage(exceptionMessage); + resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java new file mode 100644 index 000000000000..f71f5d7989d6 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java @@ -0,0 +1,99 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.Identity; +import com.google.gcloud.PageImpl; +import com.google.gcloud.RetryParams; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.util.Collections; + +public class SerializationTest { + +private static final ResourceManager RESOURCE_MANAGER = + ResourceManagerOptions.defaultInstance().service(); + private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder("id1").build(); + private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder("id") + .name("name") + .labels(ImmutableMap.of("key", "value")) + .projectNumber(123L) + .state(ProjectInfo.State.ACTIVE) + .createTimeMillis(1234L) + .build(); + private static final Project PROJECT = + new Project(RESOURCE_MANAGER, new ProjectInfo.BuilderImpl(FULL_PROJECT_INFO)); + private static final PageImpl PAGE_RESULT = + new PageImpl<>(null, "c", Collections.singletonList(PROJECT)); + private static final ResourceManager.ProjectGetOption PROJECT_GET_OPTION = + ResourceManager.ProjectGetOption.fields(ResourceManager.ProjectField.NAME); + private static final ResourceManager.ProjectListOption PROJECT_LIST_OPTION = + ResourceManager.ProjectListOption.filter("name:*"); + private static final Policy POLICY = Policy.builder() + .addBinding(Policy.Role.viewer(), ImmutableSet.of(Identity.user("abc@gmail.com"))) + .build(); + + @Test + public void testServiceOptions() throws Exception { + ResourceManagerOptions options = ResourceManagerOptions.builder().build(); + ResourceManagerOptions serializedCopy = serializeAndDeserialize(options); + 
assertEquals(options, serializedCopy); + options = options.toBuilder() + .projectId("some-unnecessary-project-ID") + .retryParams(RetryParams.defaultInstance()) + .build(); + serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + } + + @Test + public void testModelAndRequests() throws Exception { + Serializable[] objects = {PARTIAL_PROJECT_INFO, FULL_PROJECT_INFO, PROJECT, PAGE_RESULT, + PROJECT_GET_OPTION, PROJECT_LIST_OPTION, POLICY}; + for (Serializable obj : objects) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @SuppressWarnings("unchecked") + private T serializeAndDeserialize(T obj) throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-storage/README.md b/gcloud-java-storage/README.md index 717fd1f1f3e4..0ee05b31c10c 100644 --- a/gcloud-java-storage/README.md +++ b/gcloud-java-storage/README.md @@ -6,6 +6,8 @@ Java idiomatic client for [Google Cloud Storage] (https://cloud.google.com/stora [![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-storage.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-storage.svg) +[![Codacy 
Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java) +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969) - [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) - [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html) @@ -15,19 +17,27 @@ Java idiomatic client for [Google Cloud Storage] (https://cloud.google.com/stora Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml com.google.gcloud gcloud-java-storage - 0.0.10 + 0.1.5 ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-storage:0.1.5' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.1.5" +``` Example Application ------------------- -[`StorageExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java) is a simple command line interface that provides some of Cloud Storage's functionality. Read more about using the application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/StorageExample.html). +[`StorageExample`](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/StorageExample.java) is a simple command line interface that provides some of Cloud Storage's functionality. Read more about using the application on the [`StorageExample` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/storage/StorageExample.html). 
Authentication -------------- @@ -48,32 +58,102 @@ Cloud Storage for your project. See the ``gcloud-java`` API [storage documentation][storage-api] to learn how to interact with the Cloud Storage using this Client Library. -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere. +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Storage JSON API enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to use Google Cloud Storage. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-storage` library. See the [Quickstart](#quickstart) section to add `gcloud-java-storage` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Storage, you must create a service object with credentials. You can then make API calls by calling methods on the Storage service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). 
These credentials are automatically inferred from your environment, so you only need the following code to create your service object: + +```java +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +Storage storage = StorageOptions.defaultInstance().service(); +``` + +For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Storing data +Stored objects are called "blobs" in `gcloud-java` and are organized into containers called "buckets". `Blob`, a subclass of `BlobInfo`, adds a layer of service-related functionality over `BlobInfo`. Similarly, `Bucket` adds a layer of service-related functionality over `BucketInfo`. In this code snippet, we will create a new bucket and upload a blob to that bucket. + +Add the following imports at the top of your file: ```java import static java.nio.charset.StandardCharsets.UTF_8; import com.google.gcloud.storage.Blob; -import com.google.gcloud.storage.Storage; -import com.google.gcloud.storage.StorageOptions; +import com.google.gcloud.storage.Bucket; +import com.google.gcloud.storage.BucketInfo; +``` + +Then add the following code to create a bucket and upload a simple blob. + +*Important: Bucket names have to be globally unique. If you choose a bucket name that already exists, you'll get a helpful error message telling you to choose another name. In the code below, replace "my_unique_bucket" with a unique bucket name. 
See more about naming rules [here](https://cloud.google.com/storage/docs/bucket-naming?hl=en#requirements).* -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; - -Storage storage = StorageOptions.getDefaultInstance().service(); -Blob blob = new Blob(storage, "bucket", "blob_name"); -if (!blob.exists()) { - storage2.create(blob.info(), "Hello, Cloud Storage!".getBytes(UTF_8)); -} else { - System.out.println("Updating content for " + blob.info().name()); - byte[] prevContent = blob.content(); - System.out.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); +```java +// Create a bucket +String bucketName = "my_unique_bucket"; // Change this to something unique +Bucket bucket = storage.create(BucketInfo.of(bucketName)); + +// Upload a blob to the newly created bucket +BlobId blobId = BlobId.of(bucketName, "my_blob_name"); +Blob blob = storage.create( + "my_blob_name", "a simple blob".getBytes(UTF_8), "text/plain"); +``` + +At this point, you will be able to see your newly created bucket and blob on the Google Developers Console. + +#### Retrieving data +Now that we have content uploaded to the server, we can see how to read data from the server. Add the following line to your program to get back the blob we uploaded. + +```java +String blobContent = new String(blob.content(), UTF_8); +``` + +#### Listing buckets and contents of buckets +Suppose that you've added more buckets and blobs, and now you want to see the names of your buckets and the contents of each one. Add the following imports: + +```java +import java.util.Iterator; +``` + +Then add the following code to list all your buckets and all the blobs inside your newly created bucket. 
+ +```java +// List all your buckets +Iterator bucketIterator = storage.list().iterateAll(); +System.out.println("My buckets:"); +while (bucketIterator.hasNext()) { + System.out.println(bucketIterator.next()); +} + +// List the blobs in a particular bucket +Iterator blobIterator = bucket.list().iterateAll(); +System.out.println("My blobs:"); +while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); } ``` +#### Complete source code + +In +[CreateAndListBucketsAndBlobs.java](../gcloud-java-examples/src/main/java/com/google/gcloud/examples/storage/snippets/CreateAndListBucketsAndBlobs.java) +we put together all the code shown above into one program. The program assumes that you are +running on Compute Engine or from your own desktop. To run the example on App Engine, simply move +the code from the main method to your application's servlet class and change the print statements to +display on your webpage. + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -100,7 +180,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -109,6 +191,7 @@ Apache 2.0 - See [LICENSE] for more information. 
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-storage [cloud-platform]: https://cloud.google.com/ @@ -117,3 +200,4 @@ Apache 2.0 - See [LICENSE] for more information. [cloud-storage-docs]: https://cloud.google.com/storage/docs/overview [cloud-storage-create-bucket]: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets [storage-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html +[cloud-storage-activation]:https://cloud.google.com/storage/docs/signup?hl=en diff --git a/gcloud-java-storage/pom.xml b/gcloud-java-storage/pom.xml index 82b67277f4fe..d5f0f6d98660 100644 --- a/gcloud-java-storage/pom.xml +++ b/gcloud-java-storage/pom.xml @@ -1,7 +1,6 @@ 4.0.0 - com.google.gcloud gcloud-java-storage jar GCloud Java storage @@ -11,7 +10,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.6-SNAPSHOT gcloud-java-storage @@ -25,13 +24,17 @@ com.google.apis google-api-services-storage - v1-rev33-1.20.0 + v1-rev62-1.21.0 compile - - com.google.guava - guava-jdk5 - + + com.google.guava + guava-jdk5 + + + com.google.api-client + google-api-client + @@ -43,7 +46,7 @@ org.easymock easymock - 3.3 + 3.4 test diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java index 3d9731352400..4203d79351b7 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java @@ -23,7 +23,10 @@ import java.util.Objects; /** - * Access Control List on for buckets 
or blobs. + * Access Control List for buckets or blobs. + * + * @see + * About Access Control Lists */ public final class Acl implements Serializable { @@ -36,6 +39,9 @@ public enum Role { OWNER, READER, WRITER } + /** + * Base class for Access Control List entities. + */ public abstract static class Entity implements Serializable { private static final long serialVersionUID = -2707407252771255840L; @@ -52,23 +58,29 @@ public enum Type { this.value = value; } + /** + * Returns the type of entity. + */ public Type type() { return type; } + /** + * Returns the entity's value. + */ protected String value() { return value; } @Override - public boolean equals(Object o) { - if (this == o) { + public boolean equals(Object obj) { + if (this == obj) { return true; } - if (o == null || getClass() != o.getClass()) { + if (obj == null || getClass() != obj.getClass()) { return false; } - Entity entity = (Entity) o; + Entity entity = (Entity) obj; return Objects.equals(type, entity.type) && Objects.equals(value, entity.value); } @@ -112,42 +124,75 @@ static Entity fromPb(String entity) { } } + /** + * Class for ACL Domain entities. + */ public static final class Domain extends Entity { private static final long serialVersionUID = -3033025857280447253L; + /** + * Creates a domain entity. + * + * @param domain the domain associated to this entity + */ public Domain(String domain) { super(Type.DOMAIN, domain); } + /** + * Returns the domain associated to this entity. + */ public String domain() { return value(); } } + /** + * Class for ACL Group entities. + */ public static final class Group extends Entity { private static final long serialVersionUID = -1660987136294408826L; + /** + * Creates a group entity. + * + * @param email the group email + */ public Group(String email) { super(Type.GROUP, email); } + /** + * Returns the group email. + */ public String email() { return value(); } } + /** + * Class for ACL User entities. 
+ */ public static final class User extends Entity { private static final long serialVersionUID = 3076518036392737008L; private static final String ALL_USERS = "allUsers"; private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers"; + /** + * Creates a user entity. + * + * @param email the user email + */ public User(String email) { super(Type.USER, email); } + /** + * Returns the user email. + */ public String email() { return value(); } @@ -174,27 +219,42 @@ public static User ofAllAuthenticatedUsers() { } } + /** + * Class for ACL Project entities. + */ public static final class Project extends Entity { private static final long serialVersionUID = 7933776866530023027L; - private final ProjectRole pRole; + private final ProjectRole projectRole; private final String projectId; public enum ProjectRole { OWNERS, EDITORS, VIEWERS } - public Project(ProjectRole pRole, String projectId) { - super(Type.PROJECT, pRole.name().toLowerCase() + "-" + projectId); - this.pRole = pRole; + /** + * Creates a project entity. + * + * @param projectRole a role in the project, used to select project's teams + * @param projectId id of the project + */ + public Project(ProjectRole projectRole, String projectId) { + super(Type.PROJECT, projectRole.name().toLowerCase() + "-" + projectId); + this.projectRole = projectRole; this.projectId = projectId; } + /** + * Returns the role in the project for this entity. + */ public ProjectRole projectRole() { - return pRole; + return projectRole; } + /** + * Returns the project id for this entity. + */ public String projectId() { return projectId; } @@ -214,19 +274,35 @@ String toPb() { } } - public Acl(Entity entity, Role role) { + private Acl(Entity entity, Role role) { this.entity = entity; this.role = role; } + /** + * Returns the entity for this ACL object. + */ public Entity entity() { return entity; } + /** + * Returns the role associated to the entity in this ACL object. 
+ */ public Role role() { return role; } + /** + * Returns an Acl object. + * + * @param entity the entity for this ACL object + * @param role the role to associate to the {@code entity} object + */ + public static Acl of(Entity entity, Role role) { + return new Acl(entity, role); + } + @Override public int hashCode() { return Objects.hash(entity, role); @@ -261,11 +337,11 @@ ObjectAccessControl toObjectPb() { static Acl fromPb(ObjectAccessControl objectAccessControl) { Role role = Role.valueOf(objectAccessControl.getRole()); - return new Acl(Entity.fromPb(objectAccessControl.getEntity()), role); + return Acl.of(Entity.fromPb(objectAccessControl.getEntity()), role); } static Acl fromPb(BucketAccessControl bucketAccessControl) { Role role = Role.valueOf(bucketAccessControl.getRole()); - return new Acl(Entity.fromPb(bucketAccessControl.getEntity()), role); + return Acl.of(Entity.fromPb(bucketAccessControl.getEntity()), role); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java deleted file mode 100644 index fdcd84705555..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.gcloud.storage; - -import java.io.Serializable; -import java.util.Collections; -import java.util.Iterator; -import java.util.Objects; - -/** - * Base implementation for Google Cloud storage list result. - */ -public class BaseListResult implements ListResult, Serializable { - - private static final long serialVersionUID = -6937287874908527950L; - - private final String cursor; - private final Iterable results; - private final NextPageFetcher pageFetcher; - - public interface NextPageFetcher extends Serializable { - ListResult nextPage(); - } - - public BaseListResult(NextPageFetcher pageFetcher, String cursor, Iterable results) { - this.pageFetcher = pageFetcher; - this.cursor = cursor; - this.results = results; - } - - @Override - public String nextPageCursor() { - return cursor; - } - - @Override - public ListResult nextPage() { - if (cursor == null || pageFetcher == null) { - return null; - } - return pageFetcher.nextPage(); - } - - @Override - public Iterator iterator() { - return results == null ? 
Collections.emptyIterator() : results.iterator(); - } - - @Override - public int hashCode() { - return Objects.hash(cursor, results); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BaseListResult)) { - return false; - } - BaseListResult other = (BaseListResult) obj; - return Objects.equals(cursor, other.cursor) - && Objects.equals(results, other.results); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java index 6e815648497a..bf77c731754e 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BlobSourceOption; import com.google.gcloud.storage.Storage.BlobTargetOption; @@ -35,13 +36,13 @@ public final class BatchRequest implements Serializable { private final Map> toDelete; private final Map> toUpdate; - private final Map> toGet; + private final Map> toGet; public static class Builder { private Map> toDelete = new LinkedHashMap<>(); private Map> toUpdate = new LinkedHashMap<>(); - private Map> toGet = new LinkedHashMap<>(); + private Map> toGet = new LinkedHashMap<>(); private Builder() {} @@ -72,7 +73,7 @@ public Builder update(BlobInfo blobInfo, BlobTargetOption... options) { /** * Retrieve metadata for the given blob. */ - public Builder get(String bucket, String blob, BlobSourceOption... options) { + public Builder get(String bucket, String blob, BlobGetOption... options) { toGet.put(BlobId.of(bucket, blob), Lists.newArrayList(options)); return this; } @@ -80,7 +81,7 @@ public Builder get(String bucket, String blob, BlobSourceOption... options) { /** * Retrieve metadata for the given blob. 
*/ - public Builder get(BlobId blob, BlobSourceOption... options) { + public Builder get(BlobId blob, BlobGetOption... options) { toGet.put(blob, Lists.newArrayList(options)); return this; } @@ -120,7 +121,7 @@ public Map> toUpdate() { return toUpdate; } - public Map> toGet() { + public Map> toGet() { return toGet; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java index 9ac799e74a15..fe5f6f5743c8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java @@ -31,8 +31,8 @@ public final class BatchResponse implements Serializable { private static final long serialVersionUID = 1057416839397037706L; private final List> deleteResult; - private final List> updateResult; - private final List> getResult; + private final List> updateResult; + private final List> getResult; public static class Result implements Serializable { @@ -113,8 +113,8 @@ static Result empty() { } } - public BatchResponse(List> deleteResult, List> updateResult, - List> getResult) { + BatchResponse(List> deleteResult, List> updateResult, + List> getResult) { this.deleteResult = ImmutableList.copyOf(deleteResult); this.updateResult = ImmutableList.copyOf(updateResult); this.getResult = ImmutableList.copyOf(getResult); @@ -146,14 +146,14 @@ public List> deletes() { /** * Returns the results for the update operations using the request order. */ - public List> updates() { + public List> updates() { return updateResult; } /** * Returns the results for the get operations using the request order. 
*/ - public List> gets() { + public List> gets() { return getResult; } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index 8f988922aad9..b6f668dada82 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -16,38 +16,58 @@ package com.google.gcloud.storage; -import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.gcloud.storage.Blob.BlobSourceOption.convert; +import static com.google.gcloud.storage.Blob.BlobSourceOption.toGetOptions; +import static com.google.gcloud.storage.Blob.BlobSourceOption.toSourceOptions; +import com.google.api.services.storage.model.StorageObject; import com.google.common.base.Function; -import com.google.common.collect.Lists; -import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; +import java.io.IOException; +import java.io.ObjectInputStream; import java.net.URL; -import java.util.Collections; +import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; /** * A Google cloud storage object. * - *

    - * Objects of this class are immutable. Operations that modify the blob like {@link #update} and + *

    Objects of this class are immutable. Operations that modify the blob like {@link #update} and * {@link #copyTo} return a new object. To get a {@code Blob} object with the most recent - * information use {@link #reload}. + * information use {@link #reload}. {@code Blob} adds a layer of service-related functionality over + * {@link BlobInfo}. *

    */ -public final class Blob { +public class Blob extends BlobInfo { - private final Storage storage; - private final BlobInfo info; + private static final long serialVersionUID = -6806832496717441434L; + private final StorageOptions options; + private transient Storage storage; + + static final Function, Blob> BLOB_FROM_PB_FUNCTION = + new Function, Blob>() { + @Override + public Blob apply(Tuple pb) { + return Blob.fromPb(pb.x(), pb.y()); + } + }; + + /** + * Class for specifying blob source options when {@code Blob} methods are used. + */ public static class BlobSourceOption extends Option { private static final long serialVersionUID = 214616862061934846L; @@ -56,7 +76,7 @@ private BlobSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } - private Storage.BlobSourceOption convert(BlobInfo blobInfo) { + private Storage.BlobSourceOption toSourceOptions(BlobInfo blobInfo) { switch (rpcOption()) { case IF_GENERATION_MATCH: return Storage.BlobSourceOption.generationMatch(blobInfo.generation()); @@ -71,84 +91,222 @@ private Storage.BlobSourceOption convert(BlobInfo blobInfo) { } } + private Storage.BlobGetOption toGetOption(BlobInfo blobInfo) { + switch (rpcOption()) { + case IF_GENERATION_MATCH: + return Storage.BlobGetOption.generationMatch(blobInfo.generation()); + case IF_GENERATION_NOT_MATCH: + return Storage.BlobGetOption.generationNotMatch(blobInfo.generation()); + case IF_METAGENERATION_MATCH: + return Storage.BlobGetOption.metagenerationMatch(blobInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BlobGetOption.metagenerationNotMatch(blobInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + /** + * Returns an option for blob's generation match. If this option is used the request will fail + * if generation does not match. 
+ */ public static BlobSourceOption generationMatch() { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's generation mismatch. If this option is used the request will + * fail if generation matches. + */ public static BlobSourceOption generationNotMatch() { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobSourceOption metagenerationMatch() { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. + */ public static BlobSourceOption metagenerationNotMatch() { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } - static Storage.BlobSourceOption[] convert(BlobInfo blobInfo, BlobSourceOption... options) { + static Storage.BlobSourceOption[] toSourceOptions(BlobInfo blobInfo, + BlobSourceOption... options) { Storage.BlobSourceOption[] convertedOptions = new Storage.BlobSourceOption[options.length]; int index = 0; for (BlobSourceOption option : options) { - convertedOptions[index++] = option.convert(blobInfo); + convertedOptions[index++] = option.toSourceOptions(blobInfo); } return convertedOptions; } - } - /** - * Constructs a {@code Blob} object for the provided {@code BlobInfo}. The storage service is used - * to issue requests. - * - * @param storage the storage service used for issuing requests - * @param info blob's info - */ - public Blob(Storage storage, BlobInfo info) { - this.storage = checkNotNull(storage); - this.info = checkNotNull(info); + static Storage.BlobGetOption[] toGetOptions(BlobInfo blobInfo, BlobSourceOption... 
options) { + Storage.BlobGetOption[] convertedOptions = new Storage.BlobGetOption[options.length]; + int index = 0; + for (BlobSourceOption option : options) { + convertedOptions[index++] = option.toGetOption(blobInfo); + } + return convertedOptions; + } } /** - * Creates a {@code Blob} object for the provided bucket and blob names. Performs an RPC call to - * get the latest blob information. - * - * @param storage the storage service used for issuing requests - * @param bucket bucket's name - * @param blob blob's name - * @return the {@code Blob} object or {@code null} if not found. - * @throws StorageException upon failure + * Builder for {@code Blob}. */ - public static Blob load(Storage storage, String bucket, String blob) { - return load(storage, BlobId.of(bucket, blob)); - } + public static class Builder extends BlobInfo.Builder { - /** - * Creates a {@code Blob} object for the provided {@code blobId}. Performs an RPC call to get the - * latest blob information. - * - * @param storage the storage service used for issuing requests - * @param blobId blob's identifier - * @return the {@code Blob} object or {@code null} if not found. - * @throws StorageException upon failure - */ - public static Blob load(Storage storage, BlobId blobId) { - BlobInfo info = storage.get(blobId); - return info != null ? new Blob(storage, info) : null; - } + private final Storage storage; + private final BlobInfo.BuilderImpl infoBuilder; - /** - * Returns the blob's information. 
- */ - public BlobInfo info() { - return info; + Builder(Blob blob) { + this.storage = blob.storage(); + this.infoBuilder = new BlobInfo.BuilderImpl(blob); + } + + @Override + public Builder blobId(BlobId blobId) { + infoBuilder.blobId(blobId); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + public Builder contentType(String contentType) { + infoBuilder.contentType(contentType); + return this; + } + + @Override + public Builder contentDisposition(String contentDisposition) { + infoBuilder.contentDisposition(contentDisposition); + return this; + } + + @Override + public Builder contentLanguage(String contentLanguage) { + infoBuilder.contentLanguage(contentLanguage); + return this; + } + + @Override + public Builder contentEncoding(String contentEncoding) { + infoBuilder.contentEncoding(contentEncoding); + return this; + } + + @Override + Builder componentCount(Integer componentCount) { + infoBuilder.componentCount(componentCount); + return this; + } + + @Override + public Builder cacheControl(String cacheControl) { + infoBuilder.cacheControl(cacheControl); + return this; + } + + @Override + public Builder acl(List acl) { + infoBuilder.acl(acl); + return this; + } + + @Override + Builder owner(Acl.Entity owner) { + infoBuilder.owner(owner); + return this; + } + + @Override + Builder size(Long size) { + infoBuilder.size(size); + return this; + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + public Builder md5(String md5) { + infoBuilder.md5(md5); + return this; + } + + @Override + public Builder crc32c(String crc32c) { + infoBuilder.crc32c(crc32c); + return this; + } + + @Override + Builder mediaLink(String mediaLink) { + infoBuilder.mediaLink(mediaLink); + return this; + } + + @Override + public Builder metadata(Map metadata) { + 
infoBuilder.metadata(metadata); + return this; + } + + @Override + Builder metageneration(Long metageneration) { + infoBuilder.metageneration(metageneration); + return this; + } + + @Override + Builder deleteTime(Long deleteTime) { + infoBuilder.deleteTime(deleteTime); + return this; + } + + @Override + Builder updateTime(Long updateTime) { + infoBuilder.updateTime(updateTime); + return this; + } + + @Override + Builder isDirectory(boolean isDirectory) { + infoBuilder.isDirectory(isDirectory); + return this; + } + + @Override + public Blob build() { + return new Blob(storage, infoBuilder); + } } - /** - * Returns the blob's id. - */ - public BlobId id() { - return info.blobId(); + Blob(Storage storage, BlobInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.storage = checkNotNull(storage); + this.options = storage.options(); } /** @@ -159,7 +317,10 @@ public BlobId id() { * @throws StorageException upon failure */ public boolean exists(BlobSourceOption... options) { - return storage.get(info.blobId(), convert(info, options)) != null; + int length = options.length; + Storage.BlobGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); + getOptions[length] = Storage.BlobGetOption.fields(); + return storage.get(blobId(), getOptions) != null; } /** @@ -169,18 +330,18 @@ public boolean exists(BlobSourceOption... options) { * @throws StorageException upon failure */ public byte[] content(Storage.BlobSourceOption... options) { - return storage.readAllBytes(info.blobId(), options); + return storage.readAllBytes(blobId(), options); } /** - * Fetches current blob's latest information. + * Fetches current blob's latest information. Returns {@code null} if the blob does not exist. * * @param options blob read options - * @return a {@code Blob} object with latest information + * @return a {@code Blob} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Blob reload(BlobSourceOption... 
options) { - return new Blob(storage, storage.get(info.blobId(), convert(info, options))); + return storage.get(blobId(), toGetOptions(this, options)); } /** @@ -189,27 +350,37 @@ public Blob reload(BlobSourceOption... options) { * {@link #delete} operations. A new {@code Blob} object is returned. By default no checks are * made on the metadata generation of the current blob. If you want to update the information only * if the current blob metadata are at their latest version use the {@code metagenerationMatch} - * option: {@code blob.update(newInfo, BlobTargetOption.metagenerationMatch())}. - *

    - * Original metadata are merged with metadata in the provided {@code blobInfo}. To replace - * metadata instead you first have to unset them. Unsetting metadata can be done by setting the - * provided {@code blobInfo}'s metadata to {@code null}. + * option: {@code newBlob.update(BlobTargetOption.metagenerationMatch())}. + * + *

    Original metadata are merged with metadata in the provided in this {@code blob}. To replace + * metadata instead you first have to unset them. Unsetting metadata can be done by setting this + * {@code blob}'s metadata to {@code null}. *

    - *

    - * Example usage of replacing blob's metadata: - *

        {@code blob.update(blob.info().toBuilder().metadata(null).build());}
    -   *    {@code blob.update(blob.info().toBuilder().metadata(newMetadata).build());}
    +   *
    +   * 

    Example usage of replacing blob's metadata: + *

     {@code
    +   * blob.toBuilder().metadata(null).build().update();
    +   * blob.toBuilder().metadata(newMetadata).build().update();
    +   * }
        * 
    * - * @param blobInfo new blob's information. Bucket and blob names must match the current ones * @param options update options * @return a {@code Blob} object with updated information * @throws StorageException upon failure */ - public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { - checkArgument(Objects.equals(blobInfo.bucket(), info.bucket()), "Bucket name must match"); - checkArgument(Objects.equals(blobInfo.name(), info.name()), "Blob name must match"); - return new Blob(storage, storage.update(blobInfo, options)); + public Blob update(BlobTargetOption... options) { + return storage.update(this, options); + } + + /** + * Deletes this blob. + * + * @param options blob delete options + * @return {@code true} if blob was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + public boolean delete(BlobSourceOption... options) { + return storage.delete(blobId(), toSourceOptions(this, options)); } /** @@ -223,22 +394,14 @@ public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { * @throws StorageException upon failure */ public CopyWriter copyTo(BlobId targetBlob, BlobSourceOption... options) { - CopyRequest copyRequest = CopyRequest.builder().source(info.bucket(), info.name()) - .sourceOptions(convert(info, options)).target(targetBlob).build(); + CopyRequest copyRequest = CopyRequest.builder() + .source(bucket(), name()) + .sourceOptions(toSourceOptions(this, options)) + .target(targetBlob) + .build(); return storage.copy(copyRequest); } - /** - * Deletes this blob. - * - * @param options blob delete options - * @return true if blob was deleted - * @throws StorageException upon failure - */ - public boolean delete(BlobSourceOption... options) { - return storage.delete(info.blobId(), convert(info, options)); - } - /** * Sends a copy request for the current blob to the target bucket, preserving its name. Possibly * copying also some of the metadata (e.g. content-type). 
@@ -250,7 +413,7 @@ public boolean delete(BlobSourceOption... options) { * @throws StorageException upon failure */ public CopyWriter copyTo(String targetBucket, BlobSourceOption... options) { - return copyTo(targetBucket, info.name(), options); + return copyTo(targetBucket, name(), options); } /** @@ -269,42 +432,74 @@ public CopyWriter copyTo(String targetBucket, String targetBlob, BlobSourceOptio } /** - * Returns a {@code BlobReadChannel} object for reading this blob's content. + * Returns a {@code ReadChannel} object for reading this blob's content. * * @param options blob read options * @throws StorageException upon failure */ - public BlobReadChannel reader(BlobSourceOption... options) { - return storage.reader(info.blobId(), convert(info, options)); + public ReadChannel reader(BlobSourceOption... options) { + return storage.reader(blobId(), toSourceOptions(this, options)); } /** - * Returns a {@code BlobWriteChannel} object for writing to this blob. By default any md5 and + * Returns a {@code WriteChannel} object for writing to this blob. By default any md5 and * crc32c values in the current blob are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * * @param options target blob options * @throws StorageException upon failure */ - public BlobWriteChannel writer(BlobWriteOption... options) { - return storage.writer(info, options); + public WriteChannel writer(BlobWriteOption... options) { + return storage.writer(this, options); } /** - * Generates a signed URL for this blob. If you want to allow access to for a fixed amount of time - * for this blob, you can use this method to generate a URL that is only valid within a certain - * time period. This is particularly useful if you don't want publicly accessible blobs, but don't - * want to require users to explicitly log in. + * Generates a signed URL for this blob. 
If you want to allow access for a fixed amount of time to + * this blob, you can use this method to generate a URL that is only valid within a certain time + * period. This is particularly useful if you don't want publicly accessible blobs, but also don't + * want to require users to explicitly log in. Signing a URL requires a service account and its + * associated private key. If a {@link AuthCredentials.ServiceAccountAuthCredentials} was passed + * to {@link StorageOptions.Builder#authCredentials(AuthCredentials)} or the default credentials + * are being used and the environment variable {@code GOOGLE_APPLICATION_CREDENTIALS} is set, then + * {@code signUrl} will use that service account and associated key to sign the URL. If the + * credentials passed to {@link StorageOptions} do not expose a private key (this is the case for + * App Engine credentials, Compute Engine credentials and Google Cloud SDK credentials) then + * {@code signUrl} will throw an {@link IllegalArgumentException} unless a service account with + * associated key is passed using the {@code SignUrlOption.serviceAccount()} option. The service + * account and private key passed with {@code SignUrlOption.serviceAccount()} have priority over + * any credentials set with {@link StorageOptions.Builder#authCredentials(AuthCredentials)}. + * + *

    Example usage of creating a signed URL that is valid for 2 weeks, using the default + * credentials for signing the URL: + *

     {@code
    +   * blob.signUrl(14, TimeUnit.DAYS);
    +   * }
    + * + *

    Example usage of creating a signed URL passing the {@code SignUrlOption.serviceAccount()} + * option, that will be used for signing the URL: + *

     {@code
    +   * blob.signUrl(14, TimeUnit.DAYS, SignUrlOption.serviceAccount(
    +   *     AuthCredentials.createForJson(new FileInputStream("/path/to/key.json"))));
    +   * }
    * * @param duration time until the signed URL expires, expressed in {@code unit}. The finer * granularity supported is 1 second, finer granularities will be truncated * @param unit time unit of the {@code duration} parameter * @param options optional URL signing options * @return a signed URL for this bucket and the specified options + * @throws IllegalArgumentException if + * {@link SignUrlOption#serviceAccount(AuthCredentials.ServiceAccountAuthCredentials)} was not + * used and no service account was provided to {@link StorageOptions} + * @throws IllegalArgumentException if the key associated to the provided service account is + * invalid + * @throws IllegalArgumentException if {@link SignUrlOption#withMd5()} option is used and + * {@link #md5()} is {@code null} + * @throws IllegalArgumentException if {@link SignUrlOption#withContentType()} option is used and + * {@link #contentType()} is {@code null} * @see Signed-URLs */ public URL signUrl(long duration, TimeUnit unit, SignUrlOption... options) { - return storage.signUrl(info, duration, unit, options); + return storage.signUrl(this, duration, unit, options); } /** @@ -314,75 +509,29 @@ public Storage storage() { return storage; } - /** - * Gets the requested blobs. A batch request is used to fetch blobs. - * - * @param storage the storage service used to issue the request - * @param blobs the blobs to get - * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it has - * been denied the corresponding item in the list is {@code null}. - * @throws StorageException upon failure - */ - public static List get(final Storage storage, BlobId... blobs) { - checkNotNull(storage); - checkNotNull(blobs); - if (blobs.length == 0) { - return Collections.emptyList(); - } - return Collections.unmodifiableList(Lists.transform(storage.get(blobs), - new Function() { - @Override - public Blob apply(BlobInfo f) { - return f != null ? 
new Blob(storage, f) : null; - } - })); + @Override + public Builder toBuilder() { + return new Builder(this); } - /** - * Updates the requested blobs. A batch request is used to update blobs. Original metadata are - * merged with metadata in the provided {@code BlobInfo} objects. To replace metadata instead - * you first have to unset them. Unsetting metadata can be done by setting the provided - * {@code BlobInfo} objects metadata to {@code null}. See - * {@link #update(com.google.gcloud.storage.BlobInfo, - * com.google.gcloud.storage.Storage.BlobTargetOption...) } for a code example. - * - * @param storage the storage service used to issue the request - * @param infos the blobs to update - * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it has - * been denied the corresponding item in the list is {@code null}. - * @throws StorageException upon failure - */ - public static List update(final Storage storage, BlobInfo... infos) { - checkNotNull(storage); - checkNotNull(infos); - if (infos.length == 0) { - return Collections.emptyList(); - } - return Collections.unmodifiableList(Lists.transform(storage.update(infos), - new Function() { - @Override - public Blob apply(BlobInfo f) { - return f != null ? new Blob(storage, f) : null; - } - })); + @Override + public final boolean equals(Object obj) { + return obj instanceof Blob && Objects.equals(toPb(), ((Blob) obj).toPb()) + && Objects.equals(options, ((Blob) obj).options); } - /** - * Deletes the requested blobs. A batch request is used to delete blobs. - * - * @param storage the storage service used to issue the request - * @param blobs the blobs to delete - * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. - * @throws StorageException upon failure - */ - public static List delete(Storage storage, BlobId... 
blobs) { - checkNotNull(storage); - checkNotNull(blobs); - if (blobs.length == 0) { - return Collections.emptyList(); - } - return storage.delete(blobs); + @Override + public final int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.storage = options.service(); + } + + static Blob fromPb(Storage storage, StorageObject storageObject) { + BlobInfo info = BlobInfo.fromPb(storageObject); + return new Blob(storage, new BlobInfo.BuilderImpl(info)); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java index eafebe09a4cb..d30003d632db 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java @@ -25,58 +25,97 @@ import java.util.Objects; /** - * Google Storage object identifier. + * Google Storage Object identifier. A {@code BlobId} object includes the name of the containing + * bucket, the blob's name and possibly the blob's generation. If {@link #generation()} is + * {@code null} the identifier refers to the latest blob's generation. */ public final class BlobId implements Serializable { private static final long serialVersionUID = -6156002883225601925L; private final String bucket; private final String name; + private final Long generation; - private BlobId(String bucket, String name) { + private BlobId(String bucket, String name, Long generation) { this.bucket = bucket; this.name = name; + this.generation = generation; } + /** + * Returns the name of the bucket containing the blob. + */ public String bucket() { return bucket; } + /** + * Returns the name of the blob. + */ public String name() { return name; } + /** + * Returns blob's data generation. Used for versioning. 
+ */ + public Long generation() { + return generation; + } + @Override public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .toString(); } @Override public int hashCode() { - return Objects.hash(bucket, name); + return Objects.hash(bucket, name, generation); } @Override public boolean equals(Object obj) { return obj instanceof BlobId && Objects.equals(bucket, ((BlobId) obj).bucket) - && Objects.equals(name, ((BlobId) obj).name); + && Objects.equals(name, ((BlobId) obj).name) + && Objects.equals(generation, ((BlobId) obj).generation); } StorageObject toPb() { StorageObject storageObject = new StorageObject(); storageObject.setBucket(bucket); storageObject.setName(name); + storageObject.setGeneration(generation); return storageObject; } + /** + * Creates a blob identifier. Generation is set to {@code null}. + * + * @param bucket the name of the bucket that contains the blob + * @param name the name of the blob + */ public static BlobId of(String bucket, String name) { - return new BlobId(checkNotNull(bucket), checkNotNull(name)); + return new BlobId(checkNotNull(bucket), checkNotNull(name), null); + } + + /** + * Creates a {@code BlobId} object. + * + * @param bucket name of the containing bucket + * @param name blob's name + * @param generation blob's data generation, used for versioning. 
If {@code null} the identifier + * refers to the latest blob's generation + */ + public static BlobId of(String bucket, String name, Long generation) { + return new BlobId(checkNotNull(bucket), checkNotNull(name), generation); } static BlobId fromPb(StorageObject storageObject) { - return BlobId.of(storageObject.getBucket(), storageObject.getName()); + return BlobId.of(storageObject.getBucket(), storageObject.getName(), + storageObject.getGeneration()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java index 01711a53613e..cf509c8f0961 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java @@ -47,22 +47,16 @@ * @see Concepts and * Terminology */ -public final class BlobInfo implements Serializable { +public class BlobInfo implements Serializable { - static final Function FROM_PB_FUNCTION = - new Function() { - @Override - public BlobInfo apply(StorageObject pb) { - return BlobInfo.fromPb(pb); - } - }; - static final Function TO_PB_FUNCTION = + static final Function INFO_TO_PB_FUNCTION = new Function() { @Override public StorageObject apply(BlobInfo blobInfo) { return blobInfo.toPb(); } }; + private static final long serialVersionUID = 2228487739943277159L; private final BlobId blobId; private final String id; @@ -76,7 +70,6 @@ public StorageObject apply(BlobInfo blobInfo) { private final String crc32c; private final String mediaLink; private final Map metadata; - private final Long generation; private final Long metageneration; private final Long deleteTime; private final Long updateTime; @@ -85,6 +78,7 @@ public StorageObject apply(BlobInfo blobInfo) { private final String contentDisposition; private final String contentLanguage; private final Integer componentCount; + private final boolean isDirectory; /** * This class is meant for 
internal use only. Users are discouraged from using this class. @@ -97,7 +91,112 @@ public Set> entrySet() { } } - public static final class Builder { + /** + * Builder for {@code BlobInfo}. + */ + public abstract static class Builder { + + /** + * Sets the blob identity. + */ + public abstract Builder blobId(BlobId blobId); + + abstract Builder id(String id); + + /** + * Sets the blob's data content type. + * + * @see Content-Type + */ + public abstract Builder contentType(String contentType); + + /** + * Sets the blob's data content disposition. + * + * @see Content-Disposition + */ + public abstract Builder contentDisposition(String contentDisposition); + + /** + * Sets the blob's data content language. + * + * @see Content-Language + */ + public abstract Builder contentLanguage(String contentLanguage); + + /** + * Sets the blob's data content encoding. + * + * @see Content-Encoding + */ + public abstract Builder contentEncoding(String contentEncoding); + + abstract Builder componentCount(Integer componentCount); + + /** + * Sets the blob's data cache control. + * + * @see Cache-Control + */ + public abstract Builder cacheControl(String cacheControl); + + /** + * Sets the blob's access control configuration. + * + * @see + * About Access Control Lists + */ + public abstract Builder acl(List acl); + + abstract Builder owner(Acl.Entity owner); + + abstract Builder size(Long size); + + abstract Builder etag(String etag); + + abstract Builder selfLink(String selfLink); + + /** + * Sets the MD5 hash of blob's data. MD5 value must be encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ + public abstract Builder md5(String md5); + + /** + * Sets the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. 
+ * + * @see + * Hashes and ETags: Best Practices + */ + public abstract Builder crc32c(String crc32c); + + abstract Builder mediaLink(String mediaLink); + + /** + * Sets the blob's user provided metadata. + */ + public abstract Builder metadata(Map metadata); + + abstract Builder metageneration(Long metageneration); + + abstract Builder deleteTime(Long deleteTime); + + abstract Builder updateTime(Long updateTime); + + abstract Builder isDirectory(boolean isDirectory); + + /** + * Creates a {@code BlobInfo} object. + */ + public abstract BlobInfo build(); + } + + static final class BuilderImpl extends Builder { private BlobId blobId; private String id; @@ -107,7 +206,7 @@ public static final class Builder { private String contentLanguage; private Integer componentCount; private String cacheControl; - private ImmutableList acl; + private List acl; private Acl.Entity owner; private Long size; private String etag; @@ -116,126 +215,174 @@ public static final class Builder { private String crc32c; private String mediaLink; private Map metadata; - private Long generation; private Long metageneration; private Long deleteTime; private Long updateTime; + private Boolean isDirectory; + + BuilderImpl(BlobId blobId) { + this.blobId = blobId; + } + + BuilderImpl(BlobInfo blobInfo) { + blobId = blobInfo.blobId; + id = blobInfo.id; + cacheControl = blobInfo.cacheControl; + contentEncoding = blobInfo.contentEncoding; + contentType = blobInfo.contentType; + contentDisposition = blobInfo.contentDisposition; + contentLanguage = blobInfo.contentLanguage; + componentCount = blobInfo.componentCount; + acl = blobInfo.acl; + owner = blobInfo.owner; + size = blobInfo.size; + etag = blobInfo.etag; + selfLink = blobInfo.selfLink; + md5 = blobInfo.md5; + crc32c = blobInfo.crc32c; + mediaLink = blobInfo.mediaLink; + metadata = blobInfo.metadata; + metageneration = blobInfo.metageneration; + deleteTime = blobInfo.deleteTime; + updateTime = blobInfo.updateTime; + isDirectory = 
blobInfo.isDirectory; + } - private Builder() {} - + @Override public Builder blobId(BlobId blobId) { this.blobId = checkNotNull(blobId); return this; } + @Override Builder id(String id) { this.id = id; return this; } + @Override public Builder contentType(String contentType) { this.contentType = firstNonNull(contentType, Data.nullOf(String.class)); return this; } + @Override public Builder contentDisposition(String contentDisposition) { this.contentDisposition = firstNonNull(contentDisposition, Data.nullOf(String.class)); return this; } + @Override public Builder contentLanguage(String contentLanguage) { this.contentLanguage = firstNonNull(contentLanguage, Data.nullOf(String.class)); return this; } + @Override public Builder contentEncoding(String contentEncoding) { this.contentEncoding = firstNonNull(contentEncoding, Data.nullOf(String.class)); return this; } + @Override Builder componentCount(Integer componentCount) { this.componentCount = componentCount; return this; } + @Override public Builder cacheControl(String cacheControl) { this.cacheControl = firstNonNull(cacheControl, Data.nullOf(String.class)); return this; } + @Override public Builder acl(List acl) { this.acl = acl != null ? ImmutableList.copyOf(acl) : null; return this; } + @Override Builder owner(Acl.Entity owner) { this.owner = owner; return this; } + @Override Builder size(Long size) { this.size = size; return this; } + @Override Builder etag(String etag) { this.etag = etag; return this; } + @Override Builder selfLink(String selfLink) { this.selfLink = selfLink; return this; } + @Override public Builder md5(String md5) { this.md5 = firstNonNull(md5, Data.nullOf(String.class)); return this; } + @Override public Builder crc32c(String crc32c) { this.crc32c = firstNonNull(crc32c, Data.nullOf(String.class)); return this; } + @Override Builder mediaLink(String mediaLink) { this.mediaLink = mediaLink; return this; } + @Override public Builder metadata(Map metadata) { this.metadata = metadata != null - ? 
new HashMap(metadata) : Data.nullOf(ImmutableEmptyMap.class); - return this; - } - - Builder generation(Long generation) { - this.generation = generation; + ? new HashMap<>(metadata) : Data.>nullOf(ImmutableEmptyMap.class); return this; } + @Override Builder metageneration(Long metageneration) { this.metageneration = metageneration; return this; } + @Override Builder deleteTime(Long deleteTime) { this.deleteTime = deleteTime; return this; } + @Override Builder updateTime(Long updateTime) { this.updateTime = updateTime; return this; } + @Override + Builder isDirectory(boolean isDirectory) { + this.isDirectory = isDirectory; + return this; + } + + @Override public BlobInfo build() { checkNotNull(blobId); return new BlobInfo(this); } } - private BlobInfo(Builder builder) { + BlobInfo(BuilderImpl builder) { blobId = builder.blobId; id = builder.id; cacheControl = builder.cacheControl; @@ -253,127 +400,223 @@ private BlobInfo(Builder builder) { crc32c = builder.crc32c; mediaLink = builder.mediaLink; metadata = builder.metadata; - generation = builder.generation; metageneration = builder.metageneration; deleteTime = builder.deleteTime; updateTime = builder.updateTime; + isDirectory = firstNonNull(builder.isDirectory, Boolean.FALSE); } + /** + * Returns the blob's identity. + */ public BlobId blobId() { return blobId; } + /** + * Returns the name of the containing bucket. + */ public String bucket() { return blobId().bucket(); } + /** + * Returns the blob's id. + */ public String id() { return id; } + /** + * Returns the blob's name. + */ public String name() { return blobId().name(); } + /** + * Returns the blob's data cache control. + * + * @see Cache-Control + */ public String cacheControl() { return Data.isNull(cacheControl) ? null : cacheControl; } + /** + * Returns the blob's access control configuration. + * + * @see + * About Access Control Lists + */ public List acl() { return acl; } + /** + * Returns the blob's owner. 
This will always be the uploader of the blob. + */ public Acl.Entity owner() { return owner; } + /** + * Returns the content length of the data in bytes. + * + * @see Content-Length + */ public Long size() { return size; } + /** + * Returns the blob's data content type. + * + * @see Content-Type + */ public String contentType() { return Data.isNull(contentType) ? null : contentType; } + /** + * Returns the blob's data content encoding. + * + * @see Content-Encoding + */ public String contentEncoding() { return Data.isNull(contentEncoding) ? null : contentEncoding; } + /** + * Returns the blob's data content disposition. + * + * @see Content-Disposition + */ public String contentDisposition() { return Data.isNull(contentDisposition) ? null : contentDisposition; } + /** + * Returns the blob's data content language. + * + * @see Content-Language + */ public String contentLanguage() { return Data.isNull(contentLanguage) ? null : contentLanguage; } + /** + * Returns the number of components that make up this blob. Components are accumulated through + * the {@link Storage#compose(Storage.ComposeRequest)} operation and are limited to a count of + * 1024, counting 1 for each non-composite component blob and componentCount for each composite + * component blob. This value is set only for composite blobs. + * + * @see Component Count + * Property + */ public Integer componentCount() { return componentCount; } + /** + * Returns HTTP 1.1 Entity tag for the blob. + * + * @see Entity Tags + */ public String etag() { return etag; } + /** + * Returns the URI of this blob as a string. + */ public String selfLink() { return selfLink; } + /** + * Returns the MD5 hash of blob's data encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ public String md5() { return Data.isNull(md5) ? null : md5; } + /** + * Returns the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. 
+ * + * @see + * Hashes and ETags: Best Practices + */ public String crc32c() { return Data.isNull(crc32c) ? null : crc32c; } + /** + * Returns the blob's media download link. + */ public String mediaLink() { return mediaLink; } + /** + * Returns blob's user provided metadata. + */ public Map metadata() { return metadata == null || Data.isNull(metadata) ? null : Collections.unmodifiableMap(metadata); } + /** + * Returns blob's data generation. Used for blob versioning. + */ public Long generation() { - return generation; + return blobId().generation(); } + /** + * Returns blob's metageneration. Used for preconditions and for detecting changes in metadata. + * A metageneration number is only meaningful in the context of a particular generation of a + * particular blob. + */ public Long metageneration() { return metageneration; } + /** + * Returns the deletion time of the blob. + */ public Long deleteTime() { return deleteTime; } + /** + * Returns the last modification time of the blob's metadata. + */ public Long updateTime() { return updateTime; } + /** + * Returns {@code true} if the current blob represents a directory. This can only happen if the + * blob is returned by {@link Storage#list(String, Storage.BlobListOption...)} when the + * {@link Storage.BlobListOption#currentDirectory()} option is used. When this is the case only + * {@link #blobId()} and {@link #size()} are set for the current blob: {@link BlobId#name()} ends + * with the '/' character, {@link BlobId#generation()} returns {@code null} and {@link #size()} is + * {@code 0}. + */ + public boolean isDirectory() { + return isDirectory; + } + + /** + * Returns a builder for the current blob. 
+ */ public Builder toBuilder() { - return new Builder() - .blobId(blobId) - .id(id) - .generation(generation) - .cacheControl(cacheControl) - .contentEncoding(contentEncoding) - .contentType(contentType) - .contentDisposition(contentDisposition) - .contentLanguage(contentLanguage) - .componentCount(componentCount) - .crc32c(crc32c) - .md5(md5) - .deleteTime(deleteTime) - .updateTime(updateTime) - .mediaLink(mediaLink) - .metadata(metadata) - .metageneration(metageneration) - .acl(acl) - .owner(owner) - .size(size) - .etag(etag) - .selfLink(selfLink); + return new BuilderImpl(this); } @Override @@ -381,6 +624,7 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .add("size", size()) .add("content-type", contentType()) .add("metadata", metadata()) @@ -394,7 +638,8 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof BlobInfo && Objects.equals(toPb(), ((BlobInfo) obj).toPb()); + return obj != null && obj.getClass().equals(BlobInfo.class) + && Objects.equals(toPb(), ((BlobInfo) obj).toPb()); } StorageObject toPb() { @@ -422,8 +667,9 @@ public ObjectAccessControl apply(Acl acl) { Map pbMetadata = metadata; if (metadata != null && !Data.isNull(metadata)) { pbMetadata = Maps.newHashMapWithExpectedSize(metadata.size()); - for (String key : metadata.keySet()) { - pbMetadata.put(key, firstNonNull(metadata.get(key), Data.nullOf(String.class))); + for (Map.Entry entry : metadata.entrySet()) { + pbMetadata.put(entry.getKey(), + firstNonNull(entry.getValue(), Data.nullOf(String.class))); } } storageObject.setMetadata(pbMetadata); @@ -431,7 +677,6 @@ public ObjectAccessControl apply(Acl acl) { storageObject.setContentEncoding(contentEncoding); storageObject.setCrc32c(crc32c); storageObject.setContentType(contentType); - storageObject.setGeneration(generation); storageObject.setMd5Hash(md5); storageObject.setMediaLink(mediaLink); 
storageObject.setMetageneration(metageneration); @@ -444,16 +689,36 @@ public ObjectAccessControl apply(Acl acl) { return storageObject; } + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ public static Builder builder(BucketInfo bucketInfo, String name) { return builder(bucketInfo.name(), name); } + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ public static Builder builder(String bucket, String name) { - return new Builder().blobId(BlobId.of(bucket, name)); + return builder(BlobId.of(bucket, name)); + } + + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ + public static Builder builder(BucketInfo bucketInfo, String name, Long generation) { + return builder(bucketInfo.name(), name, generation); + } + + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ + public static Builder builder(String bucket, String name, Long generation) { + return builder(BlobId.of(bucket, name, generation)); } public static Builder builder(BlobId blobId) { - return new Builder().blobId(blobId); + return new BuilderImpl(blobId); } static BlobInfo fromPb(StorageObject storageObject) { @@ -470,9 +735,6 @@ static BlobInfo fromPb(StorageObject storageObject) { if (storageObject.getContentType() != null) { builder.contentType(storageObject.getContentType()); } - if (storageObject.getGeneration() != null) { - builder.generation(storageObject.getGeneration()); - } if (storageObject.getMd5Hash() != null) { builder.md5(storageObject.getMd5Hash()); } @@ -523,6 +785,9 @@ public Acl apply(ObjectAccessControl objectAccessControl) { } })); } + if (storageObject.containsKey("isDirectory")) { + builder.isDirectory(Boolean.TRUE); + } return builder.build(); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java 
b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java deleted file mode 100644 index 9e6ec9dc5655..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.common.base.Function; -import com.google.common.collect.Iterators; - -import java.util.Iterator; -import java.util.Objects; - -/** - * Implementation of a paginated list of Google Cloud storage {@code Blob}. 
- */ -public class BlobListResult implements ListResult { - - private final ListResult infoList; - private final Storage storage; - - public BlobListResult(Storage storage, ListResult infoList) { - this.storage = checkNotNull(storage); - this.infoList = checkNotNull(infoList); - } - - @Override - public String nextPageCursor() { - return infoList.nextPageCursor(); - } - - @Override - public ListResult nextPage() { - ListResult nextPageInfoList = infoList.nextPage(); - if (nextPageInfoList == null) { - return null; - } - return new BlobListResult(storage, nextPageInfoList); - } - - @Override - public Iterator iterator() { - return Iterators.transform(infoList.iterator(), new Function() { - @Override - public Blob apply(BlobInfo info) { - return new Blob(storage, info); - } - }); - } - - @Override - public int hashCode() { - return Objects.hash(infoList); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BlobListResult)) { - return false; - } - BlobListResult other = (BlobListResult) obj; - return Objects.equals(infoList, other.infoList); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index 205dc4b97309..f9c6f912563d 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -16,45 +16,265 @@ package com.google.gcloud.storage; -import com.google.gcloud.Restorable; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.api.services.storage.model.StorageObject; +import com.google.common.base.MoreObjects; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; -import java.io.Closeable; 
import java.io.IOException; -import java.nio.channels.ReadableByteChannel; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.nio.channels.ClosedChannelException; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; /** - * A channel for reading data from a Google Cloud Storage object. - * - * Implementations of this class may buffer data internally to reduce remote calls. - * - * This class is @{link Serializable}, which allows incremental reads. + * Default implementation for ReadChannel. */ -public interface BlobReadChannel extends ReadableByteChannel, Closeable, - Restorable { - - /** - * Overridden to remove IOException. - * - * @see java.nio.channels.Channel#close() - */ +class BlobReadChannel implements ReadChannel { + + private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize = DEFAULT_CHUNK_SIZE; + + private final StorageRpc storageRpc; + private final StorageObject storageObject; + private int bufferPos; + private byte[] buffer; + + BlobReadChannel(StorageOptions serviceOptions, BlobId blob, + Map requestOptions) { + this.serviceOptions = serviceOptions; + this.blob = blob; + this.requestOptions = requestOptions; + isOpen = true; + storageRpc = serviceOptions.rpc(); + storageObject = blob.toPb(); + } + + @Override + public RestorableState capture() { + StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) + .position(position) + .isOpen(isOpen) + .endOfStream(endOfStream) + .chunkSize(chunkSize); + if (buffer != null) { + builder.position(position + bufferPos); + builder.endOfStream(false); + } + return builder.build(); + } + + @Override + public boolean isOpen() { + return isOpen; + } + @Override - void close(); - - void 
seek(int position) throws IOException; - - /** - * Sets the minimum size that will be read by a single RPC. - * Read data will be locally buffered until consumed. - */ - void chunkSize(int chunkSize); - - /** - * Captures the read channel state so that it can be saved and restored afterwards. - * - * @return a {@link RestorableState} object that contains the read channel state and can restore - * it afterwards. - */ + public void close() { + if (isOpen) { + buffer = null; + isOpen = false; + } + } + + private void validateOpen() throws ClosedChannelException { + if (!isOpen) { + throw new ClosedChannelException(); + } + } + + @Override + public void seek(int position) throws IOException { + validateOpen(); + this.position = position; + buffer = null; + bufferPos = 0; + endOfStream = false; + } + + @Override + public void chunkSize(int chunkSize) { + this.chunkSize = chunkSize <= 0 ? DEFAULT_CHUNK_SIZE : chunkSize; + } + @Override - RestorableState capture(); + public int read(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + if (buffer == null) { + if (endOfStream) { + return -1; + } + final int toRead = Math.max(byteBuffer.remaining(), chunkSize); + try { + Tuple result = runWithRetries(new Callable>() { + @Override + public Tuple call() { + return storageRpc.read(storageObject, requestOptions, position, toRead); + } + }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); + if (result.y().length > 0 && lastEtag != null && !Objects.equals(result.x(), lastEtag)) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); + throw new StorageException(0, messageBuilder.toString()); + } + lastEtag = result.x(); + buffer = result.y(); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + if (toRead > buffer.length) { + endOfStream = true; + if (buffer.length == 0) { + buffer = null; + return -1; + } + } + } + int 
toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); + byteBuffer.put(buffer, bufferPos, toWrite); + bufferPos += toWrite; + if (bufferPos >= buffer.length) { + position += buffer.length; + buffer = null; + bufferPos = 0; + } + return toWrite; + } + + static class StateImpl implements RestorableState, Serializable { + + private static final long serialVersionUID = 3889420316004453706L; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private final String lastEtag; + private final int position; + private final boolean isOpen; + private final boolean endOfStream; + private final int chunkSize; + + StateImpl(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.blob = builder.blob; + this.requestOptions = builder.requestOptions; + this.lastEtag = builder.lastEtag; + this.position = builder.position; + this.isOpen = builder.isOpen; + this.endOfStream = builder.endOfStream; + this.chunkSize = builder.chunkSize; + } + + static class Builder { + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize; + + private Builder(StorageOptions options, BlobId blob, Map reqOptions) { + this.serviceOptions = options; + this.blob = blob; + this.requestOptions = reqOptions; + } + + Builder lastEtag(String lastEtag) { + this.lastEtag = lastEtag; + return this; + } + + Builder position(int position) { + this.position = position; + return this; + } + + Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + Builder endOfStream(boolean endOfStream) { + this.endOfStream = endOfStream; + return this; + } + + Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + RestorableState build() { + return new StateImpl(this); + } + } + + static Builder 
builder( + StorageOptions options, BlobId blob, Map reqOptions) { + return new Builder(options, blob, reqOptions); + } + + @Override + public ReadChannel restore() { + BlobReadChannel channel = new BlobReadChannel(serviceOptions, blob, requestOptions); + channel.lastEtag = lastEtag; + channel.position = position; + channel.isOpen = isOpen; + channel.endOfStream = endOfStream; + channel.chunkSize = chunkSize; + return channel; + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, blob, requestOptions, lastEtag, position, isOpen, + endOfStream, chunkSize); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof StateImpl)) { + return false; + } + final StateImpl other = (StateImpl) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.blob, other.blob) + && Objects.equals(this.requestOptions, other.requestOptions) + && Objects.equals(this.lastEtag, other.lastEtag) + && this.position == other.position + && this.isOpen == other.isOpen + && this.endOfStream == other.endOfStream + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("blob", blob) + .add("position", position) + .add("isOpen", isOpen) + .add("endOfStream", endOfStream) + .toString(); + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java deleted file mode 100644 index 09047a642218..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.Callable; - -/** - * Default implementation for BlobReadChannel. - */ -class BlobReadChannelImpl implements BlobReadChannel { - - private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private int position; - private boolean isOpen; - private boolean endOfStream; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - private int bufferPos; - private byte[] buffer; - - BlobReadChannelImpl(StorageOptions serviceOptions, BlobId blob, - Map requestOptions) { - this.serviceOptions = serviceOptions; - this.blob = blob; - this.requestOptions = requestOptions; - isOpen = true; - storageRpc = serviceOptions.rpc(); - storageObject = blob.toPb(); - } - - @Override - public RestorableState capture() { - StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) - .position(position) - .isOpen(isOpen) - .endOfStream(endOfStream) - 
.chunkSize(chunkSize); - if (buffer != null) { - builder.position(position + bufferPos); - builder.endOfStream(false); - } - return builder.build(); - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() { - if (isOpen) { - buffer = null; - isOpen = false; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public void seek(int position) throws IOException { - validateOpen(); - this.position = position; - buffer = null; - bufferPos = 0; - endOfStream = false; - } - - @Override - public void chunkSize(int chunkSize) { - this.chunkSize = chunkSize <= 0 ? DEFAULT_CHUNK_SIZE : chunkSize; - } - - @Override - public int read(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - if (buffer == null) { - if (endOfStream) { - return -1; - } - final int toRead = Math.max(byteBuffer.remaining(), chunkSize); - try { - buffer = runWithRetries(new Callable() { - @Override - public byte[] call() { - return storageRpc.read(storageObject, requestOptions, position, toRead); - } - }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - if (toRead > buffer.length) { - endOfStream = true; - if (buffer.length == 0) { - buffer = null; - return -1; - } - } - } - int toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); - byteBuffer.put(buffer, bufferPos, toWrite); - bufferPos += toWrite; - if (bufferPos >= buffer.length) { - position += buffer.length; - buffer = null; - bufferPos = 0; - } - return toWrite; - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 3889420316004453706L; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private final int position; - private final boolean 
isOpen; - private final boolean endOfStream; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blob = builder.blob; - this.requestOptions = builder.requestOptions; - this.position = builder.position; - this.isOpen = builder.isOpen; - this.endOfStream = builder.endOfStream; - this.chunkSize = builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private int position; - private boolean isOpen; - private boolean endOfStream; - private int chunkSize; - - private Builder(StorageOptions options, BlobId blob, Map reqOptions) { - this.serviceOptions = options; - this.blob = blob; - this.requestOptions = reqOptions; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder endOfStream(boolean endOfStream) { - this.endOfStream = endOfStream; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder( - StorageOptions options, BlobId blob, Map reqOptions) { - return new Builder(options, blob, reqOptions); - } - - @Override - public BlobReadChannel restore() { - BlobReadChannelImpl channel = new BlobReadChannelImpl(serviceOptions, blob, requestOptions); - channel.position = position; - channel.isOpen = isOpen; - channel.endOfStream = endOfStream; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blob, requestOptions, position, isOpen, endOfStream, - chunkSize); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - 
return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blob, other.blob) - && Objects.equals(this.requestOptions, other.requestOptions) - && this.position == other.position - && this.isOpen == other.isOpen - && this.endOfStream == other.endOfStream - && this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blob", blob) - .add("position", position) - .add("isOpen", isOpen) - .add("endOfStream", endOfStream) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index a6208e5020ae..30b0ec870f51 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -16,36 +16,77 @@ package com.google.gcloud.storage; -import com.google.gcloud.Restorable; +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.storage.spi.StorageRpc; -import java.io.Closeable; -import java.nio.channels.WritableByteChannel; +import java.util.Map; /** - * A channel for writing data to a Google Cloud Storage object. - * - * Implementations of this class may further buffer data internally to reduce remote calls. Written - * data will only be visible after calling {@link #close()}. This class is serializable, to allow - * incremental writes. + * Write channel implementation to upload Google Cloud Storage blobs. 
*/ -public interface BlobWriteChannel extends WritableByteChannel, Closeable, - Restorable { - - /** - * Sets the minimum size that will be written by a single RPC. - * Written data will be buffered and only flushed upon reaching this size or closing the channel. - */ - void chunkSize(int chunkSize); - - /** - * Captures the write channel state so that it can be saved and restored afterwards. The original - * {@code BlobWriteChannel} and the restored one should not both be used. Closing one channel - * causes the other channel to close, subsequent writes will fail. - * - * @return a {@link RestorableState} object that contains the write channel state and can restore - * it afterwards. - */ +class BlobWriteChannel extends BaseWriteChannel { + + BlobWriteChannel(StorageOptions options, BlobInfo blob, Map optionsMap) { + this(options, blob, options.rpc().open(blob.toPb(), optionsMap)); + } + + BlobWriteChannel(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + @Override - RestorableState capture(); + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), StorageImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + } + + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -9028324143780151286L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder extends BaseWriteChannel.BaseState.Builder { + + private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + + @Override + public 
RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + return new Builder(options, blobInfo, uploadId); + } + + @Override + public WriteChannel restore() { + BlobWriteChannel channel = new BlobWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java deleted file mode 100644 index 8c3254a28d44..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; -import static java.util.concurrent.Executors.callable; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Map; -import java.util.Objects; - -/** - * Default implementation for BlobWriteChannel. 
- */ -class BlobWriteChannelImpl implements BlobWriteChannel { - - private static final long serialVersionUID = 8675286882724938737L; - private static final int MIN_CHUNK_SIZE = 256 * 1024; - private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; - - private final StorageOptions options; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer = new byte[0]; - private int limit; - private boolean isOpen = true; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, - Map optionsMap) { - this.options = options; - this.blobInfo = blobInfo; - storageRpc = options.rpc(); - storageObject = blobInfo.toPb(); - uploadId = storageRpc.open(storageObject, optionsMap); - } - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.options = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - storageRpc = options.rpc(); - storageObject = blobInfo.toPb(); - } - - @Override - public RestorableState capture() { - byte[] bufferToSave = null; - if (isOpen) { - flush(); - bufferToSave = Arrays.copyOf(buffer, limit); - } - return StateImpl.builder(options, blobInfo, uploadId) - .position(position) - .buffer(bufferToSave) - .isOpen(isOpen) - .chunkSize(chunkSize) - .build(); - } - - private void flush() { - if (limit >= chunkSize) { - final int length = limit - limit % MIN_CHUNK_SIZE; - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, storageObject, position, length, false); - } - }), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - position += length; - limit -= length; - byte[] temp = new byte[chunkSize]; - System.arraycopy(buffer, length, temp, 
0, limit); - buffer = temp; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public int write(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - int toWrite = byteBuffer.remaining(); - int spaceInBuffer = buffer.length - limit; - if (spaceInBuffer >= toWrite) { - byteBuffer.get(buffer, limit, toWrite); - } else { - buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); - byteBuffer.get(buffer, limit, toWrite); - } - limit += toWrite; - flush(); - return toWrite; - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() throws IOException { - if (isOpen) { - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, storageObject, position, limit, true); - } - }), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - position += buffer.length; - isOpen = false; - buffer = null; - } - } - - @Override - public void chunkSize(int chunkSize) { - chunkSize = (chunkSize / MIN_CHUNK_SIZE) * MIN_CHUNK_SIZE; - this.chunkSize = Math.max(MIN_CHUNK_SIZE, chunkSize); - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 8541062465055125619L; - - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private final int position; - private final byte[] buffer; - private final boolean isOpen; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blobInfo = builder.blobInfo; - this.uploadId = builder.uploadId; - this.position = builder.position; - this.buffer = builder.buffer; - this.isOpen = builder.isOpen; - this.chunkSize = 
builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer; - private boolean isOpen; - private int chunkSize; - - private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.serviceOptions = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder buffer(byte[] buffer) { - this.buffer = buffer; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - return new Builder(options, blobInfo, uploadId); - } - - @Override - public BlobWriteChannel restore() { - BlobWriteChannelImpl channel = new BlobWriteChannelImpl(serviceOptions, blobInfo, uploadId); - if (buffer != null) { - channel.buffer = buffer.clone(); - channel.limit = buffer.length; - } - channel.position = position; - channel.isOpen = isOpen; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blobInfo, uploadId, position, isOpen, chunkSize, - Arrays.hashCode(buffer)); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blobInfo, other.blobInfo) - && Objects.equals(this.uploadId, other.uploadId) - && Objects.deepEquals(this.buffer, other.buffer) - && this.position == other.position - && this.isOpen == other.isOpen - && 
this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blobInfo", blobInfo) - .add("uploadId", uploadId) - .add("position", position) - .add("isOpen", isOpen) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index 8d696dc2ab6b..5df305ff371c 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -18,65 +18,515 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.gcloud.storage.Bucket.BucketSourceOption.toGetOptions; +import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; +import com.google.common.base.Function; import com.google.common.base.MoreObjects; -import com.google.gcloud.storage.Storage.BlobSourceOption; -import com.google.gcloud.storage.Storage.BlobTargetOption; -import com.google.gcloud.storage.Storage.BlobWriteOption; -import com.google.gcloud.storage.Storage.BucketSourceOption; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.gcloud.Page; +import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BucketTargetOption; -import java.io.InputStream; +import com.google.gcloud.storage.spi.StorageRpc; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.Serializable; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Set; /** * A Google cloud storage bucket. * - *

    - * Objects of this class are immutable. Operations that modify the bucket like {@link #update} + *

    Objects of this class are immutable. Operations that modify the bucket like {@link #update} * return a new object. To get a {@code Bucket} object with the most recent information use - * {@link #reload}. + * {@link #reload}. {@code Bucket} adds a layer of service-related functionality over + * {@link BucketInfo}. *

    */ -public final class Bucket { +public final class Bucket extends BucketInfo { - private final Storage storage; - private final BucketInfo info; + private static final long serialVersionUID = 8574601739542252586L; + + private final StorageOptions options; + private transient Storage storage; /** - * Constructs a {@code Bucket} object for the provided {@code BucketInfo}. The storage service is - * used to issue requests. - * - * @param storage the storage service used for issuing requests - * @param info bucket's info + * Class for specifying bucket source options when {@code Bucket} methods are used. */ - public Bucket(Storage storage, BucketInfo info) { - this.storage = checkNotNull(storage); - this.info = checkNotNull(info); + public static class BucketSourceOption extends Option { + + private static final long serialVersionUID = 6928872234155522371L; + + private BucketSourceOption(StorageRpc.Option rpcOption) { + super(rpcOption, null); + } + + private Storage.BucketSourceOption toSourceOption(BucketInfo bucketInfo) { + switch (rpcOption()) { + case IF_METAGENERATION_MATCH: + return Storage.BucketSourceOption.metagenerationMatch(bucketInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BucketSourceOption.metagenerationNotMatch(bucketInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + private Storage.BucketGetOption toGetOption(BucketInfo bucketInfo) { + switch (rpcOption()) { + case IF_METAGENERATION_MATCH: + return Storage.BucketGetOption.metagenerationMatch(bucketInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BucketGetOption.metagenerationNotMatch(bucketInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if metageneration does not match. 
+ */ + public static BucketSourceOption metagenerationMatch() { + return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); + } + + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if metageneration matches. + */ + public static BucketSourceOption metagenerationNotMatch() { + return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); + } + + static Storage.BucketSourceOption[] toSourceOptions(BucketInfo bucketInfo, + BucketSourceOption... options) { + Storage.BucketSourceOption[] convertedOptions = + new Storage.BucketSourceOption[options.length]; + int index = 0; + for (BucketSourceOption option : options) { + convertedOptions[index++] = option.toSourceOption(bucketInfo); + } + return convertedOptions; + } + + static Storage.BucketGetOption[] toGetOptions(BucketInfo bucketInfo, + BucketSourceOption... options) { + Storage.BucketGetOption[] convertedOptions = new Storage.BucketGetOption[options.length]; + int index = 0; + for (BucketSourceOption option : options) { + convertedOptions[index++] = option.toGetOption(bucketInfo); + } + return convertedOptions; + } } /** - * Creates a {@code Bucket} object for the provided bucket name. Performs an RPC call to get the - * latest bucket information. - * - * @param storage the storage service used for issuing requests - * @param bucket bucket's name - * @return the {@code Bucket} object or {@code null} if not found. - * @throws StorageException upon failure + * Class for specifying blob target options when {@code Bucket} methods are used. */ - public static Bucket load(Storage storage, String bucket) { - BucketInfo info = storage.get(bucket); - return info != null ? 
new Bucket(storage, info) : null; + public static class BlobTargetOption extends Option { + + private static final Function TO_ENUM = + new Function() { + @Override + public StorageRpc.Option apply(BlobTargetOption blobTargetOption) { + return blobTargetOption.rpcOption(); + } + }; + private static final long serialVersionUID = 8345296337342509425L; + + private BlobTargetOption(StorageRpc.Option rpcOption, Object value) { + super(rpcOption, value); + } + + private StorageRpc.Tuple toTargetOption(BlobInfo blobInfo) { + BlobId blobId = blobInfo.blobId(); + switch (rpcOption()) { + case PREDEFINED_ACL: + return StorageRpc.Tuple.of(blobInfo, + Storage.BlobTargetOption.predefinedAcl((Storage.PredefinedAcl) value())); + case IF_GENERATION_MATCH: + blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); + return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), + Storage.BlobTargetOption.generationMatch()); + case IF_GENERATION_NOT_MATCH: + blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); + return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), + Storage.BlobTargetOption.generationNotMatch()); + case IF_METAGENERATION_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), + Storage.BlobTargetOption.metagenerationMatch()); + case IF_METAGENERATION_NOT_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), + Storage.BlobTargetOption.metagenerationNotMatch()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ + public static BlobTargetOption predefinedAcl(Storage.PredefinedAcl acl) { + return new BlobTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl); + } + + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. 
+ * This option can not be provided together with {@link #generationMatch(long)} or + * {@link #generationNotMatch(long)}. + */ + public static BlobTargetOption doesNotExist() { + return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, 0L); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match the provided value. This option can not be provided + * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. + */ + public static BlobTargetOption generationMatch(long generation) { + return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches the provided value. This option can not be provided + * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. + */ + public static BlobTargetOption generationNotMatch(long generation) { + return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); + } + + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match the provided value. This option can not be provided + * together with {@link #metagenerationNotMatch(long)}. + */ + public static BlobTargetOption metagenerationMatch(long metageneration) { + return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); + } + + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches the provided value. This option can not be provided together + * with {@link #metagenerationMatch(long)}. 
+ */ + public static BlobTargetOption metagenerationNotMatch(long metageneration) { + return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); + } + + static StorageRpc.Tuple toTargetOptions( + BlobInfo info, BlobTargetOption... options) { + Set optionSet = + Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); + checkArgument(!(optionSet.contains(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH) + && optionSet.contains(StorageRpc.Option.IF_METAGENERATION_MATCH)), + "metagenerationMatch and metagenerationNotMatch options can not be both provided"); + checkArgument(!(optionSet.contains(StorageRpc.Option.IF_GENERATION_NOT_MATCH) + && optionSet.contains(StorageRpc.Option.IF_GENERATION_MATCH)), + "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); + Storage.BlobTargetOption[] convertedOptions = new Storage.BlobTargetOption[options.length]; + BlobInfo targetInfo = info; + int index = 0; + for (BlobTargetOption option : options) { + StorageRpc.Tuple target = + option.toTargetOption(targetInfo); + targetInfo = target.x(); + convertedOptions[index++] = target.y(); + } + return StorageRpc.Tuple.of(targetInfo, convertedOptions); + } } /** - * Returns the bucket's information. + * Class for specifying blob write options when {@code Bucket} methods are used. 
*/ - public BucketInfo info() { - return info; + public static class BlobWriteOption implements Serializable { + + private static final Function TO_ENUM = + new Function() { + @Override + public Storage.BlobWriteOption.Option apply(BlobWriteOption blobWriteOption) { + return blobWriteOption.option; + } + }; + private static final long serialVersionUID = 4722190734541993114L; + + private final Storage.BlobWriteOption.Option option; + private final Object value; + + private StorageRpc.Tuple toWriteOption(BlobInfo blobInfo) { + BlobId blobId = blobInfo.blobId(); + switch (option) { + case PREDEFINED_ACL: + return StorageRpc.Tuple.of(blobInfo, + Storage.BlobWriteOption.predefinedAcl((Storage.PredefinedAcl) value)); + case IF_GENERATION_MATCH: + blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); + return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), + Storage.BlobWriteOption.generationMatch()); + case IF_GENERATION_NOT_MATCH: + blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); + return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), + Storage.BlobWriteOption.generationNotMatch()); + case IF_METAGENERATION_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), + Storage.BlobWriteOption.metagenerationMatch()); + case IF_METAGENERATION_NOT_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), + Storage.BlobWriteOption.metagenerationNotMatch()); + case IF_MD5_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().md5((String) value).build(), + Storage.BlobWriteOption.md5Match()); + case IF_CRC32C_MATCH: + return StorageRpc.Tuple.of(blobInfo.toBuilder().crc32c((String) value).build(), + Storage.BlobWriteOption.crc32cMatch()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + private BlobWriteOption(Storage.BlobWriteOption.Option option, Object value) { + this.option = option; + this.value = value; + } 
+ + @Override + public int hashCode() { + return Objects.hash(option, value); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof BlobWriteOption)) { + return false; + } + final BlobWriteOption other = (BlobWriteOption) obj; + return this.option == other.option && Objects.equals(this.value, other.value); + } + + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ + public static BlobWriteOption predefinedAcl(Storage.PredefinedAcl acl) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.PREDEFINED_ACL, acl); + } + + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. + * This option can not be provided together with {@link #generationMatch(long)} or + * {@link #generationNotMatch(long)}. + */ + public static BlobWriteOption doesNotExist() { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, 0L); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match the provided value. This option can not be provided + * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. + */ + public static BlobWriteOption generationMatch(long generation) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, generation); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if generation matches the provided value. This option can not be provided + * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. + */ + public static BlobWriteOption generationNotMatch(long generation) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH, + generation); + } + + /** + * Returns an option for blob's metageneration match. 
If this option is used the request will + * fail if metageneration does not match the provided value. This option can not be provided + * together with {@link #metagenerationNotMatch(long)}. + */ + public static BlobWriteOption metagenerationMatch(long metageneration) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH, + metageneration); + } + + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches the provided value. This option can not be provided together + * with {@link #metagenerationMatch(long)}. + */ + public static BlobWriteOption metagenerationNotMatch(long metageneration) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH, + metageneration); + } + + /** + * Returns an option for blob's data MD5 hash match. If this option is used the request will + * fail if blobs' data MD5 hash does not match the provided value. + */ + public static BlobWriteOption md5Match(String md5) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_MD5_MATCH, md5); + } + + /** + * Returns an option for blob's data CRC32C checksum match. If this option is used the request + * will fail if blobs' data CRC32C checksum does not match the provided value. + */ + public static BlobWriteOption crc32cMatch(String crc32c) { + return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_CRC32C_MATCH, crc32c); + } + + static StorageRpc.Tuple toWriteOptions( + BlobInfo info, BlobWriteOption... 
options) { + Set optionSet = + Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); + checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH) + && optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH)), + "metagenerationMatch and metagenerationNotMatch options can not be both provided"); + checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH) + && optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH)), + "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); + Storage.BlobWriteOption[] convertedOptions = new Storage.BlobWriteOption[options.length]; + BlobInfo writeInfo = info; + int index = 0; + for (BlobWriteOption option : options) { + StorageRpc.Tuple write = option.toWriteOption(writeInfo); + writeInfo = write.x(); + convertedOptions[index++] = write.y(); + } + return StorageRpc.Tuple.of(writeInfo, convertedOptions); + } + } + + /** + * Builder for {@code Bucket}. 
+ */ + public static class Builder extends BucketInfo.Builder { + private final Storage storage; + private final BucketInfo.BuilderImpl infoBuilder; + + Builder(Bucket bucket) { + this.storage = bucket.storage; + this.infoBuilder = new BucketInfo.BuilderImpl(bucket); + } + + @Override + public Builder name(String name) { + infoBuilder.name(name); + return this; + } + + @Override + Builder id(String id) { + infoBuilder.id(id); + return this; + } + + @Override + Builder owner(Acl.Entity owner) { + infoBuilder.owner(owner); + return this; + } + + @Override + Builder selfLink(String selfLink) { + infoBuilder.selfLink(selfLink); + return this; + } + + @Override + public Builder versioningEnabled(Boolean enable) { + infoBuilder.versioningEnabled(enable); + return this; + } + + @Override + public Builder indexPage(String indexPage) { + infoBuilder.indexPage(indexPage); + return this; + } + + @Override + public Builder notFoundPage(String notFoundPage) { + infoBuilder.notFoundPage(notFoundPage); + return this; + } + + @Override + public Builder deleteRules(Iterable rules) { + infoBuilder.deleteRules(rules); + return this; + } + + @Override + public Builder storageClass(String storageClass) { + infoBuilder.storageClass(storageClass); + return this; + } + + @Override + public Builder location(String location) { + infoBuilder.location(location); + return this; + } + + @Override + Builder etag(String etag) { + infoBuilder.etag(etag); + return this; + } + + @Override + Builder createTime(Long createTime) { + infoBuilder.createTime(createTime); + return this; + } + + @Override + Builder metageneration(Long metageneration) { + infoBuilder.metageneration(metageneration); + return this; + } + + @Override + public Builder cors(Iterable cors) { + infoBuilder.cors(cors); + return this; + } + + @Override + public Builder acl(Iterable acl) { + infoBuilder.acl(acl); + return this; + } + + @Override + public Builder defaultAcl(Iterable acl) { + infoBuilder.defaultAcl(acl); + return this; 
+ } + + @Override + public Bucket build() { + return new Bucket(storage, infoBuilder); + } + } + + Bucket(Storage storage, BucketInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.storage = checkNotNull(storage); + this.options = storage.options(); } /** @@ -85,19 +535,22 @@ public BucketInfo info() { * @return true if this bucket exists, false otherwise * @throws StorageException upon failure */ - public boolean exists() { - return storage.get(info.name()) != null; + public boolean exists(BucketSourceOption... options) { + int length = options.length; + Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); + getOptions[length] = Storage.BucketGetOption.fields(); + return storage.get(name(), getOptions) != null; } /** - * Fetches current bucket's latest information. + * Fetches current bucket's latest information. Returns {@code null} if the bucket does not exist. * * @param options bucket read options - * @return a {@code Bucket} object with latest information + * @return a {@code Bucket} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Bucket reload(BucketSourceOption... options) { - return new Bucket(storage, storage.get(info.name(), options)); + return storage.get(name(), toGetOptions(this, options)); } /** @@ -105,71 +558,69 @@ public Bucket reload(BucketSourceOption... options) { * is returned. By default no checks are made on the metadata generation of the current bucket. * If you want to update the information only if the current bucket metadata are at their latest * version use the {@code metagenerationMatch} option: - * {@code bucket.update(newInfo, BucketTargetOption.metagenerationMatch())} + * {@code bucket.update(BucketTargetOption.metagenerationMatch())} * - * @param bucketInfo new bucket's information. 
Name must match the one of the current bucket * @param options update options * @return a {@code Bucket} object with updated information * @throws StorageException upon failure */ - public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { - checkArgument(Objects.equals(bucketInfo.name(), info.name()), "Bucket name must match"); - return new Bucket(storage, storage.update(bucketInfo, options)); + public Bucket update(BucketTargetOption... options) { + return storage.update(this, options); } /** * Deletes this bucket. * * @param options bucket delete options - * @return true if bucket was deleted + * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... options) { - return storage.delete(info.name(), options); + return storage.delete(name(), toSourceOptions(this, options)); } /** * Returns the paginated list of {@code Blob} in this bucket. - * + * * @param options options for listing blobs * @throws StorageException upon failure */ - public ListResult list(Storage.BlobListOption... options) { - return new BlobListResult(storage, storage.list(info.name(), options)); + public Page list(Storage.BlobListOption... options) { + return storage.list(name(), options); } /** * Returns the requested blob in this bucket or {@code null} if not found. - * + * * @param blob name of the requested blob * @param options blob search options * @throws StorageException upon failure */ - public Blob get(String blob, BlobSourceOption... options) { - return new Blob(storage, storage.get(BlobId.of(info.name(), blob), options)); + public Blob get(String blob, BlobGetOption... options) { + return storage.get(BlobId.of(name(), blob), options); } /** * Returns a list of requested blobs in this bucket. Blobs that do not exist are null. 
- * + * * @param blobName1 first blob to get * @param blobName2 second blob to get * @param blobNames other blobs to get - * @return an immutable list of {@code Blob} objects. + * @return an immutable list of {@code Blob} objects * @throws StorageException upon failure */ public List get(String blobName1, String blobName2, String... blobNames) { BatchRequest.Builder batch = BatchRequest.builder(); - batch.get(info.name(), blobName1); - batch.get(info.name(), blobName2); + batch.get(name(), blobName1); + batch.get(name(), blobName2); for (String name : blobNames) { - batch.get(info.name(), name); + batch.get(name(), name); } List blobs = new ArrayList<>(blobNames.length); - BatchResponse response = storage.apply(batch.build()); - for (BatchResponse.Result result : response.gets()) { + BatchResponse response = storage.submit(batch.build()); + for (BatchResponse.Result result : response.gets()) { BlobInfo blobInfo = result.get(); - blobs.add(blobInfo != null ? new Blob(storage, blobInfo) : null); + blobs.add(blobInfo != null ? new Blob(storage, new BlobInfo.BuilderImpl(blobInfo)) : null); } return Collections.unmodifiableList(blobs); } @@ -179,39 +630,43 @@ public List get(String blobName1, String blobName2, String... blobNames) { * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are * computed and used for validating transferred data. - * + * * @param blob a blob name * @param content the blob content * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation - * @return a complete blob information. + * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... 
options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); - return new Blob(storage, storage.create(blobInfo, content, options)); + StorageRpc.Tuple target = + BlobTargetOption.toTargetOptions(blobInfo, options); + return storage.create(target.x(), content, target.y()); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. - * + * * @param blob a blob name * @param content the blob content as a stream * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation - * @return a complete blob information. + * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, InputStream content, String contentType, BlobWriteOption... 
options) { - BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); - return new Blob(storage, storage.create(blobInfo, content, options)); + StorageRpc.Tuple write = + BlobWriteOption.toWriteOptions(blobInfo, options); + return storage.create(write.x(), content, write.y()); } /** @@ -220,4 +675,29 @@ public Blob create(String blob, InputStream content, String contentType, public Storage storage() { return storage; } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Bucket && Objects.equals(toPb(), ((Bucket) obj).toPb()) + && Objects.equals(options, ((Bucket) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.storage = options.service(); + } + + static Bucket fromPb(Storage storage, com.google.api.services.storage.model.Bucket bucketPb) { + return new Bucket(storage, new BucketInfo.BuilderImpl(BucketInfo.fromPb(bucketPb))); + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java index 5d69c54e0d96..a1de1a07e03e 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java @@ -16,8 +16,8 @@ package com.google.gcloud.storage; -import static com.google.api.client.repackaged.com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; import static 
com.google.common.collect.Lists.transform; import com.google.api.client.json.jackson2.JacksonFactory; @@ -33,7 +33,6 @@ import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.gcloud.storage.Acl.Entity; import java.io.IOException; @@ -49,7 +48,7 @@ * @see Concepts and * Terminology */ -public final class BucketInfo implements Serializable { +public class BucketInfo implements Serializable { static final Function FROM_PB_FUNCTION = new Function() { @@ -80,9 +79,15 @@ public com.google.api.services.storage.model.Bucket apply(BucketInfo bucketInfo) private final List cors; private final List acl; private final List defaultAcl; - private final Location location; - private final StorageClass storageClass; - + private final String location; + private final String storageClass; + + /** + * Base class for bucket's delete rules. Allows to configure automatic deletion of blobs and blobs + * versions. + * + * @see Object Lifecycle Management + */ public abstract static class DeleteRule implements Serializable { private static final long serialVersionUID = 3137971668395933033L; @@ -153,11 +158,23 @@ static DeleteRule fromPb(Rule rule) { } } + /** + * Delete rule class that sets a Time To Live for blobs in the bucket. + * + * @see Object Lifecycle Management + */ public static class AgeDeleteRule extends DeleteRule { private static final long serialVersionUID = 5697166940712116380L; private final int daysToLive; + /** + * Creates an {@code AgeDeleteRule} object. + * + * @param daysToLive blobs' Time To Live expressed in days. The time when the age condition is + * considered to be satisfied is computed by adding {@code daysToLive} days to the + * midnight following blob's creation time in UTC. 
+ */ + public AgeDeleteRule(int daysToLive) { super(Type.AGE); this.daysToLive = daysToLive; @@ -200,16 +217,28 @@ private void readObject(ObjectInputStream in) throws IOException, rule = new JacksonFactory().fromString(in.readUTF(), Rule.class); } + @Override Rule toPb() { return rule; } } + /** + * Delete rule class for blobs in the bucket that have been created before a certain date. + * + * @see Object Lifecycle Management + */ public static class CreatedBeforeDeleteRule extends DeleteRule { private static final long serialVersionUID = 881692650279195867L; private final long timeMillis; + /** + * Creates a {@code CreatedBeforeDeleteRule} object. + * + * @param timeMillis a date in UTC. Blobs that have been created before midnight of the provided + * date meet the delete condition + */ public CreatedBeforeDeleteRule(long timeMillis) { super(Type.CREATE_BEFORE); this.timeMillis = timeMillis; @@ -225,11 +254,23 @@ void populateCondition(Rule.Condition condition) { } } + /** + * Delete rule class for versioned blobs. Specifies when to delete a blob's version according to + * the number of available newer versions for that blob. + * + * @see Object Lifecycle Management + */ public static class NumNewerVersionsDeleteRule extends DeleteRule { private static final long serialVersionUID = -1955554976528303894L; private final int numNewerVersions; + /** + * Creates a {@code NumNewerVersionsDeleteRule} object. + * + * @param numNewerVersions the number of newer versions. A blob's version meets the delete + * condition when {@code numNewerVersions} newer versions are available. + */ public NumNewerVersionsDeleteRule(int numNewerVersions) { super(Type.NUM_NEWER_VERSIONS); this.numNewerVersions = numNewerVersions; @@ -245,11 +286,22 @@ void populateCondition(Rule.Condition condition) { } } + /** + * Delete rule class to distinguish between live and archived blobs.
+ * + * @see Object Lifecycle Management + */ public static class IsLiveDeleteRule extends DeleteRule { private static final long serialVersionUID = -3502994563121313364L; private final boolean isLive; + /** + * Creates an {@code IsLiveDeleteRule} object. + * + * @param isLive if set to {@code true} live blobs meet the delete condition. If set to + * {@code false} delete condition is met by archived blobs. + */ public IsLiveDeleteRule(boolean isLive) { super(Type.IS_LIVE); this.isLive = isLive; @@ -265,135 +317,99 @@ void populateCondition(Rule.Condition condition) { } } - public static final class StorageClass implements Serializable { - - private static final long serialVersionUID = 374002156285326563L; - private static final ImmutableMap STRING_TO_OPTION; - private static final StorageClass NULL_VALUE = - new StorageClass(Data.nullOf(String.class)); - - private final String value; - - public enum Option { - DURABLE_REDUCED_AVAILABILITY, STANDARD; - - private final StorageClass storageClass; - - Option() { - storageClass = new StorageClass(name()); - } - } - - private StorageClass(String value) { - this.value = checkNotNull(value); - } - - @Override - public String toString() { - return value(); - } - - public String value() { - return value; - } - - public static StorageClass standard() { - return Option.STANDARD.storageClass; - } - - public static StorageClass durableReducedAvailability() { - return Option.DURABLE_REDUCED_AVAILABILITY.storageClass; - } - - public static StorageClass of(String value) { - Option option = STRING_TO_OPTION.get(value.toUpperCase()); - return option == null ? 
new StorageClass(value) : option.storageClass; - } - - static { - ImmutableMap.Builder map = ImmutableMap.builder(); - for (Option option : Option.values()) { - map.put(option.name(), option); - } - STRING_TO_OPTION = map.build(); - } - } - - public static final class Location implements Serializable { - - private static final long serialVersionUID = 9073107666838637662L; - private static final ImmutableMap STRING_TO_OPTION; - private static final Location NULL_VALUE = new Location(Data.nullOf(String.class)); - - private final String value; - - public enum Option { - US, EU, ASIA; - - private final Location location; - - Option() { - location = new Location(name()); - } - } - - private Location(String value) { - this.value = checkNotNull(value); - } - - @Override - public int hashCode() { - return Objects.hash(value); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - final Location other = (Location) obj; - return Objects.equals(this.value, other.value); - } - - @Override - public String toString() { - return value(); - } - - public String value() { - return value; - } - - public static Location us() { - return Option.US.location; - } - - public static Location eu() { - return Option.EU.location; - } - - public static Location asia() { - return Option.ASIA.location; - } - - public static Location of(String value) { - Option option = STRING_TO_OPTION.get(value.toUpperCase()); - return option == null ? new Location(value) : option.location; - } - - static { - ImmutableMap.Builder map = ImmutableMap.builder(); - for (Option option : Option.values()) { - map.put(option.name(), option); - } - STRING_TO_OPTION = map.build(); - } - } - - public static final class Builder { + /** + * Builder for {@code BucketInfo}. + */ + public abstract static class Builder { + /** + * Sets the bucket's name. 
+ */ + public abstract Builder name(String name); + + abstract Builder id(String id); + + abstract Builder owner(Acl.Entity owner); + + abstract Builder selfLink(String selfLink); + + /** + * Sets whether versioning should be enabled for this bucket. When set to true, versioning is + * fully enabled. + */ + public abstract Builder versioningEnabled(Boolean enable); + + /** + * Sets the bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. + */ + public abstract Builder indexPage(String indexPage); + + /** + * Sets the custom object to return when a requested resource is not found. + */ + public abstract Builder notFoundPage(String notFoundPage); + + /** + * Sets the bucket's lifecycle configuration as a number of delete rules. + * + * @see Lifecycle Management + */ + public abstract Builder deleteRules(Iterable rules); + + /** + * Sets the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. A list of supported values is available + * here. + */ + public abstract Builder storageClass(String storageClass); + + /** + * Sets the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. A list of supported values is available + * here. + */ + public abstract Builder location(String location); + + abstract Builder etag(String etag); + + abstract Builder createTime(Long createTime); + + abstract Builder metageneration(Long metageneration); + + /** + * Sets the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ + public abstract Builder cors(Iterable cors); + + /** + * Sets the bucket's access control configuration. 
+ * + * @see + * About Access Control Lists + */ + public abstract Builder acl(Iterable acl); + + /** + * Sets the default access control configuration to apply to bucket's blobs when no other + * configuration is specified. + * + * @see + * About Access Control Lists + */ + public abstract Builder defaultAcl(Iterable acl); + + /** + * Creates a {@code BucketInfo} object. + */ + public abstract BucketInfo build(); + } + + static final class BuilderImpl extends Builder { private String id; private String name; @@ -402,105 +418,143 @@ public static final class Builder { private Boolean versioningEnabled; private String indexPage; private String notFoundPage; - private ImmutableList deleteRules; - private StorageClass storageClass; - private Location location; + private List deleteRules; + private String storageClass; + private String location; private String etag; private Long createTime; private Long metageneration; - private ImmutableList cors; - private ImmutableList acl; - private ImmutableList defaultAcl; - - private Builder() {} + private List cors; + private List acl; + private List defaultAcl; + + BuilderImpl(String name) { + this.name = name; + } + + BuilderImpl(BucketInfo bucketInfo) { + id = bucketInfo.id; + name = bucketInfo.name; + etag = bucketInfo.etag; + createTime = bucketInfo.createTime; + metageneration = bucketInfo.metageneration; + location = bucketInfo.location; + storageClass = bucketInfo.storageClass; + cors = bucketInfo.cors; + acl = bucketInfo.acl; + defaultAcl = bucketInfo.defaultAcl; + owner = bucketInfo.owner; + selfLink = bucketInfo.selfLink; + versioningEnabled = bucketInfo.versioningEnabled; + indexPage = bucketInfo.indexPage; + notFoundPage = bucketInfo.notFoundPage; + deleteRules = bucketInfo.deleteRules; + } + @Override public Builder name(String name) { this.name = checkNotNull(name); return this; } + @Override Builder id(String id) { this.id = id; return this; } + @Override Builder owner(Acl.Entity owner) { this.owner = owner; 
return this; } + @Override Builder selfLink(String selfLink) { this.selfLink = selfLink; return this; } + @Override public Builder versioningEnabled(Boolean enable) { this.versioningEnabled = firstNonNull(enable, Data.nullOf(Boolean.class)); return this; } + @Override public Builder indexPage(String indexPage) { this.indexPage = indexPage; return this; } + @Override public Builder notFoundPage(String notFoundPage) { this.notFoundPage = notFoundPage; return this; } + @Override public Builder deleteRules(Iterable rules) { this.deleteRules = rules != null ? ImmutableList.copyOf(rules) : null; return this; } - public Builder storageClass(StorageClass storageClass) { - this.storageClass = firstNonNull(storageClass, StorageClass.NULL_VALUE); + @Override + public Builder storageClass(String storageClass) { + this.storageClass = storageClass; return this; } - public Builder location(Location location) { - this.location = firstNonNull(location, Location.NULL_VALUE); + @Override + public Builder location(String location) { + this.location = location; return this; } + @Override Builder etag(String etag) { this.etag = etag; return this; } + @Override Builder createTime(Long createTime) { this.createTime = createTime; return this; } + @Override Builder metageneration(Long metageneration) { this.metageneration = metageneration; return this; } + @Override public Builder cors(Iterable cors) { this.cors = cors != null ? ImmutableList.copyOf(cors) : null; return this; } + @Override public Builder acl(Iterable acl) { this.acl = acl != null ? ImmutableList.copyOf(acl) : null; return this; } + @Override public Builder defaultAcl(Iterable acl) { this.defaultAcl = acl != null ? 
ImmutableList.copyOf(acl) : null; return this; } + @Override public BucketInfo build() { checkNotNull(name); return new BucketInfo(this); } } - private BucketInfo(Builder builder) { + BucketInfo(BuilderImpl builder) { id = builder.id; name = builder.name; etag = builder.etag; @@ -519,88 +573,143 @@ private BucketInfo(Builder builder) { deleteRules = builder.deleteRules; } + /** + * Returns the bucket's id. + */ public String id() { return id; } + /** + * Returns the bucket's name. + */ public String name() { return name; } + /** + * Returns the bucket's owner. This is always the project team's owner group. + */ public Entity owner() { return owner; } + /** + * Returns the URI of this bucket as a string. + */ public String selfLink() { return selfLink; } + /** + * Returns {@code true} if versioning is fully enabled for this bucket, {@code false} otherwise. + */ public Boolean versioningEnabled() { return Data.isNull(versioningEnabled) ? null : versioningEnabled; } + /** + * Returns bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. + */ public String indexPage() { return indexPage; } + /** + * Returns the custom object to return when a requested resource is not found. + */ public String notFoundPage() { return notFoundPage; } + /** + * Returns bucket's lifecycle configuration as a number of delete rules. + * + * @see Lifecycle Management + */ public List deleteRules() { return deleteRules; } + /** + * Returns HTTP 1.1 Entity tag for the bucket. + * + * @see Entity Tags + */ public String etag() { return etag; } + /** + * Returns the time at which the bucket was created. + */ public Long createTime() { return createTime; } + /** + * Returns the metadata generation of this bucket. + */ public Long metageneration() { return metageneration; } - public Location location() { - return location == null || Data.isNull(location.value) ? 
null : location; - } - - public StorageClass storageClass() { - return storageClass == null || Data.isNull(storageClass.value) ? null : storageClass; - } - + /** + * Returns the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. + * + * @see Bucket Locations + */ + public String location() { + return location; + } + + /** + * Returns the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. + * + * @see Storage Classes + */ + public String storageClass() { + return storageClass; + } + + /** + * Returns the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ public List cors() { return cors; } + /** + * Returns the bucket's access control configuration. + * + * @see + * About Access Control Lists + */ public List acl() { return acl; } + /** + * Returns the default access control configuration for this bucket's blobs. + * + * @see + * About Access Control Lists + */ public List defaultAcl() { return defaultAcl; } + /** + * Returns a builder for the current bucket. 
+ */ public Builder toBuilder() { - return new Builder() - .name(name) - .id(id) - .createTime(createTime) - .etag(etag) - .metageneration(metageneration) - .cors(cors) - .acl(acl) - .defaultAcl(defaultAcl) - .location(location) - .storageClass(storageClass) - .owner(owner) - .selfLink(selfLink) - .versioningEnabled(versioningEnabled) - .indexPage(indexPage) - .notFoundPage(notFoundPage) - .deleteRules(deleteRules); + return new BuilderImpl(this); } @Override @@ -610,7 +719,8 @@ public int hashCode() { @Override public boolean equals(Object obj) { - return obj instanceof BucketInfo && Objects.equals(toPb(), ((BucketInfo) obj).toPb()); + return obj != null && obj.getClass().equals(BucketInfo.class) + && Objects.equals(toPb(), ((BucketInfo) obj).toPb()); } @Override @@ -633,10 +743,10 @@ com.google.api.services.storage.model.Bucket toPb() { bucketPb.setMetageneration(metageneration); } if (location != null) { - bucketPb.setLocation(location.value()); + bucketPb.setLocation(location); } if (storageClass != null) { - bucketPb.setStorageClass(storageClass.value()); + bucketPb.setStorageClass(storageClass); } if (cors != null) { bucketPb.setCors(transform(cors, Cors.TO_PB_FUNCTION)); @@ -683,16 +793,22 @@ public Rule apply(DeleteRule deleteRule) { return bucketPb; } + /** + * Creates a {@code BucketInfo} object for the provided bucket name. + */ public static BucketInfo of(String name) { return builder(name).build(); } + /** + * Returns a {@code BucketInfo} builder where the bucket's name is set to the provided name. 
+ */ public static Builder builder(String name) { - return new Builder().name(name); + return new BuilderImpl(name); } static BucketInfo fromPb(com.google.api.services.storage.model.Bucket bucketPb) { - Builder builder = new Builder().name(bucketPb.getName()); + Builder builder = new BuilderImpl(bucketPb.getName()); if (bucketPb.getId() != null) { builder.id(bucketPb.getId()); } @@ -709,10 +825,10 @@ static BucketInfo fromPb(com.google.api.services.storage.model.Bucket bucketPb) builder.createTime(bucketPb.getTimeCreated().getValue()); } if (bucketPb.getLocation() != null) { - builder.location(Location.of(bucketPb.getLocation())); + builder.location(bucketPb.getLocation()); } if (bucketPb.getStorageClass() != null) { - builder.storageClass(StorageClass.of(bucketPb.getStorageClass())); + builder.storageClass(bucketPb.getStorageClass()); } if (bucketPb.getCors() != null) { builder.cors(transform(bucketPb.getCors(), Cors.FROM_PB_FUNCTION)); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java index 142f8d4b6de7..62b39e005369 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java @@ -22,9 +22,9 @@ import com.google.gcloud.Restorable; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.RewriteRequest; -import com.google.gcloud.spi.StorageRpc.RewriteResponse; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.RewriteRequest; +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse; import java.io.Serializable; import java.util.Map; @@ -55,11 +55,12 @@ public class CopyWriter implements Restorable { /** * Returns the updated information for the written blob. 
Calling this method when {@code isDone()} * is {@code false} will block until all pending chunks are copied. - *

    - * This method has the same effect of doing: - *

        {@code while (!copyWriter.isDone()) {
    -   *        copyWriter.copyChunk();
    -   *    }}
    +   *
    +   * 

    This method has the same effect as doing: + *

     {@code
    +   * while (!copyWriter.isDone()) {
    +   *    copyWriter.copyChunk();
    +   * }}
        * 
    * * @throws StorageException upon failure @@ -79,14 +80,14 @@ public long blobSize() { } /** - * Returns {@code true} of blob rewrite finished, {@code false} otherwise. + * Returns {@code true} if blob copy has finished, {@code false} otherwise. */ public boolean isDone() { return rewriteResponse.isDone; } /** - * Returns the number of bytes copied. + * Returns the number of bytes copied. */ public long totalBytesCopied() { return rewriteResponse.totalBytesRewritten; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java index a94359f17a79..bcbbd1030dbc 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java @@ -33,6 +33,9 @@ /** * Cross-Origin Resource Sharing (CORS) configuration for a bucket. + * + * @see + * Cross-Origin Resource Sharing (CORS) */ public final class Cors implements Serializable { @@ -57,6 +60,9 @@ public Bucket.Cors apply(Cors cors) { private final ImmutableList origins; private final ImmutableList responseHeaders; + /** + * Class for a CORS origin. + */ public static final class Origin implements Serializable { private static final long serialVersionUID = -4447958124895577993L; @@ -69,10 +75,16 @@ private Origin(String value) { this.value = checkNotNull(value); } + /** + * Returns an {@code Origin} object for all possible origins. + */ public static Origin any() { return ANY; } + /** + * Returns an {@code Origin} object for the given scheme, host and port. + */ public static Origin of(String scheme, String host, int port) { try { return of(new URI(scheme, null, host, port, null, null, null).toString()); @@ -81,6 +93,9 @@ public static Origin of(String scheme, String host, int port) { } } + /** + * Creates an {@code Origin} object for the provided value. 
+ */ public static Origin of(String value) { if (ANY_URI.equals(value)) { return any(); @@ -111,6 +126,9 @@ public String value() { } } + /** + * CORS configuration builder. + */ public static final class Builder { private Integer maxAgeSeconds; @@ -120,26 +138,42 @@ public static final class Builder { private Builder() {} + /** + * Sets the max time in seconds in which a client can issue requests before sending a new + * preflight request. + */ public Builder maxAgeSeconds(Integer maxAgeSeconds) { this.maxAgeSeconds = maxAgeSeconds; return this; } + /** + * Sets the HTTP methods supported by this CORS configuration. + */ public Builder methods(Iterable methods) { this.methods = methods != null ? ImmutableList.copyOf(methods) : null; return this; } + /** + * Sets the origins for this CORS configuration. + */ public Builder origins(Iterable origins) { this.origins = origins != null ? ImmutableList.copyOf(origins) : null; return this; } + /** + * Sets the response headers supported by this CORS configuration. + */ public Builder responseHeaders(Iterable headers) { this.responseHeaders = headers != null ? ImmutableList.copyOf(headers) : null; return this; } + /** + * Creates a CORS configuration. + */ public Cors build() { return new Cors(this); } @@ -152,22 +186,38 @@ private Cors(Builder builder) { this.responseHeaders = builder.responseHeaders; } + /** + * Returns the max time in seconds in which a client can issue requests before sending a new + * preflight request. + */ public Integer maxAgeSeconds() { return maxAgeSeconds; } + /** + * Returns the HTTP methods supported by this CORS configuration. + */ public List methods() { return methods; } + /** + * Returns the origins in this CORS configuration. + */ public List origins() { return origins; } + /** + * Returns the response headers supported by this CORS configuration. + */ public List responseHeaders() { return responseHeaders; } + /** + * Returns a builder for this CORS configuration. 
+ */ public Builder toBuilder() { return builder() .maxAgeSeconds(maxAgeSeconds) @@ -193,6 +243,9 @@ public boolean equals(Object obj) { && Objects.equals(responseHeaders, other.responseHeaders); } + /** + * Returns a CORS configuration builder. + */ public static Builder builder() { return new Builder(); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java index 798db688c8ec..65c55da7efc8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java @@ -19,13 +19,13 @@ import static com.google.common.base.Preconditions.checkNotNull; import com.google.common.base.MoreObjects; -import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; import java.io.Serializable; import java.util.Objects; /** - * Base class for Storage operation option + * Base class for Storage operation option. 
*/ class Option implements Serializable { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index 9bc971a09dba..c30111e50835 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -19,19 +19,26 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.gcloud.AuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; +import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.Service; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.InputStream; import java.io.Serializable; import java.net.URL; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -46,7 +53,7 @@ */ public interface Storage extends Service { - public static final String DEFAULT_CONTENT_TYPE = "application/octet-stream"; + String DEFAULT_CONTENT_TYPE = "application/octet-stream"; enum PredefinedAcl { AUTHENTICATED_READ("authenticatedRead"), @@ -69,6 +76,92 @@ String entry() { } } + enum BucketField { + ID("id"), + SELF_LINK("selfLink"), + NAME("name"), + TIME_CREATED("timeCreated"), + METAGENERATION("metageneration"), + ACL("acl"), + DEFAULT_OBJECT_ACL("defaultObjectAcl"), + OWNER("owner"), + 
LOCATION("location"), + WEBSITE("website"), + VERSIONING("versioning"), + CORS("cors"), + STORAGE_CLASS("storageClass"), + ETAG("etag"); + + private final String selector; + + BucketField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(BucketField... fields) { + HashSet fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(NAME.selector()); + for (BucketField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + enum BlobField { + ACL("acl"), + BUCKET("bucket"), + CACHE_CONTROL("cacheControl"), + COMPONENT_COUNT("componentCount"), + CONTENT_DISPOSITION("contentDisposition"), + CONTENT_ENCODING("contentEncoding"), + CONTENT_LANGUAGE("contentLanguage"), + CONTENT_TYPE("contentType"), + CRC32C("crc32c"), + ETAG("etag"), + GENERATION("generation"), + ID("id"), + KIND("kind"), + MD5HASH("md5Hash"), + MEDIA_LINK("mediaLink"), + METADATA("metadata"), + METAGENERATION("metageneration"), + NAME("name"), + OWNER("owner"), + SELF_LINK("selfLink"), + SIZE("size"), + STORAGE_CLASS("storageClass"), + TIME_DELETED("timeDeleted"), + UPDATED("updated"); + + private final String selector; + + BlobField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(BlobField... fields) { + HashSet fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(BUCKET.selector()); + fieldStrings.add(NAME.selector()); + for (BlobField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying bucket target options. 
+ */ class BucketTargetOption extends Option { private static final long serialVersionUID = -5880204616982900975L; @@ -81,23 +174,40 @@ private BucketTargetOption(StorageRpc.Option rpcOption) { this(rpcOption, null); } + /** + * Returns an option for specifying bucket's predefined ACL configuration. + */ public static BucketTargetOption predefinedAcl(PredefinedAcl acl) { return new BucketTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option for specifying bucket's default ACL configuration for blobs. + */ public static BucketTargetOption predefinedDefaultObjectAcl(PredefinedAcl acl) { return new BucketTargetOption(StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL, acl.entry()); } + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BucketTargetOption metagenerationMatch() { return new BucketTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if metageneration matches. + */ public static BucketTargetOption metagenerationNotMatch() { return new BucketTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } } + /** + * Class for specifying bucket source options. + */ class BucketSourceOption extends Option { private static final long serialVersionUID = 5185657617120212117L; @@ -106,15 +216,68 @@ private BucketSourceOption(StorageRpc.Option rpcOption, long metageneration) { super(rpcOption, metageneration); } + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if bucket's metageneration does not match the provided value. 
+ */ public static BucketSourceOption metagenerationMatch(long metageneration) { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if bucket's metageneration matches the provided value. + */ public static BucketSourceOption metagenerationNotMatch(long metageneration) { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } } + /** + * Class for specifying bucket source options. + */ + class BucketGetOption extends Option { + + private static final long serialVersionUID = 1901844869484087395L; + + private BucketGetOption(StorageRpc.Option rpcOption, long metageneration) { + super(rpcOption, metageneration); + } + + private BucketGetOption(StorageRpc.Option rpcOption, String value) { + super(rpcOption, value); + } + + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if bucket's metageneration does not match the provided value. + */ + public static BucketGetOption metagenerationMatch(long metageneration) { + return new BucketGetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); + } + + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if bucket's metageneration matches the provided value. + */ + public static BucketGetOption metagenerationNotMatch(long metageneration) { + return new BucketGetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); + } + + /** + * Returns an option to specify the bucket's fields to be returned by the RPC call. If this + * option is not provided all bucket's fields are returned. {@code BucketGetOption.fields}) can + * be used to specify only the fields of interest. Bucket name is always returned, even if not + * specified. + */ + public static BucketGetOption fields(BucketField... 
fields) { + return new BucketGetOption(StorageRpc.Option.FIELDS, BucketField.selector(fields)); + } + } + + /** + * Class for specifying blob target options. + */ class BlobTargetOption extends Option { private static final long serialVersionUID = 214616862061934846L; @@ -127,26 +290,48 @@ private BlobTargetOption(StorageRpc.Option rpcOption) { this(rpcOption, null); } + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ public static BlobTargetOption predefinedAcl(PredefinedAcl acl) { return new BlobTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. + */ public static BlobTargetOption doesNotExist() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, 0L); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match. + */ public static BlobTargetOption generationMatch() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if generation matches. + */ public static BlobTargetOption generationNotMatch() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobTargetOption metagenerationMatch() { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. 
+ */ public static BlobTargetOption metagenerationNotMatch() { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } @@ -172,6 +357,9 @@ static Tuple convert(BlobInfo info, BlobWriteOptio } } + /** + * Class for specifying blob write options. + */ class BlobWriteOption implements Serializable { private static final long serialVersionUID = -3880421670966224580L; @@ -218,64 +406,218 @@ public boolean equals(Object obj) { return this.option == other.option && Objects.equals(this.value, other.value); } + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ public static BlobWriteOption predefinedAcl(PredefinedAcl acl) { return new BlobWriteOption(Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. + */ public static BlobWriteOption doesNotExist() { return new BlobWriteOption(Option.IF_GENERATION_MATCH, 0L); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match. + */ public static BlobWriteOption generationMatch() { return new BlobWriteOption(Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if generation matches. + */ public static BlobWriteOption generationNotMatch() { return new BlobWriteOption(Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobWriteOption metagenerationMatch() { return new BlobWriteOption(Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. 
+ */ public static BlobWriteOption metagenerationNotMatch() { return new BlobWriteOption(Option.IF_METAGENERATION_NOT_MATCH); } + /** + * Returns an option for blob's data MD5 hash match. If this option is used the request will + * fail if blobs' data MD5 hash does not match. + */ public static BlobWriteOption md5Match() { return new BlobWriteOption(Option.IF_MD5_MATCH, true); } + /** + * Returns an option for blob's data CRC32C checksum match. If this option is used the request + * will fail if blobs' data CRC32C checksum does not match. + */ public static BlobWriteOption crc32cMatch() { return new BlobWriteOption(Option.IF_CRC32C_MATCH, true); } } + /** + * Class for specifying blob source options. + */ class BlobSourceOption extends Option { private static final long serialVersionUID = -3712768261070182991L; - private BlobSourceOption(StorageRpc.Option rpcOption, long value) { + private BlobSourceOption(StorageRpc.Option rpcOption, Long value) { super(rpcOption, value); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobSourceOption generationMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, null); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match the provided value. + */ public static BlobSourceOption generationMatch(long generation) { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } + /** + * Returns an option for blob's data generation mismatch. 
If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobSourceOption generationNotMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches the provided value. + */ public static BlobSourceOption generationNotMatch(long generation) { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if blob's metageneration does not match the provided value. + */ public static BlobSourceOption metagenerationMatch(long metageneration) { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if blob's metageneration matches the provided value. + */ public static BlobSourceOption metagenerationNotMatch(long metageneration) { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } } + /** + * Class for specifying blob get options. + */ + class BlobGetOption extends Option { + + private static final long serialVersionUID = 803817709703661480L; + + private BlobGetOption(StorageRpc.Option rpcOption, Long value) { + super(rpcOption, value); + } + + private BlobGetOption(StorageRpc.Option rpcOption, String value) { + super(rpcOption, value); + } + + /** + * Returns an option for blob's data generation match. 
If this option is used the request will + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobGetOption generationMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, (Long) null); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match the provided value. + */ + public static BlobGetOption generationMatch(long generation) { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobGetOption generationNotMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, (Long) null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches the provided value. + */ + public static BlobGetOption generationNotMatch(long generation) { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); + } + + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if blob's metageneration does not match the provided value. 
+ */ + public static BlobGetOption metagenerationMatch(long metageneration) { + return new BlobGetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); + } + + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if blob's metageneration matches the provided value. + */ + public static BlobGetOption metagenerationNotMatch(long metageneration) { + return new BlobGetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); + } + + /** + * Returns an option to specify the blob's fields to be returned by the RPC call. If this option + * is not provided all blob's fields are returned. {@code BlobGetOption.fields}) can be used to + * specify only the fields of interest. Blob name and bucket are always returned, even if not + * specified. + */ + public static BlobGetOption fields(BlobField... fields) { + return new BlobGetOption(StorageRpc.Option.FIELDS, BlobField.selector(fields)); + } + } + + /** + * Class for specifying bucket list options. + */ class BucketListOption extends Option { private static final long serialVersionUID = 8754017079673290353L; @@ -284,19 +626,44 @@ private BucketListOption(StorageRpc.Option option, Object value) { super(option, value); } - public static BucketListOption maxResults(long maxResults) { - return new BucketListOption(StorageRpc.Option.MAX_RESULTS, maxResults); + /** + * Returns an option to specify the maximum number of buckets returned per page. + */ + public static BucketListOption pageSize(long pageSize) { + return new BucketListOption(StorageRpc.Option.MAX_RESULTS, pageSize); } + /** + * Returns an option to specify the page token from which to start listing buckets. + */ public static BucketListOption startPageToken(String pageToken) { return new BucketListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } + /** + * Returns an option to set a prefix to filter results to buckets whose names begin with this + * prefix. 
+ */ public static BucketListOption prefix(String prefix) { return new BucketListOption(StorageRpc.Option.PREFIX, prefix); } + + /** + * Returns an option to specify the bucket's fields to be returned by the RPC call. If this + * option is not provided all bucket's fields are returned. {@code BucketListOption.fields}) can + * be used to specify only the fields of interest. Bucket name is always returned, even if not + * specified. + */ + public static BucketListOption fields(BucketField... fields) { + StringBuilder builder = new StringBuilder(); + builder.append("items(").append(BucketField.selector(fields)).append("),nextPageToken"); + return new BucketListOption(StorageRpc.Option.FIELDS, builder.toString()); + } } + /** + * Class for specifying blob list options. + */ class BlobListOption extends Option { private static final long serialVersionUID = 9083383524788661294L; @@ -305,23 +672,67 @@ private BlobListOption(StorageRpc.Option option, Object value) { super(option, value); } - public static BlobListOption maxResults(long maxResults) { - return new BlobListOption(StorageRpc.Option.MAX_RESULTS, maxResults); + /** + * Returns an option to specify the maximum number of blobs returned per page. + */ + public static BlobListOption pageSize(long pageSize) { + return new BlobListOption(StorageRpc.Option.MAX_RESULTS, pageSize); } + /** + * Returns an option to specify the page token from which to start listing blobs. + */ public static BlobListOption startPageToken(String pageToken) { return new BlobListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } + /** + * Returns an option to set a prefix to filter results to blobs whose names begin with this + * prefix. 
+ */ public static BlobListOption prefix(String prefix) { return new BlobListOption(StorageRpc.Option.PREFIX, prefix); } - public static BlobListOption recursive(boolean recursive) { - return new BlobListOption(StorageRpc.Option.DELIMITER, recursive); + /** + * If specified, results are returned in a directory-like mode. Blobs whose names, after a + * possible {@link #prefix(String)}, do not contain the '/' delimiter are returned as is. Blobs + * whose names, after a possible {@link #prefix(String)}, contain the '/' delimiter, will have + * their name truncated after the delimiter and will be returned as {@link Blob} objects where + * only {@link Blob#blobId()}, {@link Blob#size()} and {@link Blob#isDirectory()} are set. For + * such directory blobs, ({@link BlobId#generation()} returns {@code null}), {@link Blob#size()} + * returns {@code 0} while {@link Blob#isDirectory()} returns {@code true}. Duplicate directory + * blobs are omitted. + */ + public static BlobListOption currentDirectory() { + return new BlobListOption(StorageRpc.Option.DELIMITER, true); + } + + /** + * If set to {@code true}, lists all versions of a blob. The default is {@code false}. + * + * @see Object Versioning + */ + public static BlobListOption versions(boolean versions) { + return new BlobListOption(StorageRpc.Option.VERSIONS, versions); + } + + /** + * Returns an option to specify the blob's fields to be returned by the RPC call. If this option + * is not provided all blob's fields are returned. {@code BlobListOption.fields}) can be used to + * specify only the fields of interest. Blob name and bucket are always returned, even if not + * specified. + */ + public static BlobListOption fields(BlobField... fields) { + StringBuilder builder = new StringBuilder(); + builder.append("items(").append(BlobField.selector(fields)).append("),nextPageToken"); + return new BlobListOption(StorageRpc.Option.FIELDS, builder.toString()); } } + /** + * Class for specifying signed URL options. 
+ */ class SignUrlOption implements Serializable { private static final long serialVersionUID = 7850569877451099267L; @@ -381,6 +792,12 @@ public static SignUrlOption serviceAccount(ServiceAccountAuthCredentials credent } } + /** + * A class to contain all information needed for a Google Cloud Storage Compose operation. + * + * @see + * Compose Operation + */ class ComposeRequest implements Serializable { private static final long serialVersionUID = -7385681353748590911L; @@ -389,6 +806,9 @@ class ComposeRequest implements Serializable { private final BlobInfo target; private final List targetOptions; + /** + * Class for Compose source blobs. + */ public static class SourceBlob implements Serializable { private static final long serialVersionUID = 4094962795951990439L; @@ -420,6 +840,9 @@ public static class Builder { private final Set targetOptions = new LinkedHashSet<>(); private BlobInfo target; + /** + * Add source blobs for compose operation. + */ public Builder addSource(Iterable blobs) { for (String blob : blobs) { sourceBlobs.add(new SourceBlob(blob)); @@ -427,6 +850,9 @@ public Builder addSource(Iterable blobs) { return this; } + /** + * Add source blobs for compose operation. + */ public Builder addSource(String... blobs) { return addSource(Arrays.asList(blobs)); } @@ -439,21 +865,33 @@ public Builder addSource(String blob, long generation) { return this; } + /** + * Sets compose operation's target blob. + */ public Builder target(BlobInfo target) { this.target = target; return this; } + /** + * Sets compose operation's target blob options. + */ public Builder targetOptions(BlobTargetOption... options) { Collections.addAll(targetOptions, options); return this; } + /** + * Sets compose operation's target blob options. + */ public Builder targetOptions(Iterable options) { Iterables.addAll(targetOptions, options); return this; } + /** + * Creates a {@code ComposeRequest} object. 
+ */ public ComposeRequest build() { checkArgument(!sourceBlobs.isEmpty()); checkNotNull(target); @@ -467,31 +905,59 @@ private ComposeRequest(Builder builder) { targetOptions = ImmutableList.copyOf(builder.targetOptions); } + /** + * Returns compose operation's source blobs. + */ public List sourceBlobs() { return sourceBlobs; } + /** + * Returns compose operation's target blob. + */ public BlobInfo target() { return target; } + /** + * Returns compose operation's target blob's options. + */ public List targetOptions() { return targetOptions; } + /** + * Creates a {@code ComposeRequest} object. + * + * @param sources source blobs names + * @param target target blob + */ public static ComposeRequest of(Iterable sources, BlobInfo target) { return builder().target(target).addSource(sources).build(); } + /** + * Creates a {@code ComposeRequest} object. + * + * @param bucket name of the bucket where the compose operation takes place + * @param sources source blobs names + * @param target target blob name + */ public static ComposeRequest of(String bucket, Iterable sources, String target) { return of(sources, BlobInfo.builder(BlobId.of(bucket, target)).build()); } + /** + * Returns a {@code ComposeRequest} builder. + */ public static Builder builder() { return new Builder(); } } + /** + * A class to contain all information needed for a Google Cloud Storage Copy operation. + */ class CopyRequest implements Serializable { private static final long serialVersionUID = -4498650529476219937L; @@ -513,7 +979,7 @@ public static class Builder { /** * Sets the blob to copy given bucket and blob name. * - * @return the builder. + * @return the builder */ public Builder source(String bucket, String blob) { this.source = BlobId.of(bucket, blob); @@ -523,7 +989,7 @@ public Builder source(String bucket, String blob) { /** * Sets the blob to copy given a {@link BlobId}. * - * @return the builder. 
+ * @return the builder */ public Builder source(BlobId source) { this.source = source; @@ -533,7 +999,7 @@ public Builder source(BlobId source) { /** * Sets blob's source options. * - * @return the builder. + * @return the builder */ public Builder sourceOptions(BlobSourceOption... options) { Collections.addAll(sourceOptions, options); @@ -543,7 +1009,7 @@ public Builder sourceOptions(BlobSourceOption... options) { /** * Sets blob's source options. * - * @return the builder. + * @return the builder */ public Builder sourceOptions(Iterable options) { Iterables.addAll(sourceOptions, options); @@ -553,7 +1019,7 @@ public Builder sourceOptions(Iterable options) { /** * Sets the copy target. Target blob information is copied from source. * - * @return the builder. + * @return the builder */ public Builder target(BlobId target) { this.target = BlobInfo.builder(target).build(); @@ -565,7 +1031,7 @@ public Builder target(BlobId target) { * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code * target.contentType} is a required field. * - * @return the builder. + * @return the builder * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ public Builder target(BlobInfo target, BlobTargetOption... options) @@ -581,7 +1047,7 @@ public Builder target(BlobInfo target, BlobTargetOption... options) * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code * target.contentType} is a required field. * - * @return the builder. + * @return the builder * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ public Builder target(BlobInfo target, Iterable options) @@ -597,7 +1063,7 @@ public Builder target(BlobInfo target, Iterable options) * if source and target blob share the same location and storage class as copy is made with * one single RPC. * - * @return the builder. 
+ * @return the builder */ public Builder megabytesCopiedPerChunk(Long megabytesCopiedPerChunk) { this.megabytesCopiedPerChunk = megabytesCopiedPerChunk; @@ -605,7 +1071,7 @@ public Builder megabytesCopiedPerChunk(Long megabytesCopiedPerChunk) { } /** - * Creates a {@code CopyRequest}. + * Creates a {@code CopyRequest} object. */ public CopyRequest build() { checkNotNull(source); @@ -623,7 +1089,7 @@ private CopyRequest(Builder builder) { } /** - * Returns the blob to rewrite, as a {@link BlobId}. + * Returns the blob to copy, as a {@link BlobId}. */ public BlobId source() { return source; @@ -637,7 +1103,7 @@ public List sourceOptions() { } /** - * Returns the rewrite target. + * Returns the {@link BlobInfo} for the target blob. */ public BlobInfo target() { return target; @@ -667,7 +1133,7 @@ public Long megabytesCopiedPerChunk() { * @param sourceBucket name of the bucket containing the source blob * @param sourceBlob name of the source blob * @param target a {@code BlobInfo} object for the target blob - * @return a copy request. + * @return a copy request * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) @@ -683,7 +1149,7 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo ta * * @param sourceBlobId a {@code BlobId} object for the source blob * @param target a {@code BlobInfo} object for the target blob - * @return a copy request. + * @return a copy request * @throws IllegalArgumentException if {@code target.contentType} is {@code null} */ public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) @@ -698,7 +1164,7 @@ public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) * @param sourceBucket name of the bucket containing both the source and the target blob * @param sourceBlob name of the source blob * @param targetBlob name of the target blob - * @return a copy request. 
+ * @return a copy request */ public static CopyRequest of(String sourceBucket, String sourceBlob, String targetBlob) { return CopyRequest.builder() @@ -713,7 +1179,7 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, String targ * @param sourceBucket name of the bucket containing the source blob * @param sourceBlob name of the source blob * @param target a {@code BlobId} object for the target blob - * @return a copy request. + * @return a copy request */ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobId target) { return builder().source(sourceBucket, sourceBlob).target(target).build(); @@ -724,7 +1190,7 @@ public static CopyRequest of(String sourceBucket, String sourceBlob, BlobId targ * * @param sourceBlobId a {@code BlobId} object for the source blob * @param targetBlob name of the target blob, in the same bucket of the source blob - * @return a copy request. + * @return a copy request */ public static CopyRequest of(BlobId sourceBlobId, String targetBlob) { return CopyRequest.builder() @@ -738,7 +1204,7 @@ public static CopyRequest of(BlobId sourceBlobId, String targetBlob) { * * @param sourceBlobId a {@code BlobId} object for the source blob * @param targetBlobId a {@code BlobId} object for the target blob - * @return a copy request. + * @return a copy request */ public static CopyRequest of(BlobId sourceBlobId, BlobId targetBlobId) { return CopyRequest.builder() @@ -757,162 +1223,168 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx } /** - * Create a new bucket. + * Creates a new bucket. * - * @return a complete bucket information. + * @return a complete bucket * @throws StorageException upon failure */ - BucketInfo create(BucketInfo bucketInfo, BucketTargetOption... options); + Bucket create(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Create a new blob with no content. + * Creates a new blob with no content. * - * @return a complete blob information. 
+ * @return a [@code Blob} with complete information * @throws StorageException upon failure */ - BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options); + Blob create(BlobInfo blobInfo, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of * {@code content} are computed and used for validating transferred data. * - * @return a complete blob information. + * @return a [@code Blob} with complete information * @throws StorageException upon failure * @see Hashes and ETags */ - BlobInfo create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options); + Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the - * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. + * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. The given + * input stream is closed upon success. * - * @return a complete blob information. + * @return a [@code Blob} with complete information * @throws StorageException upon failure */ - BlobInfo create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); + Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); /** - * Return the requested bucket or {@code null} if not found. + * Returns the requested bucket or {@code null} if not found. 
* * @throws StorageException upon failure */ - BucketInfo get(String bucket, BucketSourceOption... options); + Bucket get(String bucket, BucketGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(String bucket, String blob, BlobSourceOption... options); + Blob get(String bucket, String blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(BlobId blob, BlobSourceOption... options); + Blob get(BlobId blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(BlobId blob); + Blob get(BlobId blob); /** - * List the project's buckets. + * Lists the project's buckets. * * @throws StorageException upon failure */ - ListResult list(BucketListOption... options); + Page list(BucketListOption... options); /** - * List the bucket's blobs. + * Lists the bucket's blobs. If the {@link BlobListOption#currentDirectory()} option is provided, + * results are returned in a directory-like mode. * * @throws StorageException upon failure */ - ListResult list(String bucket, BlobListOption... options); + Page list(String bucket, BlobListOption... options); /** - * Update bucket information. + * Updates bucket information. * * @return the updated bucket * @throws StorageException upon failure */ - BucketInfo update(BucketInfo bucketInfo, BucketTargetOption... options); + Bucket update(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. 
Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. - *

    - * Example usage of replacing blob's metadata: - *

        {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
    -   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
    +   *
    +   * 

    Example usage of replacing blob's metadata: + *

     {@code
    +   * service.update(BlobInfo.builder("bucket", "name").metadata(null).build());
    +   * service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());
    +   * }
        * 
    * * @return the updated blob * @throws StorageException upon failure */ - BlobInfo update(BlobInfo blobInfo, BlobTargetOption... options); + Blob update(BlobInfo blobInfo, BlobTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. - *

    - * Example usage of replacing blob's metadata: - *

        {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
    -   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
    +   *
    +   * 

    Example usage of replacing blob's metadata: + *

     {@code
    +   * service.update(BlobInfo.builder("bucket", "name").metadata(null).build());
    +   * service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());
    +   * }
        * 
    * * @return the updated blob * @throws StorageException upon failure */ - BlobInfo update(BlobInfo blobInfo); + Blob update(BlobInfo blobInfo); /** - * Delete the requested bucket. + * Deletes the requested bucket. * - * @return true if bucket was deleted + * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(String bucket, BucketSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(String bucket, String blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(BlobId blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * - * @return true if blob was deleted + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ boolean delete(BlobId blob); /** - * Send a compose request. + * Sends a compose request. * - * @return the composed blob. + * @return the composed blob * @throws StorageException upon failure */ - BlobInfo compose(ComposeRequest composeRequest); + Blob compose(ComposeRequest composeRequest); /** * Sends a copy request. Returns a {@link CopyWriter} object for the provided @@ -921,16 +1393,17 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * returns, regardless of the {@link CopyRequest#megabytesCopiedPerChunk} parameter. * If source and destination have different location or storage class {@link CopyWriter#result()} * might issue multiple RPC calls depending on blob's size. - *

    - * Example usage of copy: - *

        {@code BlobInfo blob = service.copy(copyRequest).result();}
    +   *
    +   * 

    Example usage of copy: + *

     {@code BlobInfo blob = service.copy(copyRequest).result();}
        * 
    * To explicitly issue chunk copy requests use {@link CopyWriter#copyChunk()} instead: - *
        {@code CopyWriter copyWriter = service.copy(copyRequest);
    -   *    while (!copyWriter.isDone()) {
    -   *        copyWriter.copyChunk();
    -   *    }
    -   *    BlobInfo blob = copyWriter.result();
    +   * 
     {@code
    +   * CopyWriter copyWriter = service.copy(copyRequest);
    +   * while (!copyWriter.isDone()) {
    +   *     copyWriter.copyChunk();
    +   * }
    +   * BlobInfo blob = copyWriter.result();
        * }
        * 
    * @@ -944,7 +1417,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Reads all the bytes from a blob. * - * @return the blob's content. + * @return the blob's content * @throws StorageException upon failure */ byte[] readAllBytes(String bucket, String blob, BlobSourceOption... options); @@ -952,60 +1425,100 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx /** * Reads all the bytes from a blob. * - * @return the blob's content. + * @return the blob's content * @throws StorageException upon failure */ byte[] readAllBytes(BlobId blob, BlobSourceOption... options); /** - * Send a batch request. + * Sends a batch request. * * @return the batch response * @throws StorageException upon failure */ - BatchResponse apply(BatchRequest batchRequest); + BatchResponse submit(BatchRequest batchRequest); /** - * Return a channel for reading the blob's content. + * Returns a channel for reading the blob's content. The blob's latest generation is read. If the + * blob changes while reading (i.e. {@link BlobInfo#etag()} changes), subsequent calls to + * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}. + * + *

    The {@link BlobSourceOption#generationMatch(long)} option can be provided to ensure that + * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if blob`s + * generation differs from the expected one. * * @throws StorageException upon failure */ - BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options); + ReadChannel reader(String bucket, String blob, BlobSourceOption... options); /** - * Return a channel for reading the blob's content. + * Returns a channel for reading the blob's content. If {@code blob.generation()} is set + * data corresponding to that generation is read. If {@code blob.generation()} is {@code null} + * the blob's latest generation is read. If the blob changes while reading (i.e. + * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)} + * may throw {@link StorageException}. + * + *

    The {@link BlobSourceOption#generationMatch()} and + * {@link BlobSourceOption#generationMatch(long)} options can be used to ensure that + * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if the + * blob`s generation differs from the expected one. * * @throws StorageException upon failure */ - BlobReadChannel reader(BlobId blob, BlobSourceOption... options); + ReadChannel reader(BlobId blob, BlobSourceOption... options); /** - * Create a blob and return a channel for writing its content. By default any md5 and crc32c + * Creates a blob and return a channel for writing its content. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * * @throws StorageException upon failure */ - BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options); + WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options); /** - * Generates a signed URL for a blob. - * If you have a blob that you want to allow access to for a fixed - * amount of time, you can use this method to generate a URL that - * is only valid within a certain time period. - * This is particularly useful if you don't want publicly - * accessible blobs, but don't want to require users to explicitly log in. - *

    - * Example usage of creating a signed URL that is valid for 2 weeks: - *

       {@code
    -   *     service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
    +   * Generates a signed URL for a blob. If you have a blob that you want to allow access to for a
    +   * fixed amount of time, you can use this method to generate a URL that is only valid within a
    +   * certain time period. This is particularly useful if you don't want publicly accessible blobs,
    +   * but also don't want to require users to explicitly log in. Signing a URL requires a service
    +   * account and its associated private key. If a {@link ServiceAccountAuthCredentials} was passed
    +   * to {@link StorageOptions.Builder#authCredentials(AuthCredentials)} or the default credentials
    +   * are being used and the environment variable {@code GOOGLE_APPLICATION_CREDENTIALS} is set, then
    +   * {@code signUrl} will use that service account and associated key to sign the URL. If the
    +   * credentials passed to {@link StorageOptions} do not expose a private key (this is the case for
    +   * App Engine credentials, Compute Engine credentials and Google Cloud SDK credentials) then
    +   * {@code signUrl} will throw an {@link IllegalArgumentException} unless a service account with
    +   * associated key is passed using the {@code SignUrlOption.serviceAccount()} option. The service
    +   * account and private key passed with {@code SignUrlOption.serviceAccount()} have priority over
    +   * any credentials set with {@link StorageOptions.Builder#authCredentials(AuthCredentials)}.
    +   *
    +   * 

    Example usage of creating a signed URL that is valid for 2 weeks, using the default + * credentials for signing the URL: + *

     {@code
    +   * service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
    +   * }
    + * + *

    Example usage of creating a signed URL passing the {@code SignUrlOption.serviceAccount()} + * option, that will be used for signing the URL: + *

     {@code
    +   * service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS,
    +   *     SignUrlOption.serviceAccount(
    +   *         AuthCredentials.createForJson(new FileInputStream("/path/to/key.json"))));
        * }
    * * @param blobInfo the blob associated with the signed URL - * @param duration time until the signed URL expires, expressed in {@code unit}. The finer + * @param duration time until the signed URL expires, expressed in {@code unit}. The finest * granularity supported is 1 second, finer granularities will be truncated * @param unit time unit of the {@code duration} parameter * @param options optional URL signing options + * @throws IllegalArgumentException if {@code SignUrlOption.serviceAccount()} was not used and no + * service account was provided to {@link StorageOptions} + * @throws IllegalArgumentException if the key associated to the provided service account is + * invalid + * @throws IllegalArgumentException if {@code SignUrlOption.withMd5()} option is used and + * {@code blobInfo.md5()} is {@code null} + * @throws IllegalArgumentException if {@code SignUrlOption.withContentType()} option is used and + * {@code blobInfo.contentType()} is {@code null} * @see Signed-URLs */ URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options); @@ -1014,11 +1527,11 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * Gets the requested blobs. A batch request is used to perform this call. * * @param blobIds blobs to get - * @return an immutable list of {@code BlobInfo} objects. If a blob does not exist or access to it + * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it * has been denied the corresponding item in the list is {@code null}. * @throws StorageException upon failure */ - List get(BlobId... blobIds); + List get(BlobId... blobIds); /** * Updates the requested blobs. A batch request is used to perform this call. Original metadata @@ -1028,19 +1541,19 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx * {@link #update(com.google.gcloud.storage.BlobInfo)} for a code example. 
* * @param blobInfos blobs to update - * @return an immutable list of {@code BlobInfo} objects. If a blob does not exist or access to it + * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it * has been denied the corresponding item in the list is {@code null}. * @throws StorageException upon failure */ - List update(BlobInfo... blobInfos); + List update(BlobInfo... blobInfos); /** * Deletes the requested blobs. A batch request is used to perform this call. * * @param blobIds blobs to delete * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. + * list is {@code true}. If a blob was not found, deletion failed or access to the resource + * was denied the corresponding item is {@code false}. * @throws StorageException upon failure */ List delete(BlobId... blobIds); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java index e354e3a6d427..ee85b80d6e13 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java @@ -16,8 +16,14 @@ package com.google.gcloud.storage; -import com.google.gcloud.RetryHelper; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +import java.io.IOException; +import java.util.Set; /** * Storage service exception. 
@@ -25,29 +31,35 @@ * @see Google Cloud * Storage error codes */ -public class StorageException extends RuntimeException { +public class StorageException extends BaseServiceException { - private static final long serialVersionUID = -3748432005065428084L; - private static final int UNKNOWN_CODE = -1; + // see: https://cloud.google.com/storage/docs/resumable-uploads-xml#practices + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(504, null), + new Error(503, null), + new Error(502, null), + new Error(500, null), + new Error(429, null), + new Error(408, null), + new Error(null, "internalError")); - private final int code; - private final boolean retryable; + private static final long serialVersionUID = -4168430271327813063L; - public StorageException(int code, String message, boolean retryable) { - super(message); - this.code = code; - this.retryable = retryable; + public StorageException(int code, String message) { + super(code, message, null, true); } - /** - * Returns the code associated with this exception. 
- */ - public int code() { - return code; + public StorageException(IOException exception) { + super(exception, true); + } + + public StorageException(GoogleJsonError error) { + super(error, true); } - public boolean retryable() { - return retryable; + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** @@ -58,12 +70,7 @@ public boolean retryable() { * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ static StorageException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof StorageException) { - throw (StorageException) ex.getCause(); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new StorageException(UNKNOWN_CODE, ex.getMessage(), false); + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new StorageException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index ab85dc8b4609..d58c9e43aea9 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -19,38 +19,38 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.gcloud.RetryHelper.runWithRetries; -import static com.google.gcloud.spi.StorageRpc.Option.DELIMITER; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; -import static 
com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; -import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static com.google.gcloud.storage.spi.StorageRpc.Option.DELIMITER; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.services.storage.model.StorageObject; +import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.common.base.Function; -import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Ints; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import 
com.google.gcloud.ExceptionHandler.Interceptor; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryHelper.RetryHelperException; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpc.RewriteResponse; -import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse; +import com.google.gcloud.storage.spi.StorageRpc.Tuple; import java.io.ByteArrayInputStream; import java.io.InputStream; @@ -68,35 +68,23 @@ import java.util.EnumMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; final class StorageImpl extends BaseService implements Storage { - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { - - private static final long serialVersionUID = -7758580330857881124L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof StorageException) { - boolean retriable = ((StorageException) exception).retryable(); - return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); private static final byte[] EMPTY_BYTE_ARRAY = {}; private static final String EMPTY_BYTE_ARRAY_MD5 = "1B2M2Y8AsgTpgAmY7PhCfg=="; private static final String EMPTY_BYTE_ARRAY_CRC32C = "AAAAAA=="; + private static final String PATH_DELIMITER = "/"; + + private static final Function, Boolean> DELETE_FUNCTION = + new Function, Boolean>() { + @Override + public Boolean apply(Tuple tuple) { + return tuple.y(); + } + }; private final StorageRpc storageRpc; @@ -106,11 +94,11 @@ public RetryResult beforeEval(Exception exception) { } @Override - public BucketInfo create(BucketInfo bucketInfo, BucketTargetOption... options) { + public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { final com.google.api.services.storage.model.Bucket bucketPb = bucketInfo.toPb(); final Map optionsMap = optionMap(bucketInfo, options); try { - return BucketInfo.fromPb(runWithRetries( + return Bucket.fromPb(this, runWithRetries( new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { @@ -123,7 +111,7 @@ public com.google.api.services.storage.model.Bucket call() { } @Override - public BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options) { + public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { BlobInfo updatedInfo = blobInfo.toBuilder() .md5(EMPTY_BYTE_ARRAY_MD5) .crc32c(EMPTY_BYTE_ARRAY_CRC32C) @@ -132,7 +120,7 @@ public BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options) { } @Override - public BlobInfo create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... 
options) { content = firstNonNull(content, EMPTY_BYTE_ARRAY); BlobInfo updatedInfo = blobInfo.toBuilder() .md5(BaseEncoding.base64().encode(Hashing.md5().hashBytes(content).asBytes())) @@ -143,16 +131,16 @@ public BlobInfo create(BlobInfo blobInfo, byte[] content, BlobTargetOption... op } @Override - public BlobInfo create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { + public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { Tuple targetOptions = BlobTargetOption.convert(blobInfo, options); return create(targetOptions.x(), content, targetOptions.y()); } - private BlobInfo create(BlobInfo info, final InputStream content, BlobTargetOption... options) { + private Blob create(BlobInfo info, final InputStream content, BlobTargetOption... options) { final StorageObject blobPb = info.toPb(); final Map optionsMap = optionMap(info, options); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb(this, runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.create(blobPb, @@ -165,7 +153,7 @@ public StorageObject call() { } @Override - public BucketInfo get(String bucket, BucketSourceOption... options) { + public Bucket get(String bucket, BucketGetOption... options) { final com.google.api.services.storage.model.Bucket bucketPb = BucketInfo.of(bucket).toPb(); final Map optionsMap = optionMap(options); try { @@ -173,117 +161,94 @@ public BucketInfo get(String bucket, BucketSourceOption... options) { new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { - try { - return storageRpc.get(bucketPb, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(bucketPb, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); - return answer == null ? null : BucketInfo.fromPb(answer); + return answer == null ? 
null : Bucket.fromPb(this, answer); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } @Override - public BlobInfo get(String bucket, String blob, BlobSourceOption... options) { + public Blob get(String bucket, String blob, BlobGetOption... options) { return get(BlobId.of(bucket, blob), options); } @Override - public BlobInfo get(BlobId blob, BlobSourceOption... options) { + public Blob get(BlobId blob, BlobGetOption... options) { final StorageObject storedObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { StorageObject storageObject = runWithRetries(new Callable() { @Override public StorageObject call() { - try { - return storageRpc.get(storedObject, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(storedObject, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); - return storageObject == null ? null : BlobInfo.fromPb(storageObject); + return storageObject == null ? 
null : Blob.fromPb(this, storageObject); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } @Override - public BlobInfo get(BlobId blob) { - return get(blob, new BlobSourceOption[0]); + public Blob get(BlobId blob) { + return get(blob, new BlobGetOption[0]); } - private abstract static class BasePageFetcher - implements BaseListResult.NextPageFetcher { + private static class BucketPageFetcher implements NextPageFetcher { - private static final long serialVersionUID = 8236329004030295223L; - protected final Map requestOptions; - protected final StorageOptions serviceOptions; + private static final long serialVersionUID = 5850406828803613729L; + private final Map requestOptions; + private final StorageOptions serviceOptions; - BasePageFetcher(StorageOptions serviceOptions, String cursor, + BucketPageFetcher( + StorageOptions serviceOptions, String cursor, Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(StorageRpc.Option.PAGE_TOKEN, cursor, optionMap); this.serviceOptions = serviceOptions; - ImmutableMap.Builder builder = ImmutableMap.builder(); - if (cursor != null) { - builder.put(StorageRpc.Option.PAGE_TOKEN, cursor); - } - for (Map.Entry option : optionMap.entrySet()) { - if (option.getKey() != StorageRpc.Option.PAGE_TOKEN) { - builder.put(option.getKey(), option.getValue()); - } - } - this.requestOptions = builder.build(); - } - } - - private static class BucketPageFetcher extends BasePageFetcher { - - private static final long serialVersionUID = -5490616010200159174L; - - BucketPageFetcher(StorageOptions serviceOptions, String cursor, - Map optionMap) { - super(serviceOptions, cursor, optionMap); } @Override - public ListResult nextPage() { + public Page nextPage() { return listBuckets(serviceOptions, requestOptions); } } - private static class BlobPageFetcher extends BasePageFetcher { + private static class BlobPageFetcher implements NextPageFetcher { - private static final long serialVersionUID = 
-5490616010200159174L; + private static final long serialVersionUID = 81807334445874098L; + private final Map requestOptions; + private final StorageOptions serviceOptions; private final String bucket; BlobPageFetcher(String bucket, StorageOptions serviceOptions, String cursor, Map optionMap) { - super(serviceOptions, cursor, optionMap); + this.requestOptions = + PageImpl.nextRequestOptions(StorageRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; this.bucket = bucket; } @Override - public ListResult nextPage() { + public Page nextPage() { return listBlobs(bucket, serviceOptions, requestOptions); } } @Override - public ListResult list(BucketListOption... options) { + public Page list(BucketListOption... options) { return listBuckets(options(), optionMap(options)); } - private static ListResult listBuckets(final StorageOptions serviceOptions, + @Override + public Page list(final String bucket, BlobListOption... options) { + return listBlobs(bucket, options(), optionMap(options)); + } + + private static Page listBuckets(final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( @@ -294,27 +259,23 @@ public Tuple> cal } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable buckets = - result.y() == null ? ImmutableList.of() : Iterables.transform(result.y(), - new Function() { + Iterable buckets = + result.y() == null ? 
ImmutableList.of() : Iterables.transform(result.y(), + new Function() { @Override - public BucketInfo apply(com.google.api.services.storage.model.Bucket bucketPb) { - return BucketInfo.fromPb(bucketPb); + public Bucket apply(com.google.api.services.storage.model.Bucket bucketPb) { + return Bucket.fromPb(serviceOptions.service(), bucketPb); } }); - return new BaseListResult<>(new BucketPageFetcher(serviceOptions, cursor, optionsMap), cursor, + return new PageImpl<>( + new BucketPageFetcher(serviceOptions, cursor, optionsMap), cursor, buckets); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } - @Override - public ListResult list(final String bucket, BlobListOption... options) { - return listBlobs(bucket, options(), optionMap(options)); - } - - private static ListResult listBlobs(final String bucket, + private static Page listBlobs(final String bucket, final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( @@ -325,15 +286,17 @@ public Tuple> call() { } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); - Iterable blobs = - result.y() == null ? ImmutableList.of() : Iterables.transform(result.y(), - new Function() { + Iterable blobs = + result.y() == null + ? ImmutableList.of() + : Iterables.transform(result.y(), new Function() { @Override - public BlobInfo apply(StorageObject storageObject) { - return BlobInfo.fromPb(storageObject); + public Blob apply(StorageObject storageObject) { + return Blob.fromPb(serviceOptions.service(), storageObject); } }); - return new BaseListResult<>(new BlobPageFetcher(bucket, serviceOptions, cursor, optionsMap), + return new PageImpl<>( + new BlobPageFetcher(bucket, serviceOptions, cursor, optionsMap), cursor, blobs); } catch (RetryHelperException e) { @@ -342,11 +305,11 @@ public BlobInfo apply(StorageObject storageObject) { } @Override - public BucketInfo update(BucketInfo bucketInfo, BucketTargetOption... 
options) { + public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { final com.google.api.services.storage.model.Bucket bucketPb = bucketInfo.toPb(); final Map optionsMap = optionMap(bucketInfo, options); try { - return BucketInfo.fromPb(runWithRetries( + return Bucket.fromPb(this, runWithRetries( new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { @@ -359,11 +322,11 @@ public com.google.api.services.storage.model.Bucket call() { } @Override - public BlobInfo update(BlobInfo blobInfo, BlobTargetOption... options) { + public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { final StorageObject storageObject = blobInfo.toPb(); final Map optionsMap = optionMap(blobInfo, options); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb(this, runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.patch(storageObject, optionsMap); @@ -375,7 +338,7 @@ public StorageObject call() { } @Override - public BlobInfo update(BlobInfo blobInfo) { + public Blob update(BlobInfo blobInfo) { return update(blobInfo, new BlobTargetOption[0]); } @@ -403,7 +366,7 @@ public boolean delete(String bucket, String blob, BlobSourceOption... options) { @Override public boolean delete(BlobId blob, BlobSourceOption... 
options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -422,18 +385,19 @@ public boolean delete(BlobId blob) { } @Override - public BlobInfo compose(final ComposeRequest composeRequest) { + public Blob compose(final ComposeRequest composeRequest) { final List sources = Lists.newArrayListWithCapacity(composeRequest.sourceBlobs().size()); for (ComposeRequest.SourceBlob sourceBlob : composeRequest.sourceBlobs()) { - sources.add(BlobInfo.builder(composeRequest.target().bucket(), sourceBlob.name()) - .generation(sourceBlob.generation()).build().toPb()); + sources.add(BlobInfo.builder( + BlobId.of(composeRequest.target().bucket(), sourceBlob.name(), sourceBlob.generation())) + .build().toPb()); } final StorageObject target = composeRequest.target().toPb(); final Map targetOptions = optionMap(composeRequest.target().generation(), composeRequest.target().metageneration(), composeRequest.targetOptions()); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + return Blob.fromPb(this, runWithRetries(new Callable() { @Override public StorageObject call() { return storageRpc.compose(sources, target, targetOptions); @@ -448,7 +412,7 @@ public StorageObject call() { public CopyWriter copy(final CopyRequest copyRequest) { final StorageObject source = copyRequest.source().toPb(); final Map sourceOptions = - optionMap(null, null, copyRequest.sourceOptions(), true); + optionMap(copyRequest.source().generation(), null, copyRequest.sourceOptions(), true); final StorageObject target = copyRequest.target().toPb(); final Map targetOptions = optionMap(copyRequest.target().generation(), copyRequest.target().metageneration(), copyRequest.targetOptions()); @@ -474,7 +438,7 @@ public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... optio @Override public byte[] readAllBytes(BlobId blob, BlobSourceOption... 
options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -488,12 +452,12 @@ public byte[] call() { } @Override - public BatchResponse apply(BatchRequest batchRequest) { + public BatchResponse submit(BatchRequest batchRequest) { List>> toDelete = Lists.newArrayListWithCapacity(batchRequest.toDelete().size()); for (Map.Entry> entry : batchRequest.toDelete().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); StorageObject storageObject = blob.toPb(); toDelete.add(Tuple.>of(storageObject, optionsMap)); } @@ -508,55 +472,52 @@ public BatchResponse apply(BatchRequest batchRequest) { } List>> toGet = Lists.newArrayListWithCapacity(batchRequest.toGet().size()); - for (Map.Entry> entry : batchRequest.toGet().entrySet()) { + for (Map.Entry> entry : batchRequest.toGet().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); toGet.add(Tuple.>of(blob.toPb(), optionsMap)); } StorageRpc.BatchResponse response = storageRpc.batch(new StorageRpc.BatchRequest(toDelete, toUpdate, toGet)); - List> deletes = transformBatchResult( - toDelete, response.deletes, Functions.identity()); - List> updates = transformBatchResult( - toUpdate, response.updates, BlobInfo.FROM_PB_FUNCTION); - List> gets = transformBatchResult( - toGet, response.gets, BlobInfo.FROM_PB_FUNCTION, HTTP_NOT_FOUND); + List> deletes = + transformBatchResult(toDelete, response.deletes, DELETE_FUNCTION); + List> updates = + transformBatchResult(toUpdate, response.updates, Blob.BLOB_FROM_PB_FUNCTION); + List> gets = + transformBatchResult(toGet, response.gets, Blob.BLOB_FROM_PB_FUNCTION); return new BatchResponse(deletes, 
updates, gets); } private List> transformBatchResult( Iterable>> request, - Map> results, Function transform, - int... nullOnErrorCodes) { - Set nullOnErrorCodesSet = Sets.newHashSet(Ints.asList(nullOnErrorCodes)); + Map> results, + Function, O> transform) { List> response = Lists.newArrayListWithCapacity(results.size()); for (Tuple tuple : request) { Tuple result = results.get(tuple.x()); - if (result.x() != null) { - response.add(BatchResponse.Result.of(transform.apply(result.x()))); + I object = result.x(); + StorageException exception = result.y(); + if (exception != null) { + response.add(new BatchResponse.Result(exception)); } else { - StorageException exception = result.y(); - if (nullOnErrorCodesSet.contains(exception.code())) { - //noinspection unchecked - response.add(BatchResponse.Result.empty()); - } else { - response.add(new BatchResponse.Result(exception)); - } + response.add(object != null + ? BatchResponse.Result.of(transform.apply(Tuple.of((Storage) this, object))) + : BatchResponse.Result.empty()); } } return response; } @Override - public BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { Map optionsMap = optionMap(options); - return new BlobReadChannelImpl(options(), BlobId.of(bucket, blob), optionsMap); + return new BlobReadChannel(options(), BlobId.of(bucket, blob), optionsMap); } @Override - public BlobReadChannel reader(BlobId blob, BlobSourceOption... options) { - Map optionsMap = optionMap(options); - return new BlobReadChannelImpl(options(), blob, optionsMap); + public ReadChannel reader(BlobId blob, BlobSourceOption... options) { + Map optionsMap = optionMap(blob, options); + return new BlobReadChannel(options(), blob, optionsMap); } @Override @@ -567,23 +528,24 @@ public BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { private BlobWriteChannel writer(BlobInfo blobInfo, BlobTargetOption... 
options) { final Map optionsMap = optionMap(blobInfo, options); - return new BlobWriteChannelImpl(options(), blobInfo, optionsMap); + return new BlobWriteChannel(options(), blobInfo, optionsMap); } @Override public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { - long expiration = TimeUnit.SECONDS.convert( - options().clock().millis() + unit.toMillis(duration), TimeUnit.MILLISECONDS); EnumMap optionMap = Maps.newEnumMap(SignUrlOption.Option.class); for (SignUrlOption option : options) { optionMap.put(option.option(), option.value()); } - ServiceAccountAuthCredentials cred = + ServiceAccountAuthCredentials authCred = (ServiceAccountAuthCredentials) optionMap.get(SignUrlOption.Option.SERVICE_ACCOUNT_CRED); - if (cred == null) { - checkArgument(options().authCredentials() instanceof ServiceAccountAuthCredentials, + ServiceAccountCredentials cred = authCred != null ? authCred.credentials() : null; + if (authCred == null) { + checkArgument( + this.options().authCredentials() != null + && this.options().authCredentials().credentials() instanceof ServiceAccountCredentials, "Signing key was not provided and could not be derived"); - cred = (ServiceAccountAuthCredentials) this.options().authCredentials(); + cred = (ServiceAccountCredentials) this.options().authCredentials().credentials(); } // construct signature - see https://cloud.google.com/storage/docs/access-control#Signed-URLs StringBuilder stBuilder = new StringBuilder(); @@ -603,6 +565,8 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio stBuilder.append(blobInfo.contentType()); } stBuilder.append('\n'); + long expiration = TimeUnit.SECONDS.convert( + options().clock().millis() + unit.toMillis(duration), TimeUnit.MILLISECONDS); stBuilder.append(expiration).append('\n'); StringBuilder path = new StringBuilder(); if (!blobInfo.bucket().startsWith("/")) { @@ -619,12 +583,12 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, 
SignUrlOptio stBuilder.append(path); try { Signature signer = Signature.getInstance("SHA256withRSA"); - signer.initSign(cred.privateKey()); + signer.initSign(cred.getPrivateKey()); signer.update(stBuilder.toString().getBytes(UTF_8)); + stBuilder = new StringBuilder("https://storage.googleapis.com").append(path); String signature = URLEncoder.encode(BaseEncoding.base64().encode(signer.sign()), UTF_8.name()); - stBuilder = new StringBuilder("https://storage.googleapis.com").append(path); - stBuilder.append("?GoogleAccessId=").append(cred.account()); + stBuilder.append("?GoogleAccessId=").append(cred.getClientEmail()); stBuilder.append("&Expires=").append(expiration); stBuilder.append("&Signature=").append(signature); return new URL(stBuilder.toString()); @@ -636,22 +600,22 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio } @Override - public List get(BlobId... blobIds) { + public List get(BlobId... blobIds) { BatchRequest.Builder requestBuilder = BatchRequest.builder(); for (BlobId blob : blobIds) { requestBuilder.get(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.gets(), null)); } @Override - public List update(BlobInfo... blobInfos) { + public List update(BlobInfo... blobInfos) { BatchRequest.Builder requestBuilder = BatchRequest.builder(); for (BlobInfo blobInfo : blobInfos) { requestBuilder.update(blobInfo); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.updates(), null)); } @@ -661,7 +625,7 @@ public List delete(BlobId... 
blobIds) { for (BlobId blob : blobIds) { requestBuilder.delete(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.deletes(), Boolean.FALSE)); } @@ -669,12 +633,29 @@ private static List transformResultList( List> results, final T errorValue) { return Lists.transform(results, new Function, T>() { @Override - public T apply(BatchResponse.Result f) { - return f.failed() ? errorValue : f.get(); + public T apply(BatchResponse.Result result) { + return result.failed() ? errorValue : result.get(); } }); } + private static void addToOptionMap(StorageRpc.Option option, T defaultValue, + Map map) { + addToOptionMap(option, option, defaultValue, map); + } + + private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.Option putOption, + T defaultValue, Map map) { + if (map.containsKey(getOption)) { + @SuppressWarnings("unchecked") + T value = (T) map.remove(getOption); + checkArgument(value != null || defaultValue != null, + "Option " + getOption.value() + " is missing a value"); + value = firstNonNull(value, defaultValue); + map.put(putOption, value); + } + } + private Map optionMap(Long generation, Long metaGeneration, Iterable options) { return optionMap(generation, metaGeneration, options, false); @@ -689,7 +670,7 @@ public T apply(BatchResponse.Result f) { } Boolean value = (Boolean) temp.remove(DELIMITER); if (Boolean.TRUE.equals(value)) { - temp.put(DELIMITER, options().pathDelimiter()); + temp.put(DELIMITER, PATH_DELIMITER); } if (useAsSource) { addToOptionMap(IF_GENERATION_MATCH, IF_SOURCE_GENERATION_MATCH, generation, temp); @@ -706,23 +687,6 @@ public T apply(BatchResponse.Result f) { return ImmutableMap.copyOf(temp); } - private static void addToOptionMap(StorageRpc.Option option, T defaultValue, - Map map) { - addToOptionMap(option, option, defaultValue, map); - } - - private static void 
addToOptionMap(StorageRpc.Option getOption, StorageRpc.Option putOption, - T defaultValue, Map map) { - if (map.containsKey(getOption)) { - @SuppressWarnings("unchecked") - T value = (T) map.remove(getOption); - checkArgument(value != null || defaultValue != null, - "Option " + getOption.value() + " is missing a value"); - value = firstNonNull(value, defaultValue); - map.put(putOption, value); - } - } - private Map optionMap(Option... options) { return optionMap(null, null, Arrays.asList(options)); } @@ -739,4 +703,8 @@ private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.O private Map optionMap(BlobInfo blobInfo, Option... options) { return optionMap(blobInfo.generation(), blobInfo.metageneration(), options); } + + private Map optionMap(BlobId blobId, Option... options) { + return optionMap(blobId.generation(), null, options); + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java index 16c17c3e8d98..e7e1c2778fa9 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java @@ -16,14 +16,12 @@ package com.google.gcloud.storage; -import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableSet; import com.google.gcloud.ServiceOptions; -import com.google.gcloud.spi.DefaultStorageRpc; -import com.google.gcloud.spi.StorageRpc; -import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.spi.DefaultStorageRpc; +import com.google.gcloud.storage.spi.StorageRpc; +import com.google.gcloud.storage.spi.StorageRpcFactory; -import java.util.Objects; import java.util.Set; public class StorageOptions extends ServiceOptions { @@ -31,9 +29,6 @@ public class StorageOptions extends ServiceOptions SCOPES = ImmutableSet.of(GCS_SCOPE); - private static final String 
DEFAULT_PATH_DELIMITER = "/"; - - private final String pathDelimiter; public static class DefaultStorageFactory implements StorageFactory { @@ -58,24 +53,10 @@ public StorageRpc create(StorageOptions options) { public static class Builder extends ServiceOptions.Builder { - private String pathDelimiter; - private Builder() {} private Builder(StorageOptions options) { super(options); - pathDelimiter = options.pathDelimiter; - } - - /** - * Sets the path delimiter for the storage service. - * - * @param pathDelimiter the path delimiter to set - * @return the builder. - */ - public Builder pathDelimiter(String pathDelimiter) { - this.pathDelimiter = pathDelimiter; - return this; } @Override @@ -86,14 +67,15 @@ public StorageOptions build() { private StorageOptions(Builder builder) { super(StorageFactory.class, StorageRpcFactory.class, builder); - pathDelimiter = MoreObjects.firstNonNull(builder.pathDelimiter, DEFAULT_PATH_DELIMITER); } + @SuppressWarnings("unchecked") @Override protected StorageFactory defaultServiceFactory() { return DefaultStorageFactory.INSTANCE; } + @SuppressWarnings("unchecked") @Override protected StorageRpcFactory defaultRpcFactory() { return DefaultStorageRpcFactory.INSTANCE; @@ -105,12 +87,13 @@ protected Set scopes() { } /** - * Returns the storage service's path delimiter. + * Returns a default {@code StorageOptions} instance. 
*/ - public String pathDelimiter() { - return pathDelimiter; + public static StorageOptions defaultInstance() { + return builder().build(); } + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { return new Builder(this); @@ -118,16 +101,12 @@ public Builder toBuilder() { @Override public int hashCode() { - return baseHashCode() ^ Objects.hash(pathDelimiter); + return baseHashCode(); } @Override public boolean equals(Object obj) { - if (!(obj instanceof StorageOptions)) { - return false; - } - StorageOptions other = (StorageOptions) obj; - return baseEquals(other) && Objects.equals(pathDelimiter, other.pathDelimiter); + return obj instanceof StorageOptions && baseEquals((StorageOptions) obj); } public static Builder builder() { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java index 2a09631be40a..181e63b08d0b 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java @@ -17,24 +17,37 @@ /** * A client to Google Cloud Storage. * - *

    A simple usage example: - *

    {@code
    - * StorageOptions options = StorageOptions.builder().projectId("project").build();
    - * Storage storage = options.service();
    + * 

    Here's a simple usage example for using gcloud-java from App/Compute Engine. This example + * shows how to create a Storage blob. For the complete source code see + * + * CreateBlob.java. + *

     {@code
    + * Storage storage = StorageOptions.defaultInstance().service();
      * BlobId blobId = BlobId.of("bucket", "blob_name");
    - * Blob blob = Blob.load(storage, blobId);
    - * if (blob == null) {
    - *   BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build();
    - *   storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8));
    - * } else {
    - *   System.out.println("Updating content for " + blobId.name());
    + * BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build();
    + * Blob blob = storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8));
    + * }
    + *

    + * This second example shows how to update the blob's content if the blob exists. For the complete + * source code see + * + * UpdateBlob.java. + *

     {@code
    + * Storage storage = StorageOptions.defaultInstance().service();
    + * BlobId blobId = BlobId.of("bucket", "blob_name");
    + * Blob blob = storage.get(blobId);
    + * if (blob != null) {
      *   byte[] prevContent = blob.content();
      *   System.out.println(new String(prevContent, UTF_8));
      *   WritableByteChannel channel = blob.writer();
      *   channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8)));
      *   channel.close();
      * }}
    - * + *

    When using gcloud-java from outside of App/Compute Engine, you have to specify a + * project ID and + * provide + * credentials. * @see Google Cloud Storage */ package com.google.gcloud.storage; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java similarity index 72% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java index 70cad8c7773e..aa6085e161ed 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/DefaultStorageRpc.java @@ -12,28 +12,30 @@ * the License. */ -package com.google.gcloud.spi; - -import static com.google.gcloud.spi.StorageRpc.Option.DELIMITER; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; -import static com.google.gcloud.spi.StorageRpc.Option.MAX_RESULTS; -import static com.google.gcloud.spi.StorageRpc.Option.PAGE_TOKEN; -import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_ACL; -import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; -import static com.google.gcloud.spi.StorageRpc.Option.PREFIX; -import static 
com.google.gcloud.spi.StorageRpc.Option.VERSIONS; +package com.google.gcloud.storage.spi; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.gcloud.storage.spi.StorageRpc.Option.DELIMITER; +import static com.google.gcloud.storage.spi.StorageRpc.Option.FIELDS; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; +import static com.google.gcloud.storage.spi.StorageRpc.Option.MAX_RESULTS; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PAGE_TOKEN; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREDEFINED_ACL; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; +import static com.google.gcloud.storage.spi.StorageRpc.Option.PREFIX; +import static com.google.gcloud.storage.spi.StorageRpc.Option.VERSIONS; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.googleapis.media.MediaHttpDownloader; import com.google.api.client.http.ByteArrayContent; import 
com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpHeaders; @@ -56,8 +58,10 @@ import com.google.api.services.storage.model.ComposeRequest.SourceObjects.ObjectPreconditions; import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableSet; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; @@ -65,10 +69,11 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.math.BigInteger; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; public class DefaultStorageRpc implements StorageRpc { @@ -76,9 +81,8 @@ public class DefaultStorageRpc implements StorageRpc { private final StorageOptions options; private final Storage storage; - // see: https://cloud.google.com/storage/docs/concepts-techniques#practices - private static final Set RETRYABLE_CODES = ImmutableSet.of(504, 503, 502, 500, 429, 408); private static final long MEGABYTE = 1024L * 1024L; + private static final int MAX_BATCH_DELETES = 100; public DefaultStorageRpc(StorageOptions options) { HttpTransport transport = options.httpTransportFactory().create(); @@ -91,24 +95,15 @@ public DefaultStorageRpc(StorageOptions options) { } private static StorageException translate(IOException exception) { - StorageException translated; - if (exception instanceof GoogleJsonResponseException) { - translated = translate(((GoogleJsonResponseException) exception).getDetails()); - } else { - translated = new StorageException(0, exception.getMessage(), false); - } - 
translated.initCause(exception); - return translated; + return new StorageException(exception); } private static StorageException translate(GoogleJsonError exception) { - boolean retryable = RETRYABLE_CODES.contains(exception.getCode()) - || "InternalError".equals(exception.getMessage()); - return new StorageException(exception.getCode(), exception.getMessage(), retryable); + return new StorageException(exception); } @Override - public Bucket create(Bucket bucket, Map options) throws StorageException { + public Bucket create(Bucket bucket, Map options) { try { return storage.buckets() .insert(this.options.projectId(), bucket) @@ -123,7 +118,7 @@ public Bucket create(Bucket bucket, Map options) throws StorageExcept @Override public StorageObject create(StorageObject storageObject, final InputStream content, - Map options) throws StorageException { + Map options) { try { Storage.Objects.Insert insert = storage.objects() .insert(storageObject.getBucket(), storageObject, @@ -150,6 +145,7 @@ public Tuple> list(Map options) { .setPrefix(PREFIX.getString(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) + .setFields(FIELDS.getString(options)) .execute(); return Tuple.>of(buckets.getNextPageToken(), buckets.getItems()); } catch (IOException ex) { @@ -158,7 +154,7 @@ public Tuple> list(Map options) { } @Override - public Tuple> list(String bucket, Map options) { + public Tuple> list(final String bucket, Map options) { try { Objects objects = storage.objects() .list(bucket) @@ -168,14 +164,32 @@ public Tuple> list(String bucket, Map .setPrefix(PREFIX.getString(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) + .setFields(FIELDS.getString(options)) .execute(); - return Tuple.>of( - objects.getNextPageToken(), objects.getItems()); + Iterable storageObjects = Iterables.concat( + firstNonNull(objects.getItems(), ImmutableList.of()), + objects.getPrefixes() != null + ? 
Lists.transform(objects.getPrefixes(), objectFromPrefix(bucket)) + : ImmutableList.of()); + return Tuple.of(objects.getNextPageToken(), storageObjects); } catch (IOException ex) { throw translate(ex); } } + private static Function objectFromPrefix(final String bucket) { + return new Function() { + @Override + public StorageObject apply(String prefix) { + return new StorageObject() + .set("isDirectory", true) + .setBucket(bucket) + .setName(prefix) + .setSize(BigInteger.ZERO); + } + }; + } + @Override public Bucket get(Bucket bucket, Map options) { try { @@ -184,9 +198,14 @@ public Bucket get(Bucket bucket, Map options) { .setProjection(DEFAULT_PROJECTION) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) + .setFields(FIELDS.getString(options)) .execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -195,7 +214,11 @@ public StorageObject get(StorageObject object, Map options) { try { return getRequest(object, options).execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -203,11 +226,13 @@ private Storage.Objects.Get getRequest(StorageObject object, Map opti throws IOException { return storage.objects() .get(object.getBucket(), object.getName()) + .setGeneration(object.getGeneration()) .setProjection(DEFAULT_PROJECTION) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) - .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)); + .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)) 
+ .setFields(FIELDS.getString(options)); } @Override @@ -258,7 +283,7 @@ public boolean delete(Bucket bucket, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -272,7 +297,7 @@ public boolean delete(StorageObject blob, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -283,6 +308,7 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map throws IOException { return storage.objects() .delete(blob.getBucket(), blob.getName()) + .setGeneration(blob.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -291,7 +317,7 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map @Override public StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException { + Map targetOptions) { ComposeRequest request = new ComposeRequest(); if (target.getContentType() == null) { target.setContentType("application/octet-stream"); @@ -322,11 +348,11 @@ public StorageObject compose(Iterable sources, StorageObject targ } @Override - public byte[] load(StorageObject from, Map options) - throws StorageException { + public byte[] load(StorageObject from, Map options) { try { Storage.Objects.Get getRequest = storage.objects() .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) 
.setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -341,7 +367,27 @@ public byte[] load(StorageObject from, Map options) } @Override - public BatchResponse batch(BatchRequest request) throws StorageException { + public BatchResponse batch(BatchRequest request) { + List>>> partitionedToDelete = + Lists.partition(request.toDelete, MAX_BATCH_DELETES); + Iterator>>> iterator = partitionedToDelete.iterator(); + BatchRequest chunkRequest = new BatchRequest( + iterator.hasNext() + ? iterator.next() : ImmutableList.>>of(), + request.toUpdate, request.toGet); + BatchResponse response = batchChunk(chunkRequest); + Map> deletes = + Maps.newHashMapWithExpectedSize(request.toDelete.size()); + deletes.putAll(response.deletes); + while (iterator.hasNext()) { + chunkRequest = new BatchRequest(iterator.next(), null, null); + BatchResponse deleteBatchResponse = batchChunk(chunkRequest); + deletes.putAll(deleteBatchResponse.deletes); + } + return new BatchResponse(deletes, response.updates, response.gets); + } + + private BatchResponse batchChunk(BatchRequest request) { com.google.api.client.googleapis.batch.BatchRequest batch = storage.batch(); final Map> deletes = Maps.newConcurrentMap(); @@ -359,7 +405,11 @@ public void onSuccess(Void ignore, HttpHeaders responseHeaders) { @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - deletes.put(tuple.x(), Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + deletes.put(tuple.x(), Tuple.of(Boolean.FALSE, null)); + } else { + deletes.put(tuple.x(), Tuple.of(null, translate(e))); + } } }); } @@ -388,8 +438,13 @@ public void onSuccess(StorageObject storageObject, HttpHeaders responseHeaders) @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - gets.put(tuple.x(), - Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + gets.put(tuple.x(), + Tuple.of(null, null)); + } else { + gets.put(tuple.x(), + Tuple.of(null, translate(e))); + } } 
}); } @@ -401,35 +456,50 @@ public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { } @Override - public byte[] read(StorageObject from, Map options, long position, int bytes) - throws StorageException { + public Tuple read(StorageObject from, Map options, long position, + int bytes) { try { - Get req = storage.objects().get(from.getBucket(), from.getName()); - req.setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) + Get req = storage.objects() + .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) + .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)); - MediaHttpDownloader downloader = req.getMediaHttpDownloader(); - downloader.setContentRange(position, (int) position + bytes); - downloader.setDirectDownloadEnabled(true); + StringBuilder range = new StringBuilder(); + range.append("bytes=").append(position).append("-").append(position + bytes - 1); + req.getRequestHeaders().setRange(range.toString()); ByteArrayOutputStream output = new ByteArrayOutputStream(); - req.executeMediaAndDownloadTo(output); - return output.toByteArray(); + req.executeMedia().download(output); + String etag = req.getLastResponseHeaders().getETag(); + return Tuple.of(etag, output.toByteArray()); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == SC_REQUESTED_RANGE_NOT_SATISFIABLE) { + return Tuple.of(null, new byte[0]); + } + throw serviceException; } } @Override - public void write(String uploadId, byte[] toWrite, int toWriteOffset, StorageObject dest, - long destOffset, int length, boolean last) throws StorageException { + public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean 
last) { try { + if (length == 0 && !last) { + return; + } GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = storage.getRequestFactory().buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); long limit = destOffset + length; StringBuilder range = new StringBuilder("bytes "); - range.append(destOffset).append('-').append(limit - 1).append('/'); + if (length == 0) { + range.append('*'); + } else { + range.append(destOffset).append('-').append(limit - 1); + } + range.append('/'); if (last) { range.append(limit); } else { @@ -463,8 +533,7 @@ public void write(String uploadId, byte[] toWrite, int toWriteOffset, StorageObj } @Override - public String open(StorageObject object, Map options) - throws StorageException { + public String open(StorageObject object, Map options) { try { Insert req = storage.objects().insert(object.getBucket(), object); GenericUrl url = req.buildHttpRequest().getUrl(); @@ -485,7 +554,7 @@ public String open(StorageObject object, Map options) HttpRequest httpRequest = requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, object)); httpRequest.getHeaders().set("X-Upload-Content-Type", - MoreObjects.firstNonNull(object.getContentType(), "application/octet-stream")); + firstNonNull(object.getContentType(), "application/octet-stream")); HttpResponse response = httpRequest.execute(); if (response.getStatusCode() != 200) { GoogleJsonError error = new GoogleJsonError(); @@ -500,22 +569,23 @@ public String open(StorageObject object, Map options) } @Override - public RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException { + public RewriteResponse openRewrite(RewriteRequest rewriteRequest) { return rewrite(rewriteRequest, null); } @Override - public RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException { + public RewriteResponse continueRewrite(RewriteResponse previousResponse) { return rewrite(previousResponse.rewriteRequest, 
previousResponse.rewriteToken); } - private RewriteResponse rewrite(RewriteRequest req, String token) throws StorageException { + private RewriteResponse rewrite(RewriteRequest req, String token) { try { Long maxBytesRewrittenPerCall = req.megabytesRewrittenPerCall != null ? req.megabytesRewrittenPerCall * MEGABYTE : null; - com.google.api.services.storage.model.RewriteResponse rewriteReponse = storage.objects() + com.google.api.services.storage.model.RewriteResponse rewriteResponse = storage.objects() .rewrite(req.source.getBucket(), req.source.getName(), req.target.getBucket(), req.target.getName(), req.target.getContentType() != null ? req.target : null) + .setSourceGeneration(req.source.getGeneration()) .setRewriteToken(token) .setMaxBytesRewrittenPerCall(maxBytesRewrittenPerCall) .setProjection(DEFAULT_PROJECTION) @@ -531,11 +601,11 @@ private RewriteResponse rewrite(RewriteRequest req, String token) throws Storage .execute(); return new RewriteResponse( req, - rewriteReponse.getResource(), - rewriteReponse.getObjectSize().longValue(), - rewriteReponse.getDone(), - rewriteReponse.getRewriteToken(), - rewriteReponse.getTotalBytesRewritten().longValue()); + rewriteResponse.getResource(), + rewriteResponse.getObjectSize().longValue(), + rewriteResponse.getDone(), + rewriteResponse.getRewriteToken(), + rewriteResponse.getTotalBytesRewritten().longValue()); } catch (IOException ex) { throw translate(ex); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java similarity index 71% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java index 40382a857fca..d239a475a6dd 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpc.java @@ 
-14,7 +14,7 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.storage.spi; import static com.google.common.base.MoreObjects.firstNonNull; @@ -47,7 +47,8 @@ enum Option { MAX_RESULTS("maxResults"), PAGE_TOKEN("pageToken"), DELIMITER("delimiter"), - VERSIONS("versions"); + VERSIONS("versions"), + FIELDS("fields"); private final String value; @@ -216,47 +217,134 @@ public int hashCode() { } } - Bucket create(Bucket bucket, Map options) throws StorageException; - - StorageObject create(StorageObject object, InputStream content, Map options) - throws StorageException; - - Tuple> list(Map options) throws StorageException; - - Tuple> list(String bucket, Map options) - throws StorageException; - - Bucket get(Bucket bucket, Map options) throws StorageException; - - StorageObject get(StorageObject object, Map options) - throws StorageException; - - Bucket patch(Bucket bucket, Map options) throws StorageException; - - StorageObject patch(StorageObject storageObject, Map options) - throws StorageException; - - boolean delete(Bucket bucket, Map options) throws StorageException; - - boolean delete(StorageObject object, Map options) throws StorageException; - - BatchResponse batch(BatchRequest request) throws StorageException; - + /** + * Creates a new bucket. + * + * @throws StorageException upon failure + */ + Bucket create(Bucket bucket, Map options); + + /** + * Creates a new storage object. + * + * @throws StorageException upon failure + */ + StorageObject create(StorageObject object, InputStream content, Map options); + + /** + * Lists the project's buckets. + * + * @throws StorageException upon failure + */ + Tuple> list(Map options); + + /** + * Lists the bucket's blobs. + * + * @throws StorageException upon failure + */ + Tuple> list(String bucket, Map options); + + /** + * Returns the requested bucket or {@code null} if not found. 
+ * + * @throws StorageException upon failure + */ + Bucket get(Bucket bucket, Map options); + + /** + * Returns the requested storage object or {@code null} if not found. + * + * @throws StorageException upon failure + */ + StorageObject get(StorageObject object, Map options); + + /** + * Updates bucket information. + * + * @throws StorageException upon failure + */ + Bucket patch(Bucket bucket, Map options); + + /** + * Updates the storage object's information. Original metadata are merged with metadata in the + * provided {@code storageObject}. + * + * @throws StorageException upon failure + */ + StorageObject patch(StorageObject storageObject, Map options); + + /** + * Deletes the requested bucket. + * + * @return {@code true} if the bucket was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(Bucket bucket, Map options); + + /** + * Deletes the requested storage object. + * + * @return {@code true} if the storage object was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(StorageObject object, Map options); + + /** + * Sends a batch request. + * + * @throws StorageException upon failure + */ + BatchResponse batch(BatchRequest request); + + /** + * Sends a compose request. 
+ * + * @throws StorageException upon failure + */ StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException; - - byte[] load(StorageObject storageObject, Map options) - throws StorageException; - - byte[] read(StorageObject from, Map options, long position, int bytes) - throws StorageException; - - String open(StorageObject object, Map options) throws StorageException; - - void write(String uploadId, byte[] toWrite, int toWriteOffset, StorageObject dest, - long destOffset, int length, boolean last) throws StorageException; - - RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException; - - RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException; + Map targetOptions); + + /** + * Reads all the bytes from a storage object. + * + * @throws StorageException upon failure + */ + byte[] load(StorageObject storageObject, Map options); + + /** + * Reads the given amount of bytes from a storage object at the given position. + * + * @throws StorageException upon failure + */ + Tuple read(StorageObject from, Map options, long position, int bytes); + + /** + * Opens a resumable upload channel for a given storage object. + * + * @throws StorageException upon failure + */ + String open(StorageObject object, Map options); + + /** + * Writes the provided bytes to a storage object at the provided location. + * + * @throws StorageException upon failure + */ + void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last); + + /** + * Sends a rewrite request to open a rewrite channel. + * + * @throws StorageException upon failure + */ + RewriteResponse openRewrite(RewriteRequest rewriteRequest); + + /** + * Continues rewriting on an already open rewrite channel. 
+ * + * @throws StorageException upon failure + */ + RewriteResponse continueRewrite(RewriteResponse previousResponse); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java similarity index 91% rename from gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java rename to gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java index f4959d617d17..19b98e6273db 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpcFactory.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/spi/StorageRpcFactory.java @@ -14,8 +14,9 @@ * limitations under the License. */ -package com.google.gcloud.spi; +package com.google.gcloud.storage.spi; +import com.google.gcloud.spi.ServiceRpcFactory; import com.google.gcloud.storage.StorageOptions; /** diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java index 6ad655db8670..1287ede746d5 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java @@ -20,11 +20,10 @@ import com.google.gcloud.RetryParams; import com.google.gcloud.storage.BlobInfo; import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.Storage.BlobListOption; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.UUID; @@ -39,14 +38,19 @@ import java.util.logging.Logger; /** - * Utility to create a remote storage configuration for testing + * Utility to create a remote storage 
configuration for testing. Storage options can be obtained via + * the {@link #options()} method. Returned options have custom {@link StorageOptions#retryParams()}: + * {@link RetryParams#retryMaxAttempts()} is {@code 10}, {@link RetryParams#retryMinAttempts()} is + * {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is {@code 30000}, + * {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and + * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}. + * {@link StorageOptions#connectTimeout()} and {@link StorageOptions#readTimeout()} are both set + * to {@code 60000}. */ public class RemoteGcsHelper { private static final Logger log = Logger.getLogger(RemoteGcsHelper.class.getName()); private static final String BUCKET_NAME_PREFIX = "gcloud-test-bucket-temp-"; - private static final String PROJECT_ID_ENV_VAR = "GCLOUD_TESTS_PROJECT_ID"; - private static final String PRIVATE_KEY_ENV_VAR = "GCLOUD_TESTS_KEY"; private final StorageOptions options; private RemoteGcsHelper(StorageOptions options) { @@ -62,13 +66,16 @@ public StorageOptions options() { /** * Deletes a bucket, even if non-empty. Objects in the bucket are listed and deleted until bucket - * deletion succeeds or {@code timeout} expires. + * deletion succeeds or {@code timeout} expires. To allow for the timeout, this method uses a + * separate thread to send the delete requests. Use + * {@link #forceDelete(Storage storage, String bucket)} if spawning an additional thread is + * undesirable, such as in the App Engine production runtime. * * @param storage the storage service to be used to issue requests * @param bucket the bucket to be deleted * @param timeout the maximum time to wait * @param unit the time unit of the timeout argument - * @return true if deletion succeeded, false if timeout expired. 
+ * @return true if deletion succeeded, false if timeout expired * @throws InterruptedException if the thread deleting the bucket is interrupted while waiting * @throws ExecutionException if an exception was thrown while deleting bucket or bucket objects */ @@ -85,6 +92,17 @@ public static Boolean forceDelete(Storage storage, String bucket, long timeout, } } + /** + * Deletes a bucket, even if non-empty. This method blocks until the deletion completes or fails. + * + * @param storage the storage service to be used to issue requests + * @param bucket the bucket to be deleted + * @throws StorageException if an exception is encountered during bucket deletion + */ + public static void forceDelete(Storage storage, String bucket) { + new DeleteBucketTask(storage, bucket).call(); + } + /** * Returns a bucket name generated using a random UUID. */ @@ -97,7 +115,7 @@ public static String generateBucketName() { * * @param projectId id of the project to be used for running the tests * @param keyStream input stream for a JSON key - * @return A {@code RemoteGcsHelper} object for the provided options. 
+ * @return A {@code RemoteGcsHelper} object for the provided options * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if * {@code keyStream} is not a valid JSON key stream */ @@ -107,13 +125,7 @@ public static RemoteGcsHelper create(String projectId, InputStream keyStream) StorageOptions storageOptions = StorageOptions.builder() .authCredentials(AuthCredentials.createForJson(keyStream)) .projectId(projectId) - .retryParams(RetryParams.builder() - .retryMaxAttempts(10) - .retryMinAttempts(6) - .maxRetryDelayMillis(30000) - .totalRetryPeriodMillis(120000) - .initialRetryDelayMillis(250) - .build()) + .retryParams(retryParams()) .connectTimeout(60000) .readTimeout(60000) .build(); @@ -127,59 +139,26 @@ public static RemoteGcsHelper create(String projectId, InputStream keyStream) } /** - * Creates a {@code RemoteGcsHelper} object for the given project id and JSON key path. - * - * @param projectId id of the project to be used for running the tests - * @param keyPath path to the JSON key to be used for running the tests - * @return A {@code RemoteGcsHelper} object for the provided options. - * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if the file - * pointed by {@code keyPath} does not exist + * Creates a {@code RemoteGcsHelper} object using default project id and authentication + * credentials. 
*/ - public static RemoteGcsHelper create(String projectId, String keyPath) - throws GcsHelperException { - try { - InputStream keyFileStream = new FileInputStream(keyPath); - return create(projectId, keyFileStream); - } catch (FileNotFoundException ex) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, ex.getMessage()); - } - throw GcsHelperException.translate(ex); - } catch (IOException ex) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, ex.getMessage()); - } - throw GcsHelperException.translate(ex); - } + public static RemoteGcsHelper create() throws GcsHelperException { + StorageOptions storageOptions = StorageOptions.builder() + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteGcsHelper(storageOptions); } - /** - * Creates a {@code RemoteGcsHelper} object. Project id and path to JSON key are read from two - * environment variables: {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY}. - * - * @return A {@code RemoteGcsHelper} object for the provided options. 
- * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if environment - * variables {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY} are not set or if - * the file pointed by {@code GCLOUD_TESTS_KEY} does not exist - */ - public static RemoteGcsHelper create() throws GcsHelperException { - String projectId = System.getenv(PROJECT_ID_ENV_VAR); - String keyPath = System.getenv(PRIVATE_KEY_ENV_VAR); - if (projectId == null) { - String message = "Environment variable " + PROJECT_ID_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - if (keyPath == null) { - String message = "Environment variable " + PRIVATE_KEY_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - return create(projectId, keyPath); + private static RetryParams retryParams() { + return RetryParams.builder() + .retryMaxAttempts(10) + .retryMinAttempts(6) + .maxRetryDelayMillis(30000) + .totalRetryPeriodMillis(120000) + .initialRetryDelayMillis(250) + .build(); } private static class DeleteBucketTask implements Callable { @@ -193,17 +172,22 @@ public DeleteBucketTask(Storage storage, String bucket) { } @Override - public Boolean call() throws Exception { + public Boolean call() { while (true) { - for (BlobInfo info : storage.list(bucket)) { - storage.delete(bucket, info.name()); + for (BlobInfo info : storage.list(bucket, BlobListOption.versions(true)).values()) { + storage.delete(info.blobId()); } try { storage.delete(bucket); return true; } catch (StorageException e) { if (e.code() == 409) { - Thread.sleep(500); + try { + Thread.sleep(500); + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + throw e; + } } else { throw e; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java 
b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java index 82b3578284dc..8afdd8a9660d 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java @@ -18,9 +18,10 @@ * A testing helper for Google Cloud Storage. * *

    A simple usage example: + * *

    Before the test: *

     {@code
    - * RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(PROJECT_ID, "/path/to/JSON/key.json");
    + * RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
      * Storage storage = gcsHelper.options().service();
      * String bucket = RemoteGcsHelper.generateBucketName();
      * storage.create(BucketInfo.of(bucket));
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java
    index 6a11fb0b2810..1c62805b2a1b 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java
    @@ -82,8 +82,8 @@ public void testRawEntity() {
     
     
       @Test
    -  public void testAcl() {
    -    Acl acl = new Acl(User.ofAllUsers(), Role.READER);
    +  public void testOf() {
    +    Acl acl = Acl.of(User.ofAllUsers(), Role.READER);
         assertEquals(User.ofAllUsers(), acl.entity());
         assertEquals(Role.READER, acl.role());
         ObjectAccessControl objectPb = acl.toObjectPb();
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java
    deleted file mode 100644
    index 4c22edbc35c8..000000000000
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java
    +++ /dev/null
    @@ -1,47 +0,0 @@
    -/*
    - * Copyright 2015 Google Inc. All Rights Reserved.
    - *
    - * Licensed under the Apache License, Version 2.0 (the "License");
    - * you may not use this file except in compliance with the License.
    - * You may obtain a copy of the License at
    - *
    - *       http://www.apache.org/licenses/LICENSE-2.0
    - *
    - * Unless required by applicable law or agreed to in writing, software
    - * distributed under the License is distributed on an "AS IS" BASIS,
    - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    - * See the License for the specific language governing permissions and
    - * limitations under the License.
    - */
    -
    -package com.google.gcloud.storage;
    -
    -import static org.junit.Assert.assertEquals;
    -
    -import com.google.common.collect.ImmutableList;
    -
    -import org.junit.Test;
    -
    -import java.util.Collections;
    -
    -public class BaseListResultTest {
    -
    -  @Test
    -  public void testListResult() throws Exception {
    -    ImmutableList values = ImmutableList.of("1", "2");
    -    final BaseListResult nextResult =
    -        new BaseListResult<>(null, "c", Collections.emptyList());
    -    BaseListResult.NextPageFetcher fetcher = new BaseListResult.NextPageFetcher() {
    -
    -      @Override
    -      public BaseListResult nextPage() {
    -        return nextResult;
    -      }
    -    };
    -    BaseListResult result = new BaseListResult<>(fetcher, "c", values);
    -    assertEquals(nextResult, result.nextPage());
    -    assertEquals("c", result.nextPageCursor());
    -    assertEquals(values, ImmutableList.copyOf(result.iterator()));
    -
    -  }
    -}
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java
    index 06b1105d7b9b..63972ff85dfd 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java
    @@ -23,6 +23,7 @@
     import static org.junit.Assert.assertTrue;
     
     import com.google.common.collect.Iterables;
    +import com.google.gcloud.storage.Storage.BlobGetOption;
     import com.google.gcloud.storage.Storage.BlobSourceOption;
     import com.google.gcloud.storage.Storage.BlobTargetOption;
     
    @@ -36,24 +37,28 @@ public class BatchRequestTest {
       @Test
       public void testBatchRequest() {
         BatchRequest request = BatchRequest.builder()
    -        .delete("b1", "o1")
    +        .delete(BlobId.of("b1", "o1", 1L), BlobSourceOption.generationMatch())
             .delete("b1", "o2", BlobSourceOption.generationMatch(1),
                 BlobSourceOption.metagenerationMatch(2))
             .update(BlobInfo.builder("b2", "o1").build(), BlobTargetOption.predefinedAcl(PUBLIC_READ))
             .update(BlobInfo.builder("b2", "o2").build())
    -        .get("b3", "o1")
    -        .get("b3", "o2", BlobSourceOption.generationMatch(1))
    +        .get(BlobId.of("b3", "o1", 1L), BlobGetOption.generationMatch())
    +        .get("b3", "o2", BlobGetOption.generationMatch(1))
             .get("b3", "o3")
             .build();
     
         Iterator>> deletes = request
             .toDelete().entrySet().iterator();
         Entry> delete = deletes.next();
    -    assertEquals(BlobId.of("b1", "o1"), delete.getKey());
    -    assertTrue(Iterables.isEmpty(delete.getValue()));
    +    assertEquals(BlobId.of("b1", "o1", 1L), delete.getKey());
    +    assertEquals(1, Iterables.size(delete.getValue()));
    +    assertEquals(BlobSourceOption.generationMatch(), Iterables.getFirst(delete.getValue(), null));
         delete = deletes.next();
         assertEquals(BlobId.of("b1", "o2"), delete.getKey());
         assertEquals(2, Iterables.size(delete.getValue()));
    +    assertEquals(BlobSourceOption.generationMatch(1L), Iterables.getFirst(delete.getValue(), null));
    +    assertEquals(BlobSourceOption.metagenerationMatch(2L),
    +        Iterables.get(delete.getValue(), 1, null));
         assertFalse(deletes.hasNext());
     
         Iterator>> updates = request
    @@ -68,16 +73,15 @@ public void testBatchRequest() {
         assertTrue(Iterables.isEmpty(update.getValue()));
         assertFalse(updates.hasNext());
     
    -    Iterator>> gets = request
    -        .toGet().entrySet().iterator();
    -    Entry> get = gets.next();
    -    assertEquals(BlobId.of("b3", "o1"), get.getKey());
    -    assertTrue(Iterables.isEmpty(get.getValue()));
    +    Iterator>> gets = request.toGet().entrySet().iterator();
    +    Entry> get = gets.next();
    +    assertEquals(BlobId.of("b3", "o1", 1L), get.getKey());
    +    assertEquals(1, Iterables.size(get.getValue()));
    +    assertEquals(BlobGetOption.generationMatch(), Iterables.getFirst(get.getValue(), null));
         get = gets.next();
         assertEquals(BlobId.of("b3", "o2"), get.getKey());
         assertEquals(1, Iterables.size(get.getValue()));
    -    assertEquals(BlobSourceOption.generationMatch(1),
    -        Iterables.getFirst(get.getValue(), null));
    +    assertEquals(BlobGetOption.generationMatch(1), Iterables.getFirst(get.getValue(), null));
         get = gets.next();
         assertEquals(BlobId.of("b3", "o3"), get.getKey());
         assertTrue(Iterables.isEmpty(get.getValue()));
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java
    index 5985329e0183..eb45b8b51271 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java
    @@ -22,22 +22,35 @@
     import com.google.common.collect.ImmutableList;
     import com.google.gcloud.storage.BatchResponse.Result;
     
    +import org.easymock.EasyMock;
    +import org.junit.Before;
     import org.junit.Test;
     
     import java.util.List;
     
     public class BatchResponseTest {
     
    -  private static final BlobInfo BLOB_INFO_1 = BlobInfo.builder("b", "o1").build();
    -  private static final BlobInfo BLOB_INFO_2 = BlobInfo.builder("b", "o2").build();
    -  private static final BlobInfo BLOB_INFO_3 = BlobInfo.builder("b", "o3").build();
    +  private Storage mockStorage;
    +  private Blob blob1;
    +  private Blob blob2;
    +  private Blob blob3;
    +
    +  @Before
    +  public void setUp() {
    +    mockStorage = EasyMock.createMock(Storage.class);
    +    EasyMock.expect(mockStorage.options()).andReturn(null).times(3);
    +    EasyMock.replay(mockStorage);
    +    blob1 = new Blob(mockStorage, new BlobInfo.BuilderImpl(BlobInfo.builder("b", "o1").build()));
    +    blob2 = new Blob(mockStorage, new BlobInfo.BuilderImpl(BlobInfo.builder("b", "o2").build()));
    +    blob3 = new Blob(mockStorage, new BlobInfo.BuilderImpl(BlobInfo.builder("b", "o3").build()));
    +  }
     
       @Test
       public void testBatchResponse() {
         List> deletes = ImmutableList.of(Result.of(true), Result.of(false));
    -    List> updates =
    -        ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2));
    -    List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3));
    +    List> updates =
    +        ImmutableList.of(Result.of(blob1), Result.of(blob2));
    +    List> gets = ImmutableList.of(Result.of(blob2), Result.of(blob3));
         BatchResponse response = new BatchResponse(deletes, updates, gets);
         assertEquals(deletes, response.deletes());
         assertEquals(updates, response.updates());
    @@ -47,14 +60,13 @@ public void testBatchResponse() {
       @Test
       public void testEquals() {
         List> deletes = ImmutableList.of(Result.of(true), Result.of(false));
    -    List> updates =
    -        ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2));
    -    List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3));
    +    List> updates =
    +        ImmutableList.of(Result.of(blob1), Result.of(blob2));
    +    List> gets = ImmutableList.of(Result.of(blob2), Result.of(blob3));
         List> otherDeletes = ImmutableList.of(Result.of(false), Result.of(true));
    -    List> otherUpdates =
    -        ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3));
    -    List> otherGets =
    -        ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2));
    +    List> otherUpdates = ImmutableList.of(Result.of(blob2), Result.of(blob3));
    +    List> otherGets =
    +        ImmutableList.of(Result.of(blob1), Result.of(blob2));
         BatchResponse response = new BatchResponse(deletes, updates, gets);
         BatchResponse responseEquals = new BatchResponse(deletes, updates, gets);
         BatchResponse responseNotEquals1 = new BatchResponse(otherDeletes, updates, gets);
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java
    index 70560b0c9a9e..029181c6c07b 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java
    @@ -20,7 +20,11 @@
     import static com.google.gcloud.storage.Acl.Role.READER;
     import static com.google.gcloud.storage.Acl.Role.WRITER;
     import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertFalse;
    +import static org.junit.Assert.assertNull;
    +import static org.junit.Assert.assertTrue;
     
    +import com.google.api.services.storage.model.StorageObject;
     import com.google.common.collect.ImmutableList;
     import com.google.common.collect.ImmutableMap;
     import com.google.gcloud.storage.Acl.Project;
    @@ -28,14 +32,15 @@
     
     import org.junit.Test;
     
    +import java.math.BigInteger;
     import java.util.List;
     import java.util.Map;
     
     public class BlobInfoTest {
     
       private static final List ACL = ImmutableList.of(
    -      new Acl(User.ofAllAuthenticatedUsers(), READER),
    -      new Acl(new Project(VIEWERS, "p1"), WRITER));
    +      Acl.of(User.ofAllAuthenticatedUsers(), READER),
    +      Acl.of(new Project(VIEWERS, "p1"), WRITER));
       private static final Integer COMPONENT_COUNT = 2;
       private static final String CONTENT_TYPE = "text/html";
       private static final String CACHE_CONTROL = "cache";
    @@ -55,7 +60,7 @@ public class BlobInfoTest {
       private static final String SELF_LINK = "http://storage/b/n";
       private static final Long SIZE = 1024L;
       private static final Long UPDATE_TIME = DELETE_TIME - 1L;
    -  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n")
    +  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n", GENERATION)
           .acl(ACL)
           .componentCount(COMPONENT_COUNT)
           .contentType(CONTENT_TYPE)
    @@ -66,7 +71,6 @@ public class BlobInfoTest {
           .crc32c(CRC32)
           .deleteTime(DELETE_TIME)
           .etag(ETAG)
    -      .generation(GENERATION)
           .id(ID)
           .md5(MD5)
           .mediaLink(MEDIA_LINK)
    @@ -77,6 +81,10 @@ public class BlobInfoTest {
           .size(SIZE)
           .updateTime(UPDATE_TIME)
           .build();
    +  private static final BlobInfo DIRECTORY_INFO = BlobInfo.builder("b", "n/")
    +      .size(0L)
    +      .isDirectory(true)
    +      .build();
     
       @Test
       public void testToBuilder() {
    @@ -85,10 +93,16 @@ public void testToBuilder() {
         assertEquals("n2", blobInfo.name());
         assertEquals("b2", blobInfo.bucket());
         assertEquals(Long.valueOf(200), blobInfo.size());
    -    blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n")).size(SIZE).build();
    +    blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n", GENERATION)).size(SIZE).build();
         compareBlobs(BLOB_INFO, blobInfo);
       }
     
    +  @Test
    +  public void testToBuilderIncomplete() {
    +    BlobInfo incompleteBlobInfo = BlobInfo.builder(BlobId.of("b2", "n2")).build();
    +    compareBlobs(incompleteBlobInfo, incompleteBlobInfo.toBuilder().build());
    +  }
    +
       @Test
       public void testBuilder() {
         assertEquals("b", BLOB_INFO.bucket());
    @@ -113,6 +127,30 @@ public void testBuilder() {
         assertEquals(SELF_LINK, BLOB_INFO.selfLink());
         assertEquals(SIZE, BLOB_INFO.size());
         assertEquals(UPDATE_TIME, BLOB_INFO.updateTime());
    +    assertFalse(BLOB_INFO.isDirectory());
    +    assertEquals("b", DIRECTORY_INFO.bucket());
    +    assertEquals("n/", DIRECTORY_INFO.name());
    +    assertNull(DIRECTORY_INFO.acl());
    +    assertNull(DIRECTORY_INFO.componentCount());
    +    assertNull(DIRECTORY_INFO.contentType());
    +    assertNull(DIRECTORY_INFO.cacheControl());
    +    assertNull(DIRECTORY_INFO.contentDisposition());
    +    assertNull(DIRECTORY_INFO.contentEncoding());
    +    assertNull(DIRECTORY_INFO.contentLanguage());
    +    assertNull(DIRECTORY_INFO.crc32c());
    +    assertNull(DIRECTORY_INFO.deleteTime());
    +    assertNull(DIRECTORY_INFO.etag());
    +    assertNull(DIRECTORY_INFO.generation());
    +    assertNull(DIRECTORY_INFO.id());
    +    assertNull(DIRECTORY_INFO.md5());
    +    assertNull(DIRECTORY_INFO.mediaLink());
    +    assertNull(DIRECTORY_INFO.metadata());
    +    assertNull(DIRECTORY_INFO.metageneration());
    +    assertNull(DIRECTORY_INFO.owner());
    +    assertNull(DIRECTORY_INFO.selfLink());
    +    assertEquals(0L, (long) DIRECTORY_INFO.size());
    +    assertNull(DIRECTORY_INFO.updateTime());
    +    assertTrue(DIRECTORY_INFO.isDirectory());
       }
     
       private void compareBlobs(BlobInfo expected, BlobInfo value) {
    @@ -146,10 +184,39 @@ public void testToPbAndFromPb() {
         compareBlobs(BLOB_INFO, BlobInfo.fromPb(BLOB_INFO.toPb()));
         BlobInfo blobInfo = BlobInfo.builder(BlobId.of("b", "n")).build();
         compareBlobs(blobInfo, BlobInfo.fromPb(blobInfo.toPb()));
    +    StorageObject object = new StorageObject()
    +        .setName("n/")
    +        .setBucket("b")
    +        .setSize(BigInteger.ZERO)
    +        .set("isDirectory", true);
    +    blobInfo = BlobInfo.fromPb(object);
    +    assertEquals("b", blobInfo.bucket());
    +    assertEquals("n/", blobInfo.name());
    +    assertNull(blobInfo.acl());
    +    assertNull(blobInfo.componentCount());
    +    assertNull(blobInfo.contentType());
    +    assertNull(blobInfo.cacheControl());
    +    assertNull(blobInfo.contentDisposition());
    +    assertNull(blobInfo.contentEncoding());
    +    assertNull(blobInfo.contentLanguage());
    +    assertNull(blobInfo.crc32c());
    +    assertNull(blobInfo.deleteTime());
    +    assertNull(blobInfo.etag());
    +    assertNull(blobInfo.generation());
    +    assertNull(blobInfo.id());
    +    assertNull(blobInfo.md5());
    +    assertNull(blobInfo.mediaLink());
    +    assertNull(blobInfo.metadata());
    +    assertNull(blobInfo.metageneration());
    +    assertNull(blobInfo.owner());
    +    assertNull(blobInfo.selfLink());
    +    assertEquals(0L, (long) blobInfo.size());
    +    assertNull(blobInfo.updateTime());
    +    assertTrue(blobInfo.isDirectory());
       }
     
       @Test
       public void testBlobId() {
    -    assertEquals(BlobId.of("b", "n"), BLOB_INFO.blobId());
    +    assertEquals(BlobId.of("b", "n", GENERATION), BLOB_INFO.blobId());
       }
     }
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java
    deleted file mode 100644
    index 615213ab1516..000000000000
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java
    +++ /dev/null
    @@ -1,95 +0,0 @@
    -/*
    - * Copyright 2015 Google Inc. All Rights Reserved.
    - *
    - * Licensed under the Apache License, Version 2.0 (the "License");
    - * you may not use this file except in compliance with the License.
    - * You may obtain a copy of the License at
    - *
    - *       http://www.apache.org/licenses/LICENSE-2.0
    - *
    - * Unless required by applicable law or agreed to in writing, software
    - * distributed under the License is distributed on an "AS IS" BASIS,
    - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    - * See the License for the specific language governing permissions and
    - * limitations under the License.
    - */
    -
    -package com.google.gcloud.storage;
    -
    -import static org.easymock.EasyMock.createStrictMock;
    -import static org.easymock.EasyMock.expect;
    -import static org.easymock.EasyMock.replay;
    -import static org.easymock.EasyMock.verify;
    -import static org.junit.Assert.assertEquals;
    -import static org.junit.Assert.assertFalse;
    -
    -import com.google.common.collect.ImmutableList;
    -
    -import org.junit.Before;
    -import org.junit.Test;
    -
    -import java.util.Iterator;
    -
    -public class BlobListResultTest {
    -
    -  private static final Iterable FIRST_PAGE_RESULTS = ImmutableList.of(
    -      BlobInfo.builder("b1", "n1").build(),
    -      BlobInfo.builder("b2", "n2").build());
    -
    -  private static final Iterable SECOND_PAGE_RESULTS = ImmutableList.of(
    -      BlobInfo.builder("b1", "n1").build(),
    -      BlobInfo.builder("b2", "n2").build());
    -
    -  private BaseListResult firstPage;
    -  private BaseListResult secondPage;
    -  private Storage storage;
    -  private BlobListResult blobListResult;
    -
    -  @Before
    -  public void setUp() throws Exception {
    -    firstPage = createStrictMock(BaseListResult.class);
    -    secondPage = createStrictMock(BaseListResult.class);
    -    storage = createStrictMock(Storage.class);
    -    blobListResult = new BlobListResult(storage, firstPage);
    -  }
    -
    -  @Test
    -  public void testListResult() throws Exception {
    -    expect(firstPage.iterator()).andReturn(FIRST_PAGE_RESULTS.iterator());
    -    replay(firstPage);
    -    Iterator firstPageIterator = FIRST_PAGE_RESULTS.iterator();
    -    Iterator blobListIterator = blobListResult.iterator();
    -    while (blobListIterator.hasNext() && firstPageIterator.hasNext()) {
    -      assertEquals(firstPageIterator.next(), blobListIterator.next().info());
    -    }
    -    assertFalse(blobListIterator.hasNext());
    -    assertFalse(firstPageIterator.hasNext());
    -    verify(firstPage);
    -  }
    -
    -  @Test
    -  public void testCursor() throws Exception {
    -    expect(firstPage.nextPageCursor()).andReturn("c");
    -    replay(firstPage);
    -    assertEquals("c", blobListResult.nextPageCursor());
    -    verify(firstPage);
    -  }
    -
    -  @Test
    -  public void testNextPage() throws Exception {
    -    expect(firstPage.nextPage()).andReturn(secondPage);
    -    expect(secondPage.iterator()).andReturn(SECOND_PAGE_RESULTS.iterator());
    -    replay(firstPage);
    -    replay(secondPage);
    -    ListResult nextPageResult = blobListResult.nextPage();
    -    Iterator secondPageIterator = SECOND_PAGE_RESULTS.iterator();
    -    Iterator blobListIterator = nextPageResult.iterator();
    -    while (blobListIterator.hasNext() && secondPageIterator.hasNext()) {
    -      assertEquals(secondPageIterator.next(), blobListIterator.next().info());
    -    }
    -    assertFalse(blobListIterator.hasNext());
    -    assertFalse(secondPageIterator.hasNext());
    -    verify(firstPage);
    -    verify(secondPage);
    -  }
    -}
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java
    similarity index 62%
    rename from gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java
    rename to gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java
    index e1f904bf72fe..1b0f36a864a2 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java
    @@ -19,7 +19,6 @@
     import static org.easymock.EasyMock.anyObject;
     import static org.easymock.EasyMock.createMock;
     import static org.easymock.EasyMock.expect;
    -import static org.easymock.EasyMock.expectLastCall;
     import static org.easymock.EasyMock.replay;
     import static org.easymock.EasyMock.verify;
     import static org.junit.Assert.assertArrayEquals;
    @@ -28,9 +27,11 @@
     import static org.junit.Assert.fail;
     
     import com.google.common.collect.ImmutableMap;
    +import com.google.gcloud.ReadChannel;
     import com.google.gcloud.RestorableState;
    -import com.google.gcloud.spi.StorageRpc;
    -import com.google.gcloud.spi.StorageRpcFactory;
    +import com.google.gcloud.RetryParams;
    +import com.google.gcloud.storage.spi.StorageRpc;
    +import com.google.gcloud.storage.spi.StorageRpcFactory;
     
     import org.junit.After;
     import org.junit.Before;
    @@ -38,15 +39,16 @@
     
     import java.io.IOException;
     import java.nio.ByteBuffer;
    +import java.nio.channels.ClosedChannelException;
     import java.util.Arrays;
     import java.util.Map;
     import java.util.Random;
     
    -public class BlobReadChannelImplTest {
    +public class BlobReadChannelTest {
     
       private static final String BUCKET_NAME = "b";
       private static final String BLOB_NAME = "n";
    -  private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME);
    +  private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME, -1L);
       private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of();
       private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024;
       private static final int CUSTOM_CHUNK_SIZE = 2 * 1024 * 1024;
    @@ -55,10 +57,10 @@ public class BlobReadChannelImplTest {
       private StorageOptions options;
       private StorageRpcFactory rpcFactoryMock;
       private StorageRpc storageRpcMock;
    -  private BlobReadChannelImpl reader;
    +  private BlobReadChannel reader;
     
       @Before
    -  public void setUp() throws IOException, InterruptedException {
    +  public void setUp() {
         rpcFactoryMock = createMock(StorageRpcFactory.class);
         storageRpcMock = createMock(StorageRpc.class);
         expect(rpcFactoryMock.create(anyObject(StorageOptions.class))).andReturn(storageRpcMock);
    @@ -66,6 +68,7 @@ public void setUp() throws IOException, InterruptedException {
         options = StorageOptions.builder()
             .projectId("projectId")
             .serviceRpcFactory(rpcFactoryMock)
    +        .retryParams(RetryParams.noRetries())
             .build();
       }
     
    @@ -77,18 +80,18 @@ public void tearDown() throws Exception {
       @Test
       public void testCreate() {
         replay(storageRpcMock);
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         assertTrue(reader.isOpen());
       }
     
       @Test
       public void testReadBuffered() throws IOException {
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE);
         ByteBuffer firstReadBuffer = ByteBuffer.allocate(42);
         ByteBuffer secondReadBuffer = ByteBuffer.allocate(42);
         expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    -        .andReturn(result);
    +        .andReturn(StorageRpc.Tuple.of("etag", result));
         replay(storageRpcMock);
         reader.read(firstReadBuffer);
         reader.read(secondReadBuffer);
    @@ -101,16 +104,17 @@ public void testReadBuffered() throws IOException {
     
       @Test
       public void testReadBig() throws IOException {
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         reader.chunkSize(CUSTOM_CHUNK_SIZE);
         byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE);
         byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE);
         ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
         ByteBuffer secondReadBuffer = ByteBuffer.allocate(42);
    -    storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE);
    -    expectLastCall().andReturn(firstResult);
    -    storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, CUSTOM_CHUNK_SIZE);
    -    expectLastCall().andReturn(secondResult);
    +    expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    +        .andReturn(StorageRpc.Tuple.of("etag", firstResult));
    +    expect(storageRpcMock.read(
    +        BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, CUSTOM_CHUNK_SIZE))
    +            .andReturn(StorageRpc.Tuple.of("etag", secondResult));
         replay(storageRpcMock);
         reader.read(firstReadBuffer);
         reader.read(secondReadBuffer);
    @@ -121,66 +125,91 @@ public void testReadBig() throws IOException {
     
       @Test
       public void testReadFinish() throws IOException {
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         byte[] result = {};
         ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
         expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    -        .andReturn(result);
    +        .andReturn(StorageRpc.Tuple.of("etag", result));
         replay(storageRpcMock);
         assertEquals(-1, reader.read(readBuffer));
       }
     
       @Test
       public void testSeek() throws IOException {
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         reader.seek(42);
         byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE);
         ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
         expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE))
    -        .andReturn(result);
    +        .andReturn(StorageRpc.Tuple.of("etag", result));
         replay(storageRpcMock);
         reader.read(readBuffer);
         assertArrayEquals(result, readBuffer.array());
       }
     
       @Test
    -  public void testClose() throws IOException {
    +  public void testClose() {
         replay(storageRpcMock);
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         assertTrue(reader.isOpen());
         reader.close();
         assertTrue(!reader.isOpen());
       }
     
       @Test
    -  public void testReadClosed() {
    +  public void testReadClosed() throws IOException {
         replay(storageRpcMock);
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         reader.close();
         try {
           ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
           reader.read(readBuffer);
    -      fail("Expected BlobReadChannel read to throw IOException");
    -    } catch (IOException ex) {
    +      fail("Expected BlobReadChannel read to throw ClosedChannelException");
    +    } catch (ClosedChannelException ex) {
           // expected
         }
       }
     
       @Test
    -  public void testSaveAndRestore() throws IOException, ClassNotFoundException {
    +  public void testReadGenerationChanged() throws IOException {
    +    BlobId blobId = BlobId.of(BUCKET_NAME, BLOB_NAME);
    +    reader = new BlobReadChannel(options, blobId, EMPTY_RPC_OPTIONS);
    +    byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE);
    +    byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE);
    +    ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
    +    ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
    +    expect(storageRpcMock.read(blobId.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    +        .andReturn(StorageRpc.Tuple.of("etag1", firstResult));
    +    expect(
    +        storageRpcMock.read(blobId.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE,
    +            DEFAULT_CHUNK_SIZE)).andReturn(StorageRpc.Tuple.of("etag2", secondResult));
    +    replay(storageRpcMock);
    +    reader.read(firstReadBuffer);
    +    try {
    +      reader.read(secondReadBuffer);
    +      fail("Expected ReadChannel read to throw StorageException");
    +    } catch (StorageException ex) {
    +      StringBuilder messageBuilder = new StringBuilder();
    +      messageBuilder.append("Blob ").append(blobId).append(" was updated while reading");
    +      assertEquals(messageBuilder.toString(), ex.getMessage());
    +    }
    +  }
    +
    +  @Test
    +  public void testSaveAndRestore() throws IOException {
         byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE);
         byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE);
         ByteBuffer firstReadBuffer = ByteBuffer.allocate(42);
         ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE);
         expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    -        .andReturn(firstResult);
    +        .andReturn(StorageRpc.Tuple.of("etag", firstResult));
         expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE))
    -        .andReturn(secondResult);
    +        .andReturn(StorageRpc.Tuple.of("etag", secondResult));
         replay(storageRpcMock);
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
         reader.read(firstReadBuffer);
    -    RestorableState readerState = reader.capture();
    -    BlobReadChannel restoredReader = readerState.restore();
    +    RestorableState readerState = reader.capture();
    +    ReadChannel restoredReader = readerState.restore();
         restoredReader.read(secondReadBuffer);
         assertArrayEquals(Arrays.copyOf(firstResult, firstReadBuffer.capacity()),
             firstReadBuffer.array());
    @@ -190,10 +219,11 @@ public void testSaveAndRestore() throws IOException, ClassNotFoundException {
       @Test
       public void testStateEquals() {
         replay(storageRpcMock);
    -    reader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    -    BlobReadChannel secondReader = new BlobReadChannelImpl(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    -    RestorableState state = reader.capture();
    -    RestorableState secondState = secondReader.capture();
    +    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
     +    @SuppressWarnings("resource") // secondReader is intentionally left open; the test only captures and compares state
    +        ReadChannel secondReader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
    +    RestorableState state = reader.capture();
    +    RestorableState secondState = secondReader.capture();
         assertEquals(state, secondState);
         assertEquals(state.hashCode(), secondState.hashCode());
         assertEquals(state.toString(), secondState.toString());
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
    index defb1d35e3f4..5a6173c08199 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
    @@ -16,9 +16,13 @@
     
     package com.google.gcloud.storage;
     
    +import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS;
    +import static com.google.gcloud.storage.Acl.Role.READER;
    +import static com.google.gcloud.storage.Acl.Role.WRITER;
     import static org.easymock.EasyMock.capture;
     import static org.easymock.EasyMock.createMock;
     import static org.easymock.EasyMock.createStrictMock;
    +import static org.easymock.EasyMock.eq;
     import static org.easymock.EasyMock.expect;
     import static org.easymock.EasyMock.replay;
     import static org.easymock.EasyMock.verify;
    @@ -29,7 +33,11 @@
     import static org.junit.Assert.assertSame;
     import static org.junit.Assert.assertTrue;
     
    -import com.google.api.client.util.Lists;
    +import com.google.common.collect.ImmutableList;
    +import com.google.common.collect.ImmutableMap;
    +import com.google.gcloud.ReadChannel;
    +import com.google.gcloud.storage.Acl.Project;
    +import com.google.gcloud.storage.Acl.User;
     import com.google.gcloud.storage.Storage.CopyRequest;
     
     import org.easymock.Capture;
    @@ -38,25 +46,69 @@
     import org.junit.Test;
     
     import java.net.URL;
    -import java.util.Arrays;
     import java.util.List;
    +import java.util.Map;
     import java.util.concurrent.TimeUnit;
     
     public class BlobTest {
     
    -  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").build();
    -  private static final BlobId[] BLOB_ID_ARRAY = {BlobId.of("b1", "n1"),
    -      BlobId.of("b2", "n2"), BlobId.of("b3", "n3")};
    -  private static final BlobInfo[] BLOB_INFO_ARRAY = {BlobInfo.builder("b1", "n1").build(),
    -      BlobInfo.builder("b2", "n2").build(), BlobInfo.builder("b3", "n3").build()};
    +  private static final List ACL = ImmutableList.of(
    +      Acl.of(User.ofAllAuthenticatedUsers(), READER), Acl.of(new Project(VIEWERS, "p1"), WRITER));
    +  private static final Integer COMPONENT_COUNT = 2;
    +  private static final String CONTENT_TYPE = "text/html";
    +  private static final String CACHE_CONTROL = "cache";
    +  private static final String CONTENT_DISPOSITION = "content-disposition";
    +  private static final String CONTENT_ENCODING = "UTF-8";
    +  private static final String CONTENT_LANGUAGE = "En";
    +  private static final String CRC32 = "0xFF00";
    +  private static final Long DELETE_TIME = System.currentTimeMillis();
    +  private static final String ETAG = "0xFF00";
    +  private static final Long GENERATION = 1L;
    +  private static final String ID = "B/N:1";
    +  private static final String MD5 = "0xFF00";
    +  private static final String MEDIA_LINK = "http://media/b/n";
    +  private static final Map METADATA = ImmutableMap.of("n1", "v1", "n2", "v2");
    +  private static final Long META_GENERATION = 10L;
    +  private static final User OWNER = new User("user@gmail.com");
    +  private static final String SELF_LINK = "http://storage/b/n";
    +  private static final Long SIZE = 1024L;
    +  private static final Long UPDATE_TIME = DELETE_TIME - 1L;
    +  private static final BlobInfo FULL_BLOB_INFO = BlobInfo.builder("b", "n", GENERATION)
    +      .acl(ACL)
    +      .componentCount(COMPONENT_COUNT)
    +      .contentType(CONTENT_TYPE)
    +      .cacheControl(CACHE_CONTROL)
    +      .contentDisposition(CONTENT_DISPOSITION)
    +      .contentEncoding(CONTENT_ENCODING)
    +      .contentLanguage(CONTENT_LANGUAGE)
    +      .crc32c(CRC32)
    +      .deleteTime(DELETE_TIME)
    +      .etag(ETAG)
    +      .id(ID)
    +      .md5(MD5)
    +      .mediaLink(MEDIA_LINK)
    +      .metadata(METADATA)
    +      .metageneration(META_GENERATION)
    +      .owner(OWNER)
    +      .selfLink(SELF_LINK)
    +      .size(SIZE)
    +      .updateTime(UPDATE_TIME)
    +      .build();
    +  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").metageneration(42L).build();
    +  private static final BlobInfo DIRECTORY_INFO = BlobInfo.builder("b", "n/")
    +      .size(0L)
    +      .isDirectory(true)
    +      .build();
     
       private Storage storage;
       private Blob blob;
    +  private Blob expectedBlob;
    +  private Storage serviceMockReturnsOptions = createMock(Storage.class);
    +  private StorageOptions mockOptions = createMock(StorageOptions.class);
     
       @Before
    -  public void setUp() throws Exception {
    +  public void setUp() {
         storage = createStrictMock(Storage.class);
    -    blob = new Blob(storage, BLOB_INFO);
       }
     
       @After
    @@ -64,71 +116,122 @@ public void tearDown() throws Exception {
         verify(storage);
       }
     
    -  @Test
    -  public void testInfo() throws Exception {
    -    assertEquals(BLOB_INFO, blob.info());
    -    replay(storage);
    +  private void initializeExpectedBlob(int optionsCalls) {
    +    expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls);
    +    replay(serviceMockReturnsOptions);
    +    expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(BLOB_INFO));
    +  }
    +
    +  private void initializeBlob() {
    +    blob = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO));
       }
     
       @Test
       public void testExists_True() throws Exception {
    -    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(BLOB_INFO);
    +    initializeExpectedBlob(1);
    +    Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()};
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(expectedBlob.blobId(), expectedOptions)).andReturn(expectedBlob);
         replay(storage);
    +    initializeBlob();
         assertTrue(blob.exists());
       }
     
       @Test
       public void testExists_False() throws Exception {
    -    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(null);
    +    Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()};
    +    expect(storage.options()).andReturn(null);
    +    expect(storage.get(BLOB_INFO.blobId(), expectedOptions)).andReturn(null);
         replay(storage);
    +    initializeBlob();
         assertFalse(blob.exists());
       }
     
       @Test
       public void testContent() throws Exception {
    +    initializeExpectedBlob(2);
         byte[] content = {1, 2};
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.readAllBytes(BLOB_INFO.blobId())).andReturn(content);
         replay(storage);
    +    initializeBlob();
         assertArrayEquals(content, blob.content());
       }
     
       @Test
       public void testReload() throws Exception {
    -    BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build();
    -    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(updatedInfo);
    +    initializeExpectedBlob(2);
    +    Blob expectedReloadedBlob = expectedBlob.toBuilder().cacheControl("c").build();
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0]))
    +        .andReturn(expectedReloadedBlob);
         replay(storage);
    +    initializeBlob();
         Blob updatedBlob = blob.reload();
    -    assertSame(storage, blob.storage());
    -    assertEquals(updatedInfo, updatedBlob.info());
    +    assertEquals(expectedReloadedBlob, updatedBlob);
    +  }
    +
    +  @Test
    +  public void testReloadNull() throws Exception {
    +    initializeExpectedBlob(1);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null);
    +    replay(storage);
    +    initializeBlob();
    +    Blob reloadedBlob = blob.reload();
    +    assertNull(reloadedBlob);
    +  }
    +
    +  @Test
    +  public void testReloadWithOptions() throws Exception {
    +    initializeExpectedBlob(2);
    +    Blob expectedReloadedBlob = expectedBlob.toBuilder().cacheControl("c").build();
    +    Storage.BlobGetOption[] options = {Storage.BlobGetOption.metagenerationMatch(42L)};
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BLOB_INFO.blobId(), options)).andReturn(expectedReloadedBlob);
    +    replay(storage);
    +    initializeBlob();
    +    Blob updatedBlob = blob.reload(Blob.BlobSourceOption.metagenerationMatch());
    +    assertEquals(expectedReloadedBlob, updatedBlob);
       }
     
       @Test
       public void testUpdate() throws Exception {
    -    BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build();
    -    expect(storage.update(updatedInfo, new Storage.BlobTargetOption[0])).andReturn(updatedInfo);
    +    initializeExpectedBlob(2);
    +    Blob expectedUpdatedBlob = expectedBlob.toBuilder().cacheControl("c").build();
    +    expect(storage.options()).andReturn(mockOptions).times(2);
    +    expect(storage.update(eq(expectedUpdatedBlob), new Storage.BlobTargetOption[0]))
    +        .andReturn(expectedUpdatedBlob);
         replay(storage);
    -    Blob updatedBlob = blob.update(updatedInfo);
    -    assertSame(storage, blob.storage());
    -    assertEquals(updatedInfo, updatedBlob.info());
    +    initializeBlob();
    +    Blob updatedBlob = new Blob(storage, new BlobInfo.BuilderImpl(expectedUpdatedBlob));
    +    Blob actualUpdatedBlob = updatedBlob.update();
    +    assertEquals(expectedUpdatedBlob, actualUpdatedBlob);
       }
     
       @Test
       public void testDelete() throws Exception {
    +    initializeExpectedBlob(2);
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.delete(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(true);
         replay(storage);
    +    initializeBlob();
         assertTrue(blob.delete());
       }
     
       @Test
       public void testCopyToBucket() throws Exception {
    +    initializeExpectedBlob(2);
         BlobInfo target = BlobInfo.builder(BlobId.of("bt", "n")).build();
         CopyWriter copyWriter = createMock(CopyWriter.class);
         Capture capturedCopyRequest = Capture.newInstance();
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter);
         replay(storage);
    +    initializeBlob();
         CopyWriter returnedCopyWriter = blob.copyTo("bt");
         assertEquals(copyWriter, returnedCopyWriter);
    -    assertEquals(capturedCopyRequest.getValue().source(), blob.id());
    +    assertEquals(capturedCopyRequest.getValue().source(), blob.blobId());
         assertEquals(capturedCopyRequest.getValue().target(), target);
         assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty());
         assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty());
    @@ -136,14 +239,17 @@ public void testCopyToBucket() throws Exception {
     
       @Test
       public void testCopyTo() throws Exception {
    +    initializeExpectedBlob(2);
         BlobInfo target = BlobInfo.builder(BlobId.of("bt", "nt")).build();
         CopyWriter copyWriter = createMock(CopyWriter.class);
         Capture capturedCopyRequest = Capture.newInstance();
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter);
         replay(storage);
    +    initializeBlob();
         CopyWriter returnedCopyWriter = blob.copyTo("bt", "nt");
         assertEquals(copyWriter, returnedCopyWriter);
    -    assertEquals(capturedCopyRequest.getValue().source(), blob.id());
    +    assertEquals(capturedCopyRequest.getValue().source(), blob.blobId());
         assertEquals(capturedCopyRequest.getValue().target(), target);
         assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty());
         assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty());
    @@ -151,15 +257,18 @@ public void testCopyTo() throws Exception {
     
       @Test
       public void testCopyToBlobId() throws Exception {
    +    initializeExpectedBlob(2);
         BlobId targetId = BlobId.of("bt", "nt");
         CopyWriter copyWriter = createMock(CopyWriter.class);
    -    BlobInfo target = BLOB_INFO.builder(targetId).build();
    +    BlobInfo target = BlobInfo.builder(targetId).build();
         Capture capturedCopyRequest = Capture.newInstance();
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter);
         replay(storage);
    +    initializeBlob();
         CopyWriter returnedCopyWriter = blob.copyTo(targetId);
         assertEquals(copyWriter, returnedCopyWriter);
    -    assertEquals(capturedCopyRequest.getValue().source(), blob.id());
    +    assertEquals(capturedCopyRequest.getValue().source(), blob.blobId());
         assertEquals(capturedCopyRequest.getValue().target(), target);
         assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty());
         assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty());
    @@ -167,132 +276,123 @@ public void testCopyToBlobId() throws Exception {
     
       @Test
       public void testReader() throws Exception {
    -    BlobReadChannel channel = createMock(BlobReadChannel.class);
    +    initializeExpectedBlob(2);
    +    ReadChannel channel = createMock(ReadChannel.class);
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.reader(BLOB_INFO.blobId())).andReturn(channel);
         replay(storage);
    +    initializeBlob();
         assertSame(channel, blob.reader());
       }
     
       @Test
       public void testWriter() throws Exception {
    +    initializeExpectedBlob(2);
         BlobWriteChannel channel = createMock(BlobWriteChannel.class);
    -    expect(storage.writer(BLOB_INFO)).andReturn(channel);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.writer(eq(expectedBlob))).andReturn(channel);
         replay(storage);
    +    initializeBlob();
         assertSame(channel, blob.writer());
       }
     
       @Test
       public void testSignUrl() throws Exception {
    +    initializeExpectedBlob(2);
         URL url = new URL("http://localhost:123/bla");
    -    expect(storage.signUrl(BLOB_INFO, 100, TimeUnit.SECONDS)).andReturn(url);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.signUrl(expectedBlob, 100, TimeUnit.SECONDS)).andReturn(url);
         replay(storage);
    +    initializeBlob();
         assertEquals(url, blob.signUrl(100, TimeUnit.SECONDS));
       }
     
       @Test
    -  public void testGetNone() throws Exception {
    -    replay(storage);
    -    assertTrue(Blob.get(storage).isEmpty());
    -  }
    -
    -  @Test
    -  public void testGetSome() throws Exception {
    -    List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY);
    -    expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList);
    -    replay(storage);
    -    List result = Blob.get(storage, BLOB_ID_ARRAY);
    -    assertEquals(blobInfoList.size(), result.size());
    -    for (int i = 0; i < blobInfoList.size(); i++) {
    -      assertEquals(blobInfoList.get(i), result.get(i).info());
    -    }
    -  }
    -
    -  @Test
    -  public void testGetSomeNull() throws Exception {
    -    List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY[0], null, BLOB_INFO_ARRAY[2]);
    -    expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList);
    -    replay(storage);
    -    List result = Blob.get(storage, BLOB_ID_ARRAY);
    -    assertEquals(blobInfoList.size(), result.size());
    -    for (int i = 0; i < blobInfoList.size(); i++) {
    -      if (blobInfoList.get(i) != null) {
    -        assertEquals(blobInfoList.get(i), result.get(i).info());
    -      } else {
    -        assertNull(result.get(i));
    -      }
    -    }
    -  }
    -
    -  @Test
    -  public void testUpdateNone() throws Exception {
    -    replay(storage);
    -    assertTrue(Blob.update(storage).isEmpty());
    -  }
    -
    -  @Test
    -  public void testUpdateSome() throws Exception {
    -    List blobInfoList = Lists.newArrayListWithCapacity(BLOB_ID_ARRAY.length);
    -    for (BlobInfo info : BLOB_INFO_ARRAY) {
    -      blobInfoList.add(info.toBuilder().contentType("content").build());
    -    }
    -    expect(storage.update(BLOB_INFO_ARRAY)).andReturn(blobInfoList);
    -    replay(storage);
    -    List result = Blob.update(storage, BLOB_INFO_ARRAY);
    -    assertEquals(blobInfoList.size(), result.size());
    -    for (int i = 0; i < blobInfoList.size(); i++) {
    -      assertEquals(blobInfoList.get(i), result.get(i).info());
    -    }
    -  }
    -
    -  @Test
    -  public void testUpdateSomeNull() throws Exception {
    -    List blobInfoList = Arrays.asList(
    -        BLOB_INFO_ARRAY[0].toBuilder().contentType("content").build(), null,
    -        BLOB_INFO_ARRAY[2].toBuilder().contentType("content").build());
    -    expect(storage.update(BLOB_INFO_ARRAY)).andReturn(blobInfoList);
    -    replay(storage);
    -    List result = Blob.update(storage, BLOB_INFO_ARRAY);
    -    assertEquals(blobInfoList.size(), result.size());
    -    for (int i = 0; i < blobInfoList.size(); i++) {
    -      if (blobInfoList.get(i) != null) {
    -        assertEquals(blobInfoList.get(i), result.get(i).info());
    -      } else {
    -        assertNull(result.get(i));
    -      }
    -    }
    -  }
    -
    -  @Test
    -  public void testDeleteNone() throws Exception {
    -    replay(storage);
    -    assertTrue(Blob.delete(storage).isEmpty());
    -  }
    -
    -  @Test
    -  public void testDeleteSome() throws Exception {
    -    List deleleResultList = Arrays.asList(true, true, true);
    -    expect(storage.delete(BLOB_ID_ARRAY)).andReturn(deleleResultList);
    -    replay(storage);
    -    List result = Blob.delete(storage, BLOB_ID_ARRAY);
    -    assertEquals(deleleResultList.size(), result.size());
    -    for (int i = 0; i < deleleResultList.size(); i++) {
    -      assertEquals(deleleResultList.get(i), result.get(i));
    -    }
    -  }
    -
    -  @Test
    -  public void testLoadFromString() throws Exception {
    -    expect(storage.get(BLOB_INFO.blobId())).andReturn(BLOB_INFO);
    +  public void testToBuilder() {
    +    expect(storage.options()).andReturn(mockOptions).times(6);
         replay(storage);
    -    Blob loadedBlob = Blob.load(storage, BLOB_INFO.bucket(), BLOB_INFO.name());
    -    assertEquals(BLOB_INFO, loadedBlob.info());
    +    Blob fullBlob = new Blob(storage, new BlobInfo.BuilderImpl(FULL_BLOB_INFO));
    +    assertEquals(fullBlob, fullBlob.toBuilder().build());
    +    Blob simpleBlob = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO));
    +    assertEquals(simpleBlob, simpleBlob.toBuilder().build());
    +    Blob directory = new Blob(storage, new BlobInfo.BuilderImpl(DIRECTORY_INFO));
    +    assertEquals(directory, directory.toBuilder().build());
       }
     
       @Test
    -  public void testLoadFromId() throws Exception {
    -    expect(storage.get(BLOB_INFO.blobId())).andReturn(BLOB_INFO);
    +  public void testBuilder() {
    +    initializeExpectedBlob(4);
    +    expect(storage.options()).andReturn(mockOptions).times(4);
         replay(storage);
    -    Blob loadedBlob = Blob.load(storage, BLOB_INFO.blobId());
    -    assertEquals(BLOB_INFO, loadedBlob.info());
    +    Blob.Builder builder = new Blob.Builder(new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO)));
    +    Blob blob = builder.acl(ACL)
    +        .componentCount(COMPONENT_COUNT)
    +        .contentType(CONTENT_TYPE)
    +        .cacheControl(CACHE_CONTROL)
    +        .contentDisposition(CONTENT_DISPOSITION)
    +        .contentEncoding(CONTENT_ENCODING)
    +        .contentLanguage(CONTENT_LANGUAGE)
    +        .crc32c(CRC32)
    +        .deleteTime(DELETE_TIME)
    +        .etag(ETAG)
    +        .id(ID)
    +        .md5(MD5)
    +        .mediaLink(MEDIA_LINK)
    +        .metadata(METADATA)
    +        .metageneration(META_GENERATION)
    +        .owner(OWNER)
    +        .selfLink(SELF_LINK)
    +        .size(SIZE)
    +        .updateTime(UPDATE_TIME)
    +        .build();
    +    assertEquals("b", blob.bucket());
    +    assertEquals("n", blob.name());
    +    assertEquals(ACL, blob.acl());
    +    assertEquals(COMPONENT_COUNT, blob.componentCount());
    +    assertEquals(CONTENT_TYPE, blob.contentType());
    +    assertEquals(CACHE_CONTROL, blob.cacheControl());
    +    assertEquals(CONTENT_DISPOSITION, blob.contentDisposition());
    +    assertEquals(CONTENT_ENCODING, blob.contentEncoding());
    +    assertEquals(CONTENT_LANGUAGE, blob.contentLanguage());
    +    assertEquals(CRC32, blob.crc32c());
    +    assertEquals(DELETE_TIME, blob.deleteTime());
    +    assertEquals(ETAG, blob.etag());
    +    assertEquals(ID, blob.id());
    +    assertEquals(MD5, blob.md5());
    +    assertEquals(MEDIA_LINK, blob.mediaLink());
    +    assertEquals(METADATA, blob.metadata());
    +    assertEquals(META_GENERATION, blob.metageneration());
    +    assertEquals(OWNER, blob.owner());
    +    assertEquals(SELF_LINK, blob.selfLink());
    +    assertEquals(SIZE, blob.size());
    +    assertEquals(UPDATE_TIME, blob.updateTime());
    +    assertFalse(blob.isDirectory());
    +    builder = new Blob.Builder(new Blob(storage, new BlobInfo.BuilderImpl(DIRECTORY_INFO)));
    +    blob = builder.blobId(BlobId.of("b", "n/"))
    +        .isDirectory(true)
    +        .size(0L)
    +        .build();
    +    assertEquals("b", blob.bucket());
    +    assertEquals("n/", blob.name());
    +    assertNull(blob.acl());
    +    assertNull(blob.componentCount());
    +    assertNull(blob.contentType());
    +    assertNull(blob.cacheControl());
    +    assertNull(blob.contentDisposition());
    +    assertNull(blob.contentEncoding());
    +    assertNull(blob.contentLanguage());
    +    assertNull(blob.crc32c());
    +    assertNull(blob.deleteTime());
    +    assertNull(blob.etag());
    +    assertNull(blob.id());
    +    assertNull(blob.md5());
    +    assertNull(blob.mediaLink());
    +    assertNull(blob.metadata());
    +    assertNull(blob.metageneration());
    +    assertNull(blob.owner());
    +    assertNull(blob.selfLink());
    +    assertEquals(0L, (long) blob.size());
    +    assertNull(blob.updateTime());
    +    assertTrue(blob.isDirectory());
       }
     }
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java
    similarity index 79%
    rename from gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java
    rename to gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java
    index 6faa36173ab9..18ec64a9575f 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java
    @@ -32,8 +32,10 @@
     
     import com.google.common.collect.ImmutableMap;
     import com.google.gcloud.RestorableState;
    -import com.google.gcloud.spi.StorageRpc;
    -import com.google.gcloud.spi.StorageRpcFactory;
    +import com.google.gcloud.RetryParams;
    +import com.google.gcloud.WriteChannel;
    +import com.google.gcloud.storage.spi.StorageRpc;
    +import com.google.gcloud.storage.spi.StorageRpcFactory;
     
     import org.easymock.Capture;
     import org.easymock.CaptureType;
    @@ -47,7 +49,7 @@
     import java.util.Map;
     import java.util.Random;
     
    -public class BlobWriteChannelImplTest {
    +public class BlobWriteChannelTest {
     
       private static final String BUCKET_NAME = "b";
       private static final String BLOB_NAME = "n";
    @@ -62,10 +64,10 @@ public class BlobWriteChannelImplTest {
       private StorageOptions options;
       private StorageRpcFactory rpcFactoryMock;
       private StorageRpc storageRpcMock;
    -  private BlobWriteChannelImpl writer;
    +  private BlobWriteChannel writer;
     
       @Before
    -  public void setUp() throws IOException, InterruptedException {
    +  public void setUp() {
         rpcFactoryMock = createMock(StorageRpcFactory.class);
         storageRpcMock = createMock(StorageRpc.class);
         expect(rpcFactoryMock.create(anyObject(StorageOptions.class)))
    @@ -74,6 +76,7 @@ public void setUp() throws IOException, InterruptedException {
         options = StorageOptions.builder()
             .projectId("projectid")
             .serviceRpcFactory(rpcFactoryMock)
    +        .retryParams(RetryParams.noRetries())
             .build();
       }
     
    @@ -86,7 +89,7 @@ public void tearDown() throws Exception {
       public void testCreate() {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         assertTrue(writer.isOpen());
       }
     
    @@ -94,7 +97,7 @@ public void testCreate() {
       public void testWriteWithoutFlush() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)));
       }
     
    @@ -102,10 +105,10 @@ public void testWriteWithoutFlush() throws IOException {
       public void testWriteWithFlush() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(CUSTOM_CHUNK_SIZE), eq(false));
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L),
    +        eq(CUSTOM_CHUNK_SIZE), eq(false));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         writer.chunkSize(CUSTOM_CHUNK_SIZE);
         ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE);
         assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer));
    @@ -116,11 +119,10 @@ public void testWriteWithFlush() throws IOException {
       public void testWritesAndFlush() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(DEFAULT_CHUNK_SIZE),
    -        eq(false));
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L),
    +        eq(DEFAULT_CHUNK_SIZE), eq(false));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE];
         for (int i = 0; i < buffers.length; i++) {
           buffers[i] = randomBuffer(MIN_CHUNK_SIZE);
    @@ -138,10 +140,9 @@ public void testWritesAndFlush() throws IOException {
       public void testCloseWithoutFlush() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(0), eq(true));
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         assertTrue(writer.isOpen());
         writer.close();
         assertArrayEquals(new byte[0], capturedBuffer.getValue());
    @@ -153,11 +154,10 @@ public void testCloseWithFlush() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
         ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE);
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(MIN_CHUNK_SIZE),
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE),
             eq(true));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         assertTrue(writer.isOpen());
         writer.write(buffer);
         writer.close();
    @@ -170,10 +170,9 @@ public void testCloseWithFlush() throws IOException {
       public void testWriteClosed() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(0), eq(true));
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         writer.close();
         try {
           writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE));
    @@ -189,18 +188,17 @@ public void testSaveAndRestore() throws IOException {
         Capture capturedBuffer = Capture.newInstance(CaptureType.ALL);
         Capture capturedPosition = Capture.newInstance(CaptureType.ALL);
         storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), captureLong(capturedPosition),
    -        eq(DEFAULT_CHUNK_SIZE), eq(false));
    +        captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false));
         expectLastCall().times(2);
         replay(storageRpcMock);
         ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE);
         ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1));
         assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0));
         assertEquals(new Long(0L), capturedPosition.getValues().get(0));
    -    RestorableState writerState = writer.capture();
    -    BlobWriteChannel restoredWriter = writerState.restore();
    +    RestorableState writerState = writer.capture();
    +    WriteChannel restoredWriter = writerState.restore();
         assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2));
         assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1));
         assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1));
    @@ -210,33 +208,33 @@ public void testSaveAndRestore() throws IOException {
       public void testSaveAndRestoreClosed() throws IOException {
         expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID);
         Capture capturedBuffer = Capture.newInstance();
    -    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
    -        eq(BLOB_INFO.toPb()), eq(0L), eq(0), eq(true));
    +    storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
         writer.close();
    -    RestorableState writerState = writer.capture();
    -    RestorableState expectedWriterState =
    -        BlobWriteChannelImpl.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID)
    +    RestorableState writerState = writer.capture();
    +    RestorableState expectedWriterState =
    +        BlobWriteChannel.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID)
                 .buffer(null)
                 .chunkSize(DEFAULT_CHUNK_SIZE)
                 .isOpen(false)
                 .position(0)
                 .build();
    -    BlobWriteChannel restoredWriter = writerState.restore();
    +    WriteChannel restoredWriter = writerState.restore();
         assertArrayEquals(new byte[0], capturedBuffer.getValue());
         assertEquals(expectedWriterState, restoredWriter.capture());
       }
     
       @Test
       public void testStateEquals() {
    -    expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID)
    -        .times(2);
    +    expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2);
         replay(storageRpcMock);
    -    writer = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    -    BlobWriteChannel writer2 = new BlobWriteChannelImpl(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    -    RestorableState state = writer.capture();
    -    RestorableState state2 = writer2.capture();
    +    writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    // avoid closing when you don't want partial writes to GCS upon failure
    +    @SuppressWarnings("resource")
    +    WriteChannel writer2 = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS);
    +    RestorableState state = writer.capture();
    +    RestorableState state2 = writer2.capture();
         assertEquals(state, state2);
         assertEquals(state.hashCode(), state2.hashCode());
         assertEquals(state.toString(), state2.toString());
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java
    index 09ba0e8cda8e..bd6bcdbbcff2 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java
    @@ -18,7 +18,6 @@
     
     import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS;
     import static org.junit.Assert.assertEquals;
    -import static org.junit.Assert.assertSame;
     import static org.junit.Assert.assertTrue;
     
     import com.google.api.services.storage.model.Bucket.Lifecycle.Rule;
    @@ -31,10 +30,8 @@
     import com.google.gcloud.storage.BucketInfo.DeleteRule;
     import com.google.gcloud.storage.BucketInfo.DeleteRule.Type;
     import com.google.gcloud.storage.BucketInfo.IsLiveDeleteRule;
    -import com.google.gcloud.storage.BucketInfo.Location;
     import com.google.gcloud.storage.BucketInfo.NumNewerVersionsDeleteRule;
     import com.google.gcloud.storage.BucketInfo.RawDeleteRule;
    -import com.google.gcloud.storage.BucketInfo.StorageClass;
     
     import org.junit.Test;
     
    @@ -44,8 +41,8 @@
     public class BucketInfoTest {
     
       private static final List ACL = ImmutableList.of(
    -      new Acl(User.ofAllAuthenticatedUsers(), Role.READER),
    -      new Acl(new Project(VIEWERS, "p1"), Role.WRITER));
    +      Acl.of(User.ofAllAuthenticatedUsers(), Role.READER),
    +      Acl.of(new Project(VIEWERS, "p1"), Role.WRITER));
       private static final String ETAG = "0xFF00";
       private static final String ID = "B/N:1";
       private static final Long META_GENERATION = 10L;
    @@ -54,13 +51,13 @@ public class BucketInfoTest {
       private static final Long CREATE_TIME = System.currentTimeMillis();
       private static final List CORS = Collections.singletonList(Cors.builder().build());
       private static final List DEFAULT_ACL =
    -      Collections.singletonList(new Acl(User.ofAllAuthenticatedUsers(), Role.WRITER));
    +      Collections.singletonList(Acl.of(User.ofAllAuthenticatedUsers(), Role.WRITER));
       private static final List DELETE_RULES =
           Collections.singletonList(new AgeDeleteRule(5));
       private static final String INDEX_PAGE = "index.html";
       private static final String NOT_FOUND_PAGE = "error.html";
    -  private static final Location LOCATION = Location.asia();
    -  private static final StorageClass STORAGE_CLASS = StorageClass.standard();
    +  private static final String LOCATION = "ASIA";
    +  private static final String STORAGE_CLASS = "STANDARD";
       private static final Boolean VERSIONING_ENABLED = true;
       private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b")
           .acl(ACL)
    @@ -93,7 +90,7 @@ public void testToBuilder() {
       @Test
       public void testToBuilderIncomplete() {
         BucketInfo incompleteBucketInfo = BucketInfo.builder("b").build();
    -    assertEquals(incompleteBucketInfo.name(), incompleteBucketInfo.toBuilder().build().name());
    +    compareBuckets(incompleteBucketInfo, incompleteBucketInfo.toBuilder().build());
       }
     
       @Test
    @@ -149,16 +146,6 @@ private void compareBuckets(BucketInfo expected, BucketInfo value) {
         assertEquals(expected.versioningEnabled(), value.versioningEnabled());
       }
     
    -  @Test
    -  public void testLocation() {
    -    assertEquals("ASIA", Location.asia().value());
    -    assertEquals("EU", Location.eu().value());
    -    assertEquals("US", Location.us().value());
    -    assertSame(Location.asia(), Location.of("asia"));
    -    assertSame(Location.eu(), Location.of("EU"));
    -    assertSame(Location.us(), Location.of("uS"));
    -  }
    -
       @Test
       public void testDeleteRules() {
         AgeDeleteRule ageRule = new AgeDeleteRule(10);
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java
    index 370850a5b6d4..236411e0c2d8 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java
    @@ -16,23 +16,35 @@
     
     package com.google.gcloud.storage;
     
    +import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS;
    +import static com.google.gcloud.storage.Acl.Role.READER;
    +import static com.google.gcloud.storage.Acl.Role.WRITER;
     import static org.easymock.EasyMock.capture;
    +import static org.easymock.EasyMock.createMock;
     import static org.easymock.EasyMock.createStrictMock;
     import static org.easymock.EasyMock.expect;
     import static org.easymock.EasyMock.replay;
     import static org.easymock.EasyMock.verify;
     import static org.junit.Assert.assertEquals;
     import static org.junit.Assert.assertFalse;
    -import static org.junit.Assert.assertSame;
    +import static org.junit.Assert.assertNull;
     import static org.junit.Assert.assertTrue;
     
     import com.google.common.collect.ImmutableList;
    +import com.google.gcloud.Page;
    +import com.google.gcloud.PageImpl;
    +import com.google.gcloud.storage.Acl.Project;
    +import com.google.gcloud.storage.Acl.User;
     import com.google.gcloud.storage.BatchResponse.Result;
    +import com.google.gcloud.storage.BucketInfo.AgeDeleteRule;
    +import com.google.gcloud.storage.BucketInfo.DeleteRule;
     
     import org.easymock.Capture;
     import org.junit.After;
     import org.junit.Before;
    +import org.junit.Rule;
     import org.junit.Test;
    +import org.junit.rules.ExpectedException;
     
     import java.io.ByteArrayInputStream;
     import java.io.InputStream;
    @@ -44,20 +56,57 @@
     
     public class BucketTest {
     
    -  private static final BucketInfo BUCKET_INFO = BucketInfo.of("b");
    -  private static final Iterable BLOB_INFO_RESULTS = ImmutableList.of(
    -      BlobInfo.builder("b", "n1").build(),
    -      BlobInfo.builder("b", "n2").build(),
    -      BlobInfo.builder("b", "n3").build());
    +  private static final List ACL = ImmutableList.of(
    +      Acl.of(User.ofAllAuthenticatedUsers(), READER), Acl.of(new Project(VIEWERS, "p1"), WRITER));
    +  private static final String ETAG = "0xFF00";
    +  private static final String ID = "B/N:1";
    +  private static final Long META_GENERATION = 10L;
    +  private static final User OWNER = new User("user@gmail.com");
    +  private static final String SELF_LINK = "http://storage/b/n";
    +  private static final Long CREATE_TIME = System.currentTimeMillis();
    +  private static final List CORS = Collections.singletonList(Cors.builder().build());
    +  private static final List DEFAULT_ACL =
    +      Collections.singletonList(Acl.of(User.ofAllAuthenticatedUsers(), WRITER));
    +  private static final List DELETE_RULES =
    +      Collections.singletonList(new AgeDeleteRule(5));
    +  private static final String INDEX_PAGE = "index.html";
    +  private static final String NOT_FOUND_PAGE = "error.html";
    +  private static final String LOCATION = "ASIA";
    +  private static final String STORAGE_CLASS = "STANDARD";
    +  private static final Boolean VERSIONING_ENABLED = true;
    +  private static final BucketInfo FULL_BUCKET_INFO = BucketInfo.builder("b")
    +      .acl(ACL)
    +      .etag(ETAG)
    +      .id(ID)
    +      .metageneration(META_GENERATION)
    +      .owner(OWNER)
    +      .selfLink(SELF_LINK)
    +      .cors(CORS)
    +      .createTime(CREATE_TIME)
    +      .defaultAcl(DEFAULT_ACL)
    +      .deleteRules(DELETE_RULES)
    +      .indexPage(INDEX_PAGE)
    +      .notFoundPage(NOT_FOUND_PAGE)
    +      .location(LOCATION)
    +      .storageClass(STORAGE_CLASS)
    +      .versioningEnabled(VERSIONING_ENABLED)
    +      .build();
    +  private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b").metageneration(42L).build();
       private static final String CONTENT_TYPE = "text/plain";
     
       private Storage storage;
    +  private Storage serviceMockReturnsOptions = createMock(Storage.class);
    +  private StorageOptions mockOptions = createMock(StorageOptions.class);
       private Bucket bucket;
    +  private Bucket expectedBucket;
    +  private Iterable blobResults;
    +
    +  @Rule
    +  public ExpectedException thrown = ExpectedException.none();
     
       @Before
    -  public void setUp() throws Exception {
    +  public void setUp() {
         storage = createStrictMock(Storage.class);
    -    bucket = new Bucket(storage, BUCKET_INFO);
       }
     
       @After
    @@ -65,100 +114,165 @@ public void tearDown() throws Exception {
         verify(storage);
       }
     
    -  @Test
    -  public void testInfo() throws Exception {
    -    assertEquals(BUCKET_INFO, bucket.info());
    -    replay(storage);
    +  private void initializeExpectedBucket(int optionsCalls) {
    +    expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls);
    +    replay(serviceMockReturnsOptions);
    +    expectedBucket = new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(BUCKET_INFO));
    +    blobResults = ImmutableList.of(
    +        new Blob(serviceMockReturnsOptions,
    +            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n1").build())),
    +        new Blob(serviceMockReturnsOptions,
    +            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n2").build())),
    +        new Blob(serviceMockReturnsOptions,
    +            new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n3").build())));
    +  }
    +
    +  private void initializeBucket() {
    +    bucket = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO));
       }
     
       @Test
       public void testExists_True() throws Exception {
    -    expect(storage.get(BUCKET_INFO.name())).andReturn(BUCKET_INFO);
    +    initializeExpectedBucket(4);
    +    Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()};
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(expectedBucket);
         replay(storage);
    +    initializeBucket();
         assertTrue(bucket.exists());
       }
     
       @Test
       public void testExists_False() throws Exception {
    -    expect(storage.get(BUCKET_INFO.name())).andReturn(null);
    +    initializeExpectedBucket(4);
    +    Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()};
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(null);
         replay(storage);
    +    initializeBucket();
         assertFalse(bucket.exists());
       }
     
       @Test
       public void testReload() throws Exception {
    +    initializeExpectedBucket(5);
         BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build();
    -    expect(storage.get(updatedInfo.name())).andReturn(updatedInfo);
    +    Bucket expectedUpdatedBucket =
    +        new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(updatedInfo));
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(updatedInfo.name())).andReturn(expectedUpdatedBucket);
         replay(storage);
    +    initializeBucket();
         Bucket updatedBucket = bucket.reload();
    -    assertSame(storage, bucket.storage());
    -    assertEquals(updatedInfo, updatedBucket.info());
    +    assertEquals(expectedUpdatedBucket, updatedBucket);
       }
     
       @Test
    -  public void testUpdate() throws Exception {
    +  public void testReloadNull() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BUCKET_INFO.name())).andReturn(null);
    +    replay(storage);
    +    initializeBucket();
    +    assertNull(bucket.reload());
    +  }
    +
    +  @Test
    +  public void testReloadWithOptions() throws Exception {
    +    initializeExpectedBucket(5);
         BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build();
    -    expect(storage.update(updatedInfo)).andReturn(updatedInfo);
    +    Bucket expectedUpdatedBucket =
    +        new Bucket(serviceMockReturnsOptions, new BucketInfo.BuilderImpl(updatedInfo));
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(updatedInfo.name(), Storage.BucketGetOption.metagenerationMatch(42L)))
    +        .andReturn(expectedUpdatedBucket);
    +    replay(storage);
    +    initializeBucket();
    +    Bucket updatedBucket = bucket.reload(Bucket.BucketSourceOption.metagenerationMatch());
    +    assertEquals(expectedUpdatedBucket, updatedBucket);
    +  }
    +
    +  @Test
    +  public void testUpdate() throws Exception {
    +    initializeExpectedBucket(5);
    +    Bucket expectedUpdatedBucket = expectedBucket.toBuilder().notFoundPage("p").build();
    +    expect(storage.options()).andReturn(mockOptions).times(2);
    +    expect(storage.update(expectedUpdatedBucket)).andReturn(expectedUpdatedBucket);
         replay(storage);
    -    Bucket updatedBucket = bucket.update(updatedInfo);
    -    assertSame(storage, bucket.storage());
    -    assertEquals(updatedInfo, updatedBucket.info());
    +    initializeBucket();
    +    Bucket updatedBucket = new Bucket(storage, new BucketInfo.BuilderImpl(expectedUpdatedBucket));
    +    Bucket actualUpdatedBucket = updatedBucket.update();
    +    assertEquals(expectedUpdatedBucket, actualUpdatedBucket);
       }
     
       @Test
       public void testDelete() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
         expect(storage.delete(BUCKET_INFO.name())).andReturn(true);
         replay(storage);
    +    initializeBucket();
         assertTrue(bucket.delete());
       }
     
       @Test
       public void testList() throws Exception {
    -    BaseListResult blobInfoResult = new BaseListResult<>(null, "c", BLOB_INFO_RESULTS);
    -    expect(storage.list(BUCKET_INFO.name())).andReturn(blobInfoResult);
    +    initializeExpectedBucket(4);
    +    PageImpl expectedBlobPage = new PageImpl<>(null, "c", blobResults);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.list(BUCKET_INFO.name())).andReturn(expectedBlobPage);
         replay(storage);
    -    ListResult blobResult = bucket.list();
    -    Iterator blobInfoIterator = blobInfoResult.iterator();
    -    Iterator blobIterator = blobResult.iterator();
    +    initializeBucket();
    +    Page blobPage = bucket.list();
    +    Iterator blobInfoIterator = blobPage.values().iterator();
    +    Iterator blobIterator = blobPage.values().iterator();
         while (blobInfoIterator.hasNext() && blobIterator.hasNext()) {
    -      assertEquals(blobInfoIterator.next(), blobIterator.next().info());
    +      assertEquals(blobInfoIterator.next(), blobIterator.next());
         }
         assertFalse(blobInfoIterator.hasNext());
         assertFalse(blobIterator.hasNext());
    -    assertEquals(blobInfoResult.nextPageCursor(), blobResult.nextPageCursor());
    +    assertEquals(expectedBlobPage.nextPageCursor(), blobPage.nextPageCursor());
       }
     
       @Test
       public void testGet() throws Exception {
    -    BlobInfo info = BlobInfo.builder("b", "n").build();
    -    expect(storage.get(BlobId.of(bucket.info().name(), "n"), new Storage.BlobSourceOption[0]))
    -        .andReturn(info);
    +    initializeExpectedBucket(5);
    +    Blob expectedBlob = new Blob(
    +        serviceMockReturnsOptions, new BlobInfo.BuilderImpl(BlobInfo.builder("b", "n").build()));
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.get(BlobId.of(expectedBucket.name(), "n"), new Storage.BlobGetOption[0]))
    +        .andReturn(expectedBlob);
         replay(storage);
    +    initializeBucket();
         Blob blob = bucket.get("n");
    -    assertEquals(info, blob.info());
    +    assertEquals(expectedBlob, blob);
       }
     
       @Test
       public void testGetAll() throws Exception {
    +    initializeExpectedBucket(4);
         Capture capturedBatchRequest = Capture.newInstance();
    -    List> batchResultList = new LinkedList<>();
    -    for (BlobInfo info : BLOB_INFO_RESULTS) {
    +    List> batchResultList = new LinkedList<>();
    +    for (Blob info : blobResults) {
           batchResultList.add(new Result<>(info));
         }
    -    BatchResponse response =
    -        new BatchResponse(Collections.EMPTY_LIST, Collections.EMPTY_LIST, batchResultList);
    -    expect(storage.apply(capture(capturedBatchRequest))).andReturn(response);
    +    BatchResponse response = new BatchResponse(Collections.>emptyList(),
    +        Collections.>emptyList(), batchResultList);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.submit(capture(capturedBatchRequest))).andReturn(response);
    +    expect(storage.options()).andReturn(mockOptions).times(3);
         replay(storage);
    +    initializeBucket();
         List blobs = bucket.get("n1", "n2", "n3");
         Set blobInfoSet = capturedBatchRequest.getValue().toGet().keySet();
         assertEquals(batchResultList.size(), blobInfoSet.size());
    -    for (BlobInfo info : BLOB_INFO_RESULTS) {
    +    for (BlobInfo info : blobResults) {
           assertTrue(blobInfoSet.contains(info.blobId()));
         }
         Iterator blobIterator = blobs.iterator();
    -    Iterator> batchResultIterator = response.gets().iterator();
    +    Iterator> batchResultIterator = response.gets().iterator();
         while (batchResultIterator.hasNext() && blobIterator.hasNext()) {
    -      assertEquals(batchResultIterator.next().get(), blobIterator.next().info());
    +      assertEquals(batchResultIterator.next().get(), blobIterator.next());
         }
         assertFalse(batchResultIterator.hasNext());
         assertFalse(blobIterator.hasNext());
    @@ -166,51 +280,251 @@ public void testGetAll() throws Exception {
     
       @Test
       public void testCreate() throws Exception {
    +    initializeExpectedBucket(5);
         BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
         byte[] content = {0xD, 0xE, 0xA, 0xD};
    -    expect(storage.create(info, content)).andReturn(info);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, content)).andReturn(expectedBlob);
         replay(storage);
    +    initializeBucket();
         Blob blob = bucket.create("n", content, CONTENT_TYPE);
    -    assertEquals(info, blob.info());
    +    assertEquals(expectedBlob, blob);
       }
     
       @Test
       public void testCreateNullContentType() throws Exception {
    +    initializeExpectedBucket(5);
         BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
         byte[] content = {0xD, 0xE, 0xA, 0xD};
    -    expect(storage.create(info, content)).andReturn(info);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, content)).andReturn(expectedBlob);
         replay(storage);
    +    initializeBucket();
         Blob blob = bucket.create("n", content, null);
    -    assertEquals(info, blob.info());
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateWithOptions() throws Exception {
    +    initializeExpectedBucket(5);
    +    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 42L))
    +        .contentType(CONTENT_TYPE)
    +        .metageneration(24L)
    +        .build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    Storage.PredefinedAcl acl = Storage.PredefinedAcl.ALL_AUTHENTICATED_USERS;
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, content, Storage.BlobTargetOption.generationMatch(),
    +        Storage.BlobTargetOption.metagenerationMatch(),
    +        Storage.BlobTargetOption.predefinedAcl(acl))).andReturn(expectedBlob);
    +    replay(storage);
    +    initializeBucket();
    +    Blob blob = bucket.create("n", content, CONTENT_TYPE,
    +        Bucket.BlobTargetOption.generationMatch(42L),
    +        Bucket.BlobTargetOption.metagenerationMatch(24L),
    +        Bucket.BlobTargetOption.predefinedAcl(acl));
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateNotExists() throws Exception {
    +    initializeExpectedBucket(5);
    +    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 0L)).contentType(CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, content, Storage.BlobTargetOption.generationMatch()))
    +        .andReturn(expectedBlob);
    +    replay(storage);
    +    initializeBucket();
    +    Blob blob = bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.doesNotExist());
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateWithWrongGenerationOptions() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
    +    replay(storage);
    +    initializeBucket();
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    thrown.expect(IllegalArgumentException.class);
    +    thrown.expectMessage(
    +        "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided");
    +    bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.generationMatch(42L),
    +        Bucket.BlobTargetOption.generationNotMatch(24L));
    +  }
    +
    +  @Test
    +  public void testCreateWithWrongMetagenerationOptions() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
    +    replay(storage);
    +    initializeBucket();
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    thrown.expect(IllegalArgumentException.class);
    +    thrown.expectMessage(
    +        "metagenerationMatch and metagenerationNotMatch options can not be both provided");
    +    bucket.create("n", content, CONTENT_TYPE, Bucket.BlobTargetOption.metagenerationMatch(42L),
    +        Bucket.BlobTargetOption.metagenerationNotMatch(24L));
       }
     
       @Test
       public void testCreateFromStream() throws Exception {
    +    initializeExpectedBucket(5);
         BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
         byte[] content = {0xD, 0xE, 0xA, 0xD};
         InputStream streamContent = new ByteArrayInputStream(content);
    -    expect(storage.create(info, streamContent)).andReturn(info);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, streamContent)).andReturn(expectedBlob);
         replay(storage);
    +    initializeBucket();
         Blob blob = bucket.create("n", streamContent, CONTENT_TYPE);
    -    assertEquals(info, blob.info());
    +    assertEquals(expectedBlob, blob);
       }
     
       @Test
       public void testCreateFromStreamNullContentType() throws Exception {
    +    initializeExpectedBucket(5);
         BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
         byte[] content = {0xD, 0xE, 0xA, 0xD};
         InputStream streamContent = new ByteArrayInputStream(content);
    -    expect(storage.create(info, streamContent)).andReturn(info);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, streamContent)).andReturn(expectedBlob);
         replay(storage);
    +    initializeBucket();
         Blob blob = bucket.create("n", streamContent, null);
    -    assertEquals(info, blob.info());
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateFromStreamWithOptions() throws Exception {
    +    initializeExpectedBucket(5);
    +    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 42L))
    +        .contentType(CONTENT_TYPE)
    +        .metageneration(24L)
    +        .crc32c("crc")
    +        .md5("md5")
    +        .build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    Storage.PredefinedAcl acl = Storage.PredefinedAcl.ALL_AUTHENTICATED_USERS;
    +    InputStream streamContent = new ByteArrayInputStream(content);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, streamContent, Storage.BlobWriteOption.generationMatch(),
    +        Storage.BlobWriteOption.metagenerationMatch(), Storage.BlobWriteOption.predefinedAcl(acl),
    +        Storage.BlobWriteOption.crc32cMatch(), Storage.BlobWriteOption.md5Match()))
    +        .andReturn(expectedBlob);
    +    replay(storage);
    +    initializeBucket();
    +    Blob blob = bucket.create("n", streamContent, CONTENT_TYPE,
    +        Bucket.BlobWriteOption.generationMatch(42L),
    +        Bucket.BlobWriteOption.metagenerationMatch(24L), Bucket.BlobWriteOption.predefinedAcl(acl),
    +        Bucket.BlobWriteOption.crc32cMatch("crc"), Bucket.BlobWriteOption.md5Match("md5"));
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateFromStreamNotExists() throws Exception {
    +    initializeExpectedBucket(5);
    +    BlobInfo info = BlobInfo.builder(BlobId.of("b", "n", 0L)).contentType(CONTENT_TYPE).build();
    +    Blob expectedBlob = new Blob(serviceMockReturnsOptions, new BlobInfo.BuilderImpl(info));
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    InputStream streamContent = new ByteArrayInputStream(content);
    +    expect(storage.options()).andReturn(mockOptions);
    +    expect(storage.create(info, streamContent, Storage.BlobWriteOption.generationMatch()))
    +        .andReturn(expectedBlob);
    +    replay(storage);
    +    initializeBucket();
    +    Blob blob =
    +        bucket.create("n", streamContent, CONTENT_TYPE, Bucket.BlobWriteOption.doesNotExist());
    +    assertEquals(expectedBlob, blob);
    +  }
    +
    +  @Test
    +  public void testCreateFromStreamWithWrongGenerationOptions() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
    +    replay(storage);
    +    initializeBucket();
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    InputStream streamContent = new ByteArrayInputStream(content);
    +    thrown.expect(IllegalArgumentException.class);
    +    thrown.expectMessage(
    +        "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided");
    +    bucket.create("n", streamContent, CONTENT_TYPE, Bucket.BlobWriteOption.generationMatch(42L),
    +        Bucket.BlobWriteOption.generationNotMatch(24L));
    +  }
    +
    +  @Test
    +  public void testCreateFromStreamWithWrongMetagenerationOptions() throws Exception {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions);
    +    replay(storage);
    +    initializeBucket();
    +    byte[] content = {0xD, 0xE, 0xA, 0xD};
    +    InputStream streamContent = new ByteArrayInputStream(content);
    +    thrown.expect(IllegalArgumentException.class);
    +    thrown.expectMessage(
    +        "metagenerationMatch and metagenerationNotMatch options can not be both provided");
    +    bucket.create("n", streamContent, CONTENT_TYPE, Bucket.BlobWriteOption.metagenerationMatch(42L),
    +        Bucket.BlobWriteOption.metagenerationNotMatch(24L));
    +  }
    +
    +  @Test
    +  public void testToBuilder() {
    +    expect(storage.options()).andReturn(mockOptions).times(4);
    +    replay(storage);
    +    Bucket fullBucket = new Bucket(storage, new BucketInfo.BuilderImpl(FULL_BUCKET_INFO));
    +    assertEquals(fullBucket, fullBucket.toBuilder().build());
    +    Bucket simpleBlob = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO));
    +    assertEquals(simpleBlob, simpleBlob.toBuilder().build());
       }
     
       @Test
    -  public void testLoad() throws Exception {
    -    expect(storage.get(BUCKET_INFO.name())).andReturn(BUCKET_INFO);
    +  public void testBuilder() {
    +    initializeExpectedBucket(4);
    +    expect(storage.options()).andReturn(mockOptions).times(4);
         replay(storage);
    -    Bucket loadedBucket = Bucket.load(storage, BUCKET_INFO.name());
    -    assertEquals(BUCKET_INFO, loadedBucket.info());
    +    Bucket.Builder builder =
    +        new Bucket.Builder(new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO)));
    +    Bucket bucket = builder.acl(ACL)
    +        .etag(ETAG)
    +        .id(ID)
    +        .metageneration(META_GENERATION)
    +        .owner(OWNER)
    +        .selfLink(SELF_LINK)
    +        .cors(CORS)
    +        .createTime(CREATE_TIME)
    +        .defaultAcl(DEFAULT_ACL)
    +        .deleteRules(DELETE_RULES)
    +        .indexPage(INDEX_PAGE)
    +        .notFoundPage(NOT_FOUND_PAGE)
    +        .location(LOCATION)
    +        .storageClass(STORAGE_CLASS)
    +        .versioningEnabled(VERSIONING_ENABLED)
    +        .build();
    +    assertEquals("b", bucket.name());
    +    assertEquals(ACL, bucket.acl());
    +    assertEquals(ETAG, bucket.etag());
    +    assertEquals(ID, bucket.id());
    +    assertEquals(META_GENERATION, bucket.metageneration());
    +    assertEquals(OWNER, bucket.owner());
    +    assertEquals(SELF_LINK, bucket.selfLink());
    +    assertEquals(CREATE_TIME, bucket.createTime());
    +    assertEquals(CORS, bucket.cors());
    +    assertEquals(DEFAULT_ACL, bucket.defaultAcl());
    +    assertEquals(DELETE_RULES, bucket.deleteRules());
    +    assertEquals(INDEX_PAGE, bucket.indexPage());
    +    assertEquals(NOT_FOUND_PAGE, bucket.notFoundPage());
    +    assertEquals(LOCATION, bucket.location());
    +    assertEquals(STORAGE_CLASS, bucket.storageClass());
    +    assertEquals(VERSIONING_ENABLED, bucket.versioningEnabled());
    +    assertEquals(storage.options(), bucket.storage().options());
       }
     }
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java
    index 0fcdb744c244..ad4a04c34127 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java
    @@ -21,22 +21,22 @@
     import static org.easymock.EasyMock.expect;
     import static org.easymock.EasyMock.replay;
     import static org.easymock.EasyMock.verify;
    -import static org.junit.Assert.assertTrue;
     import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertTrue;
     
     import com.google.common.collect.ImmutableMap;
     import com.google.gcloud.RestorableState;
    -import com.google.gcloud.spi.StorageRpc;
    -import com.google.gcloud.spi.StorageRpc.RewriteRequest;
    -import com.google.gcloud.spi.StorageRpc.RewriteResponse;
    -import com.google.gcloud.spi.StorageRpcFactory;
    +import com.google.gcloud.RetryParams;
    +import com.google.gcloud.storage.spi.StorageRpc;
    +import com.google.gcloud.storage.spi.StorageRpc.RewriteRequest;
    +import com.google.gcloud.storage.spi.StorageRpc.RewriteResponse;
    +import com.google.gcloud.storage.spi.StorageRpcFactory;
     
     import org.easymock.EasyMock;
     import org.junit.After;
    -import org.junit.Test;
     import org.junit.Before;
    +import org.junit.Test;
     
    -import java.io.IOException;
     import java.util.Map;
     
     public class CopyWriterTest {
    @@ -64,7 +64,7 @@ public class CopyWriterTest {
       private CopyWriter copyWriter;
     
       @Before
    -  public void setUp() throws IOException, InterruptedException {
    +  public void setUp() {
         rpcFactoryMock = createMock(StorageRpcFactory.class);
         storageRpcMock = createMock(StorageRpc.class);
         expect(rpcFactoryMock.create(anyObject(StorageOptions.class)))
    @@ -73,6 +73,7 @@ public void setUp() throws IOException, InterruptedException {
         options = StorageOptions.builder()
             .projectId("projectid")
             .serviceRpcFactory(rpcFactoryMock)
    +        .retryParams(RetryParams.noRetries())
             .build();
       }
     
    @@ -105,7 +106,7 @@ public void testRewriteMultipleRequests() {
       }
     
       @Test
    -  public void testSaveAndRestore() throws IOException {
    +  public void testSaveAndRestore() {
         EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE);
         EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE);
         EasyMock.replay(storageRpcMock);
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
    deleted file mode 100644
    index 3aad7b712e48..000000000000
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java
    +++ /dev/null
    @@ -1,674 +0,0 @@
    -/*
    - * Copyright 2015 Google Inc. All Rights Reserved.
    - *
    - * Licensed under the Apache License, Version 2.0 (the "License");
    - * you may not use this file except in compliance with the License.
    - * You may obtain a copy of the License at
    - *
    - *       http://www.apache.org/licenses/LICENSE-2.0
    - *
    - * Unless required by applicable law or agreed to in writing, software
    - * distributed under the License is distributed on an "AS IS" BASIS,
    - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    - * See the License for the specific language governing permissions and
    - * limitations under the License.
    - */
    -
    -package com.google.gcloud.storage;
    -
    -import static java.nio.charset.StandardCharsets.UTF_8;
    -import static org.junit.Assert.assertArrayEquals;
    -import static org.junit.Assert.assertEquals;
    -import static org.junit.Assert.assertNotNull;
    -import static org.junit.Assert.assertNull;
    -import static org.junit.Assert.assertTrue;
    -import static org.junit.Assert.fail;
    -
    -import com.google.common.collect.ImmutableList;
    -import com.google.common.collect.ImmutableMap;
    -import com.google.gcloud.RestorableState;
    -import com.google.gcloud.storage.testing.RemoteGcsHelper;
    -
    -import org.junit.AfterClass;
    -import org.junit.BeforeClass;
    -import org.junit.Test;
    -
    -import java.io.ByteArrayInputStream;
    -import java.io.IOException;
    -import java.io.InputStream;
    -import java.io.UnsupportedEncodingException;
    -import java.net.URL;
    -import java.net.URLConnection;
    -import java.nio.ByteBuffer;
    -import java.util.Arrays;
    -import java.util.HashMap;
    -import java.util.Iterator;
    -import java.util.List;
    -import java.util.Map;
    -import java.util.concurrent.ExecutionException;
    -import java.util.concurrent.TimeUnit;
    -import java.util.concurrent.TimeoutException;
    -import java.util.logging.Level;
    -import java.util.logging.Logger;
    -
    -public class ITStorageTest {
    -
    -  private static Storage storage;
    -
    -  private static final Logger log = Logger.getLogger(ITStorageTest.class.getName());
    -  private static final String BUCKET = RemoteGcsHelper.generateBucketName();
    -  private static final String CONTENT_TYPE = "text/plain";
    -  private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD};
    -  private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!";
    -
    -  @BeforeClass
    -  public static void beforeClass() {
    -    RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
    -    storage = gcsHelper.options().service();
    -    storage.create(BucketInfo.of(BUCKET));
    -  }
    -
    -  @AfterClass
    -  public static void afterClass()
    -      throws ExecutionException, TimeoutException, InterruptedException {
    -    if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 5, TimeUnit.SECONDS)) {
    -      if (log.isLoggable(Level.WARNING)) {
    -        log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET);
    -      }
    -    }
    -  }
    -
    -  @Test(timeout = 5000)
    -  public void testListBuckets() throws InterruptedException {
    -    Iterator bucketIterator =
    -        storage.list(Storage.BucketListOption.prefix(BUCKET)).iterator();
    -    while (!bucketIterator.hasNext()) {
    -      Thread.sleep(500);
    -      bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET)).iterator();
    -    }
    -    while (bucketIterator.hasNext()) {
    -      assertTrue(bucketIterator.next().name().startsWith(BUCKET));
    -    }
    -  }
    -
    -  @Test
    -  public void testCreateBlob() {
    -    String blobName = "test-create-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    BlobInfo remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
    -    assertNotNull(remoteBlob);
    -    assertEquals(blob.blobId(), remoteBlob.blobId());
    -    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    -    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testCreateEmptyBlob() {
    -    String blobName = "test-create-empty-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    BlobInfo remoteBlob = storage.create(blob);
    -    assertNotNull(remoteBlob);
    -    assertEquals(blob.blobId(), remoteBlob.blobId());
    -    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    -    assertArrayEquals(new byte[0], readBytes);
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testCreateBlobStream() throws UnsupportedEncodingException {
    -    String blobName = "test-create-blob-stream";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build();
    -    ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
    -    BlobInfo remoteBlob = storage.create(blob, stream);
    -    assertNotNull(remoteBlob);
    -    assertEquals(blob.blobId(), remoteBlob.blobId());
    -    assertEquals(blob.contentType(), remoteBlob.contentType());
    -    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    -    assertEquals(BLOB_STRING_CONTENT, new String(readBytes, UTF_8));
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testCreateBlobFail() {
    -    String blobName = "test-create-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    try {
    -      storage.create(blob.toBuilder().generation(-1L).build(), BLOB_BYTE_CONTENT,
    -          Storage.BlobTargetOption.generationMatch());
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testCreateBlobMd5Fail() throws UnsupportedEncodingException {
    -    String blobName = "test-create-blob-md5-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    -        .contentType(CONTENT_TYPE)
    -        .md5("O1R4G1HJSDUISJjoIYmVhQ==")
    -        .build();
    -    ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
    -    try {
    -      storage.create(blob, stream, Storage.BlobWriteOption.md5Match());
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -  }
    -
    -  @Test
    -  public void testUpdateBlob() {
    -    String blobName = "test-update-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    BlobInfo updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build());
    -    assertNotNull(updatedBlob);
    -    assertEquals(blob.blobId(), updatedBlob.blobId());
    -    assertEquals(CONTENT_TYPE, updatedBlob.contentType());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobReplaceMetadata() {
    -    String blobName = "test-update-blob-replace-metadata";
    -    ImmutableMap metadata = ImmutableMap.of("k1", "a");
    -    ImmutableMap newMetadata = ImmutableMap.of("k2", "b");
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    -        .contentType(CONTENT_TYPE)
    -        .metadata(metadata)
    -        .build();
    -    assertNotNull(storage.create(blob));
    -    BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(null).build());
    -    assertNotNull(updatedBlob);
    -    assertNull(updatedBlob.metadata());
    -    updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build());
    -    assertEquals(blob.blobId(), updatedBlob.blobId());
    -    assertEquals(newMetadata, updatedBlob.metadata());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobMergeMetadata() {
    -    String blobName = "test-update-blob-merge-metadata";
    -    ImmutableMap metadata = ImmutableMap.of("k1", "a");
    -    ImmutableMap newMetadata = ImmutableMap.of("k2", "b");
    -    ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a", "k2", "b");
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    -        .contentType(CONTENT_TYPE)
    -        .metadata(metadata)
    -        .build();
    -    assertNotNull(storage.create(blob));
    -    BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build());
    -    assertNotNull(updatedBlob);
    -    assertEquals(blob.blobId(), updatedBlob.blobId());
    -    assertEquals(expectedMetadata, updatedBlob.metadata());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobUnsetMetadata() {
    -    String blobName = "test-update-blob-unset-metadata";
    -    ImmutableMap metadata = ImmutableMap.of("k1", "a", "k2", "b");
    -    Map newMetadata = new HashMap<>();
    -    newMetadata.put("k1", "a");
    -    newMetadata.put("k2", null);
    -    ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a");
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    -        .contentType(CONTENT_TYPE)
    -        .metadata(metadata)
    -        .build();
    -    assertNotNull(storage.create(blob));
    -    BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build());
    -    assertNotNull(updatedBlob);
    -    assertEquals(blob.blobId(), updatedBlob.blobId());
    -    assertEquals(expectedMetadata, updatedBlob.metadata());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobFail() {
    -    String blobName = "test-update-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    try {
    -      storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(-1L).build(),
    -          Storage.BlobTargetOption.generationMatch());
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testDeleteNonExistingBlob() {
    -    String blobName = "test-delete-non-existing-blob";
    -    assertTrue(!storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testDeleteBlobFail() {
    -    String blobName = "test-delete-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    try {
    -      storage.delete(BUCKET, blob.name(), Storage.BlobSourceOption.generationMatch(-1L));
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, blob.name()));
    -  }
    -
    -  @Test
    -  public void testComposeBlob() {
    -    String sourceBlobName1 = "test-compose-blob-source-1";
    -    String sourceBlobName2 = "test-compose-blob-source-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1, BLOB_BYTE_CONTENT));
    -    assertNotNull(storage.create(sourceBlob2, BLOB_BYTE_CONTENT));
    -    String targetBlobName = "test-compose-blob-target";
    -    BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build();
    -    Storage.ComposeRequest req =
    -        Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob);
    -    BlobInfo remoteBlob = storage.compose(req);
    -    assertNotNull(remoteBlob);
    -    assertEquals(targetBlob.blobId(), remoteBlob.blobId());
    -    byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName);
    -    byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2);
    -    System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length,
    -        BLOB_BYTE_CONTENT.length);
    -    assertArrayEquals(composedBytes, readBytes);
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName2));
    -    assertTrue(storage.delete(BUCKET, targetBlobName));
    -  }
    -
    -  @Test
    -  public void testComposeBlobFail() {
    -    String sourceBlobName1 = "test-compose-blob-fail-source-1";
    -    String sourceBlobName2 = "test-compose-blob-fail-source-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    assertNotNull(storage.create(sourceBlob2));
    -    String targetBlobName = "test-compose-blob-fail-target";
    -    BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build();
    -    Storage.ComposeRequest req = Storage.ComposeRequest.builder()
    -        .addSource(sourceBlobName1, -1L)
    -        .addSource(sourceBlobName2, -1L)
    -        .target(targetBlob)
    -        .build();
    -    try {
    -      storage.compose(req);
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName2));
    -  }
    -
    -  @Test
    -  public void testCopyBlob() {
    -    String sourceBlobName = "test-copy-blob-source";
    -    BlobId source = BlobId.of(BUCKET, sourceBlobName);
    -    ImmutableMap metadata = ImmutableMap.of("k", "v");
    -    BlobInfo blob = BlobInfo.builder(source)
    -        .contentType(CONTENT_TYPE)
    -        .metadata(metadata)
    -        .build();
    -    assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT));
    -    String targetBlobName = "test-copy-blob-target";
    -    Storage.CopyRequest req = Storage.CopyRequest.of(source, BlobId.of(BUCKET, targetBlobName));
    -    CopyWriter copyWriter = storage.copy(req);
    -    assertEquals(BUCKET, copyWriter.result().bucket());
    -    assertEquals(targetBlobName, copyWriter.result().name());
    -    assertEquals(CONTENT_TYPE, copyWriter.result().contentType());
    -    assertEquals(metadata, copyWriter.result().metadata());
    -    assertTrue(copyWriter.isDone());
    -    assertTrue(storage.delete(BUCKET, sourceBlobName));
    -    assertTrue(storage.delete(BUCKET, targetBlobName));
    -  }
    -
    -  @Test
    -  public void testCopyBlobUpdateMetadata() {
    -    String sourceBlobName = "test-copy-blob-update-metadata-source";
    -    BlobId source = BlobId.of(BUCKET, sourceBlobName);
    -    assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT));
    -    String targetBlobName = "test-copy-blob-update-metadata-target";
    -    ImmutableMap metadata = ImmutableMap.of("k", "v");
    -    BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName)
    -        .contentType(CONTENT_TYPE)
    -        .metadata(metadata)
    -        .build();
    -    Storage.CopyRequest req = Storage.CopyRequest.of(source, target);
    -    CopyWriter copyWriter = storage.copy(req);
    -    assertEquals(BUCKET, copyWriter.result().bucket());
    -    assertEquals(targetBlobName, copyWriter.result().name());
    -    assertEquals(CONTENT_TYPE, copyWriter.result().contentType());
    -    assertEquals(metadata, copyWriter.result().metadata());
    -    assertTrue(copyWriter.isDone());
    -    assertTrue(storage.delete(BUCKET, sourceBlobName));
    -    assertTrue(storage.delete(BUCKET, targetBlobName));
    -  }
    -
    -  @Test
    -  public void testCopyBlobFail() {
    -    String sourceBlobName = "test-copy-blob-source-fail";
    -    BlobId source = BlobId.of(BUCKET, sourceBlobName);
    -    assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT));
    -    String targetBlobName = "test-copy-blob-target-fail";
    -    BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build();
    -    Storage.CopyRequest req = Storage.CopyRequest.builder()
    -        .source(source)
    -        .sourceOptions(Storage.BlobSourceOption.generationMatch(-1L))
    -        .target(target)
    -        .build();
    -    try {
    -      storage.copy(req);
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, sourceBlobName));
    -  }
    -
    -  @Test
    -  public void testBatchRequest() {
    -    String sourceBlobName1 = "test-batch-request-blob-1";
    -    String sourceBlobName2 = "test-batch-request-blob-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    assertNotNull(storage.create(sourceBlob2));
    -
    -    // Batch update request
    -    BlobInfo updatedBlob1 = sourceBlob1.toBuilder().contentType(CONTENT_TYPE).build();
    -    BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build();
    -    BatchRequest updateRequest = BatchRequest.builder()
    -        .update(updatedBlob1)
    -        .update(updatedBlob2)
    -        .build();
    -    BatchResponse updateResponse = storage.apply(updateRequest);
    -    assertEquals(2, updateResponse.updates().size());
    -    assertEquals(0, updateResponse.deletes().size());
    -    assertEquals(0, updateResponse.gets().size());
    -    BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get();
    -    BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get();
    -    assertEquals(sourceBlob1.blobId(), remoteUpdatedBlob1.blobId());
    -    assertEquals(sourceBlob2.blobId(), remoteUpdatedBlob2.blobId());
    -    assertEquals(updatedBlob1.contentType(), remoteUpdatedBlob1.contentType());
    -    assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
    -
    -    // Batch get request
    -    BatchRequest getRequest = BatchRequest.builder()
    -        .get(BUCKET, sourceBlobName1)
    -        .get(BUCKET, sourceBlobName2)
    -        .build();
    -    BatchResponse getResponse = storage.apply(getRequest);
    -    assertEquals(2, getResponse.gets().size());
    -    assertEquals(0, getResponse.deletes().size());
    -    assertEquals(0, getResponse.updates().size());
    -    BlobInfo remoteBlob1 = getResponse.gets().get(0).get();
    -    BlobInfo remoteBlob2 = getResponse.gets().get(1).get();
    -    assertEquals(remoteUpdatedBlob1, remoteBlob1);
    -    assertEquals(remoteUpdatedBlob2, remoteBlob2);
    -
    -    // Batch delete request
    -    BatchRequest deleteRequest = BatchRequest.builder()
    -        .delete(BUCKET, sourceBlobName1)
    -        .delete(BUCKET, sourceBlobName2)
    -        .build();
    -    BatchResponse deleteResponse = storage.apply(deleteRequest);
    -    assertEquals(2, deleteResponse.deletes().size());
    -    assertEquals(0, deleteResponse.gets().size());
    -    assertEquals(0, deleteResponse.updates().size());
    -    assertTrue(deleteResponse.deletes().get(0).get());
    -    assertTrue(deleteResponse.deletes().get(1).get());
    -  }
    -
    -  @Test
    -  public void testBatchRequestFail() {
    -    String blobName = "test-batch-request-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    BlobInfo updatedBlob = blob.toBuilder().generation(-1L).build();
    -    BatchRequest batchRequest = BatchRequest.builder()
    -        .update(updatedBlob, Storage.BlobTargetOption.generationMatch())
    -        .delete(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L))
    -        .get(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L))
    -        .build();
    -    BatchResponse updateResponse = storage.apply(batchRequest);
    -    assertEquals(1, updateResponse.updates().size());
    -    assertEquals(1, updateResponse.deletes().size());
    -    assertEquals(1, updateResponse.gets().size());
    -    assertTrue(updateResponse.updates().get(0).failed());
    -    assertTrue(updateResponse.gets().get(0).failed());
    -    assertTrue(updateResponse.deletes().get(0).failed());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testReadAndWriteChannels() throws IOException {
    -    String blobName = "test-read-and-write-channels-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    byte[] stringBytes;
    -    try (BlobWriteChannel writer = storage.writer(blob)) {
    -      stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    -      writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
    -      writer.write(ByteBuffer.wrap(stringBytes));
    -    }
    -    ByteBuffer readBytes;
    -    ByteBuffer readStringBytes;
    -    try (BlobReadChannel reader = storage.reader(blob.blobId())) {
    -      readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
    -      readStringBytes = ByteBuffer.allocate(stringBytes.length);
    -      reader.read(readBytes);
    -      reader.read(readStringBytes);
    -    }
    -    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
    -    assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testReadAndWriteCaptureChannels() throws IOException {
    -    String blobName = "test-read-and-write-capture-channels-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    byte[] stringBytes;
    -    BlobWriteChannel writer = storage.writer(blob);
    -    stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    -    writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
    -    RestorableState writerState = writer.capture();
    -    BlobWriteChannel secondWriter = writerState.restore();
    -    secondWriter.write(ByteBuffer.wrap(stringBytes));
    -    secondWriter.close();
    -    ByteBuffer readBytes;
    -    ByteBuffer readStringBytes;
    -    BlobReadChannel reader = storage.reader(blob.blobId());
    -    reader.chunkSize(BLOB_BYTE_CONTENT.length);
    -    readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
    -    reader.read(readBytes);
    -    RestorableState readerState = reader.capture();
    -    BlobReadChannel secondReader = readerState.restore();
    -    readStringBytes = ByteBuffer.allocate(stringBytes.length);
    -    secondReader.read(readStringBytes);
    -    reader.close();
    -    secondReader.close();
    -    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
    -    assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testReadChannelFail() throws IOException {
    -    String blobName = "test-read-channel-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    try (BlobReadChannel reader =
    -        storage.reader(blob.blobId(), Storage.BlobSourceOption.metagenerationMatch(-1L))) {
    -      reader.read(ByteBuffer.allocate(42));
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testWriteChannelFail() throws IOException {
    -    String blobName = "test-write-channel-blob-fail";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).generation(-1L).build();
    -    try {
    -      try (BlobWriteChannel writer =
    -          storage.writer(blob, Storage.BlobWriteOption.generationMatch())) {
    -        writer.write(ByteBuffer.allocate(42));
    -      }
    -      fail("StorageException was expected");
    -    } catch (StorageException ex) {
    -      // expected
    -    }
    -  }
    -
    -  @Test
    -  public void testWriteChannelExistingBlob() throws IOException {
    -    String blobName = "test-write-channel-existing-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    BlobInfo remoteBlob = storage.create(blob);
    -    byte[] stringBytes;
    -    try (BlobWriteChannel writer = storage.writer(remoteBlob)) {
    -      stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    -      writer.write(ByteBuffer.wrap(stringBytes));
    -    }
    -    assertArrayEquals(stringBytes, storage.readAllBytes(blob.blobId()));
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testGetSignedUrl() throws IOException {
    -    String blobName = "test-get-signed-url-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT));
    -    URL url = storage.signUrl(blob, 1, TimeUnit.HOURS);
    -    URLConnection connection = url.openConnection();
    -    byte[] readBytes = new byte[BLOB_BYTE_CONTENT.length];
    -    try (InputStream responseStream = connection.getInputStream()) {
    -      assertEquals(BLOB_BYTE_CONTENT.length, responseStream.read(readBytes));
    -      assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
    -      assertTrue(storage.delete(BUCKET, blobName));
    -    }
    -  }
    -
    -  @Test
    -  public void testPostSignedUrl() throws IOException {
    -    String blobName = "test-post-signed-url-blob";
    -    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    -    assertNotNull(storage.create(blob));
    -    URL url =
    -        storage.signUrl(blob, 1, TimeUnit.HOURS, Storage.SignUrlOption.httpMethod(HttpMethod.POST));
    -    URLConnection connection = url.openConnection();
    -    connection.setDoOutput(true);
    -    connection.connect();
    -    BlobInfo remoteBlob = storage.get(BUCKET, blobName);
    -    assertNotNull(remoteBlob);
    -    assertEquals(blob.blobId(), remoteBlob.blobId());
    -    assertTrue(storage.delete(BUCKET, blobName));
    -  }
    -
    -  @Test
    -  public void testGetBlobs() {
    -    String sourceBlobName1 = "test-get-blobs-1";
    -    String sourceBlobName2 = "test-get-blobs-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    assertNotNull(storage.create(sourceBlob2));
    -    List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
    -    assertEquals(sourceBlob1.blobId(), remoteBlobs.get(0).blobId());
    -    assertEquals(sourceBlob2.blobId(), remoteBlobs.get(1).blobId());
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName2));
    -  }
    -
    -  @Test
    -  public void testGetBlobsFail() {
    -    String sourceBlobName1 = "test-get-blobs-fail-1";
    -    String sourceBlobName2 = "test-get-blobs-fail-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
    -    assertEquals(sourceBlob1.blobId(), remoteBlobs.get(0).blobId());
    -    assertNull(remoteBlobs.get(1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -  }
    -
    -  @Test
    -  public void testDeleteBlobs() {
    -    String sourceBlobName1 = "test-delete-blobs-1";
    -    String sourceBlobName2 = "test-delete-blobs-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    assertNotNull(storage.create(sourceBlob2));
    -    List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
    -    assertTrue(deleteStatus.get(0));
    -    assertTrue(deleteStatus.get(1));
    -  }
    -
    -  @Test
    -  public void testDeleteBlobsFail() {
    -    String sourceBlobName1 = "test-delete-blobs-fail-1";
    -    String sourceBlobName2 = "test-delete-blobs-fail-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    assertNotNull(storage.create(sourceBlob1));
    -    List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
    -    assertTrue(deleteStatus.get(0));
    -    assertTrue(!deleteStatus.get(1));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobs() {
    -    String sourceBlobName1 = "test-update-blobs-1";
    -    String sourceBlobName2 = "test-update-blobs-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    BlobInfo remoteBlob1 = storage.create(sourceBlob1);
    -    BlobInfo remoteBlob2 = storage.create(sourceBlob2);
    -    assertNotNull(remoteBlob1);
    -    assertNotNull(remoteBlob2);
    -    List updatedBlobs = storage.update(
    -        remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
    -        remoteBlob2.toBuilder().contentType(CONTENT_TYPE).build());
    -    assertEquals(sourceBlob1.blobId(), updatedBlobs.get(0).blobId());
    -    assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
    -    assertEquals(sourceBlob2.blobId(), updatedBlobs.get(1).blobId());
    -    assertEquals(CONTENT_TYPE, updatedBlobs.get(1).contentType());
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName2));
    -  }
    -
    -  @Test
    -  public void testUpdateBlobsFail() {
    -    String sourceBlobName1 = "test-update-blobs-fail-1";
    -    String sourceBlobName2 = "test-update-blobs-fail-2";
    -    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    -    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    -    BlobInfo remoteBlob1 = storage.create(sourceBlob1);
    -    assertNotNull(remoteBlob1);
    -    List updatedBlobs = storage.update(
    -        remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
    -        sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build());
    -    assertEquals(sourceBlob1.blobId(), updatedBlobs.get(0).blobId());
    -    assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
    -    assertNull(updatedBlobs.get(1));
    -    assertTrue(storage.delete(BUCKET, sourceBlobName1));
    -  }
    -}
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
    index 2703ddb401c5..5924174ab138 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java
    @@ -18,7 +18,7 @@
     
     import static org.junit.Assert.assertEquals;
     
    -import com.google.gcloud.spi.StorageRpc;
    +import com.google.gcloud.storage.spi.StorageRpc;
     
     import org.junit.Test;
     
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
    index 329767e85d4a..146922a9dae9 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java
    @@ -20,21 +20,20 @@
     import static org.junit.Assert.assertTrue;
     
     import com.google.common.collect.ImmutableList;
    +import com.google.gcloud.Page;
    +import com.google.gcloud.storage.Storage.BlobListOption;
     import com.google.gcloud.storage.testing.RemoteGcsHelper;
     
     import org.easymock.EasyMock;
    -import org.junit.BeforeClass;
    +import org.junit.Before;
     import org.junit.Rule;
     import org.junit.Test;
     import org.junit.rules.ExpectedException;
     
     import java.io.ByteArrayInputStream;
     import java.io.InputStream;
    -import java.nio.file.Files;
    -import java.nio.file.Paths;
     import java.util.Iterator;
     import java.util.List;
    -import java.util.UUID;
     import java.util.concurrent.ExecutionException;
     import java.util.concurrent.TimeUnit;
     
    @@ -69,46 +68,60 @@ public class RemoteGcsHelperTest {
           + "  \"type\": \"service_account\"\n"
           + "}";
       private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes());
    -  private static final List BLOB_LIST = ImmutableList.of(
    -      BlobInfo.builder(BUCKET_NAME, "n1").build(),
    -      BlobInfo.builder(BUCKET_NAME, "n2").build());
    -  private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, "", true);
    -  private static final StorageException FATAL_EXCEPTION = new StorageException(500, "", false);
    -  private static final ListResult BLOB_LIST_RESULT = new ListResult() {
    -
    -    @Override
    -    public String nextPageCursor() {
    -      return "listResult";
    -    }
    -
    -    @Override
    -    public ListResult nextPage() {
    -      return null;
    -    }
    +  private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, "");
    +  private static final StorageException FATAL_EXCEPTION = new StorageException(500, "");
     
    -    @Override
    -    public Iterator iterator() {
    -      return BLOB_LIST.iterator();
    -    }
    -  };
    -  private static String keyPath = "/does/not/exist/key." + UUID.randomUUID().toString() + ".json";
    +  private static Storage serviceMockReturnsOptions;
    +  private List blobList;
    +  private Page blobPage;
     
       @Rule
       public ExpectedException thrown = ExpectedException.none();
     
    -  @BeforeClass
    -  public static void beforeClass() {
    -    while (Files.exists(Paths.get(JSON_KEY))) {
    -      keyPath = "/does/not/exist/key." + UUID.randomUUID().toString() + ".json";
    -    }
    +  @Before
    +  public void setUp() {
    +    serviceMockReturnsOptions = EasyMock.createMock(Storage.class);
    +    EasyMock.expect(serviceMockReturnsOptions.options())
    +        .andReturn(EasyMock.createMock(StorageOptions.class))
    +        .times(2);
    +    EasyMock.replay(serviceMockReturnsOptions);
    +    blobList = ImmutableList.of(
    +        new Blob(
    +            serviceMockReturnsOptions,
    +            new BlobInfo.BuilderImpl(BlobInfo.builder(BUCKET_NAME, "n1").build())),
    +        new Blob(
    +            serviceMockReturnsOptions,
    +            new BlobInfo.BuilderImpl(BlobInfo.builder(BUCKET_NAME, "n2").build())));
    +    blobPage = new Page() {
    +      @Override
    +      public String nextPageCursor() {
    +        return "nextPageCursor";
    +      }
    +
    +      @Override
    +      public Page nextPage() {
    +        return null;
    +      }
    +
    +      @Override
    +      public Iterable values() {
    +        return blobList;
    +      }
    +
    +      @Override
    +      public Iterator iterateAll() {
    +        return blobList.iterator();
    +      }
    +    };
       }
     
       @Test
       public void testForceDelete() throws InterruptedException, ExecutionException {
         Storage storageMock = EasyMock.createMock(Storage.class);
    -    EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT);
    -    for (BlobInfo info : BLOB_LIST) {
    -      EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true);
    +    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
    +        .andReturn(blobPage);
    +    for (BlobInfo info : blobList) {
    +      EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true);
         }
         EasyMock.expect(storageMock.delete(BUCKET_NAME)).andReturn(true);
         EasyMock.replay(storageMock);
    @@ -119,9 +132,10 @@ public void testForceDelete() throws InterruptedException, ExecutionException {
       @Test
       public void testForceDeleteTimeout() throws InterruptedException, ExecutionException {
         Storage storageMock = EasyMock.createMock(Storage.class);
    -    EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT).anyTimes();
    -    for (BlobInfo info : BLOB_LIST) {
    -      EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true).anyTimes();
    +    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
    +        .andReturn(blobPage).anyTimes();
    +    for (BlobInfo info : blobList) {
    +      EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true).anyTimes();
         }
         EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(RETRYABLE_EXCEPTION).anyTimes();
         EasyMock.replay(storageMock);
    @@ -132,9 +146,10 @@ public void testForceDeleteTimeout() throws InterruptedException, ExecutionExcep
       @Test
       public void testForceDeleteFail() throws InterruptedException, ExecutionException {
         Storage storageMock = EasyMock.createMock(Storage.class);
    -    EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT);
    -    for (BlobInfo info : BLOB_LIST) {
    -      EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true);
    +    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
    +        .andReturn(blobPage);
    +    for (BlobInfo info : blobList) {
    +      EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true);
         }
         EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(FATAL_EXCEPTION);
         EasyMock.replay(storageMock);
    @@ -146,6 +161,38 @@ public void testForceDeleteFail() throws InterruptedException, ExecutionExceptio
         }
       }
     
    +  @Test
    +  public void testForceDeleteNoTimeout() {
    +    Storage storageMock = EasyMock.createMock(Storage.class);
    +    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
    +        .andReturn(blobPage);
    +    for (BlobInfo info : blobList) {
    +      EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true);
    +    }
    +    EasyMock.expect(storageMock.delete(BUCKET_NAME)).andReturn(true);
    +    EasyMock.replay(storageMock);
    +    RemoteGcsHelper.forceDelete(storageMock, BUCKET_NAME);
    +    EasyMock.verify(storageMock);
    +  }
    +
    +  @Test
    +  public void testForceDeleteNoTimeoutFail() {
    +    Storage storageMock = EasyMock.createMock(Storage.class);
    +    EasyMock.expect(storageMock.list(BUCKET_NAME, BlobListOption.versions(true)))
    +        .andReturn(blobPage);
    +    for (BlobInfo info : blobList) {
    +      EasyMock.expect(storageMock.delete(info.blobId())).andReturn(true);
    +    }
    +    EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(FATAL_EXCEPTION);
    +    EasyMock.replay(storageMock);
    +    thrown.expect(StorageException.class);
    +    try {
    +      RemoteGcsHelper.forceDelete(storageMock, BUCKET_NAME);
    +    } finally {
    +      EasyMock.verify(storageMock);
    +    }
    +  }
    +
       @Test
       public void testCreateFromStream() {
         RemoteGcsHelper helper = RemoteGcsHelper.create(PROJECT_ID, JSON_KEY_STREAM);
    @@ -153,17 +200,10 @@ public void testCreateFromStream() {
         assertEquals(PROJECT_ID, options.projectId());
         assertEquals(60000, options.connectTimeout());
         assertEquals(60000, options.readTimeout());
    -    assertEquals(10, options.retryParams().getRetryMaxAttempts());
    -    assertEquals(6, options.retryParams().getRetryMinAttempts());
    -    assertEquals(30000, options.retryParams().getMaxRetryDelayMillis());
    -    assertEquals(120000, options.retryParams().getTotalRetryPeriodMillis());
    -    assertEquals(250, options.retryParams().getInitialRetryDelayMillis());
    -  }
    -
    -  @Test
    -  public void testCreateNoKey() {
    -    thrown.expect(RemoteGcsHelper.GcsHelperException.class);
    -    thrown.expectMessage(keyPath + " (No such file or directory)");
    -    RemoteGcsHelper.create(PROJECT_ID, keyPath);
    +    assertEquals(10, options.retryParams().retryMaxAttempts());
    +    assertEquals(6, options.retryParams().retryMinAttempts());
    +    assertEquals(30000, options.retryParams().maxRetryDelayMillis());
    +    assertEquals(120000, options.retryParams().totalRetryPeriodMillis());
    +    assertEquals(250, options.retryParams().initialRetryDelayMillis());
       }
     }
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
    index 4c22170bba80..efa56d9e39b2 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java
    @@ -21,10 +21,13 @@
     
     import com.google.common.collect.ImmutableMap;
     import com.google.gcloud.AuthCredentials;
    +import com.google.gcloud.PageImpl;
    +import com.google.gcloud.ReadChannel;
     import com.google.gcloud.RestorableState;
     import com.google.gcloud.RetryParams;
    -import com.google.gcloud.spi.StorageRpc;
    +import com.google.gcloud.WriteChannel;
     import com.google.gcloud.storage.Acl.Project.ProjectRole;
    +import com.google.gcloud.storage.spi.StorageRpc;
     
     import org.junit.Test;
     
    @@ -39,25 +42,29 @@
     
     public class SerializationTest {
     
    +  private static final Storage STORAGE = StorageOptions.builder().projectId("p").build().service();
       private static final Acl.Domain ACL_DOMAIN = new Acl.Domain("domain");
       private static final Acl.Group ACL_GROUP = new Acl.Group("group");
       private static final Acl.Project ACL_PROJECT_ = new Acl.Project(ProjectRole.VIEWERS, "pid");
       private static final Acl.User ACL_USER = new Acl.User("user");
       private static final Acl.RawEntity ACL_RAW = new Acl.RawEntity("raw");
    +  private static final Acl ACL = Acl.of(ACL_DOMAIN, Acl.Role.OWNER);
       private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").build();
       private static final BucketInfo BUCKET_INFO = BucketInfo.of("b");
    +  private static final Blob BLOB = new Blob(STORAGE, new BlobInfo.BuilderImpl(BLOB_INFO));
    +  private static final Bucket BUCKET = new Bucket(STORAGE, new BucketInfo.BuilderImpl(BUCKET_INFO));
       private static final Cors.Origin ORIGIN = Cors.Origin.any();
       private static final Cors CORS =
           Cors.builder().maxAgeSeconds(1).origins(Collections.singleton(ORIGIN)).build();
       private static final BatchRequest BATCH_REQUEST = BatchRequest.builder().delete("B", "N").build();
       private static final BatchResponse BATCH_RESPONSE = new BatchResponse(
           Collections.singletonList(BatchResponse.Result.of(true)),
    -      Collections.>emptyList(),
    -      Collections.>emptyList());
    -  private static final BaseListResult LIST_RESULT = new BaseListResult<>(
    -      null, "c", Collections.singletonList(BlobInfo.builder("b", "n").build()));
    +      Collections.>emptyList(),
    +      Collections.>emptyList());
    +  private static final PageImpl PAGE_RESULT =
    +      new PageImpl<>(null, "c", Collections.singletonList(BLOB));
       private static final Storage.BlobListOption BLOB_LIST_OPTIONS =
    -      Storage.BlobListOption.maxResults(100);
    +      Storage.BlobListOption.pageSize(100);
       private static final Storage.BlobSourceOption BLOB_SOURCE_OPTIONS =
           Storage.BlobSourceOption.generationMatch(1);
       private static final Storage.BlobTargetOption BLOB_TARGET_OPTIONS =
    @@ -81,9 +88,8 @@ public void testServiceOptions() throws Exception {
     
         options = options.toBuilder()
             .projectId("p2")
    -        .retryParams(RetryParams.getDefaultInstance())
    -        .authCredentials(AuthCredentials.noCredentials())
    -        .pathDelimiter(":")
    +        .retryParams(RetryParams.defaultInstance())
    +        .authCredentials(null)
             .build();
         serializedCopy = serializeAndDeserialize(options);
         assertEquals(options, serializedCopy);
    @@ -91,11 +97,10 @@ public void testServiceOptions() throws Exception {
     
       @Test
       public void testModelAndRequests() throws Exception {
    -    Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, BLOB_INFO,
    -        BUCKET_INFO,
    -        ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE, LIST_RESULT, BLOB_LIST_OPTIONS,
    -        BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS, BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS,
    -        BUCKET_TARGET_OPTIONS};
    +    Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, ACL,
    +        BLOB_INFO, BLOB, BUCKET_INFO, BUCKET, ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE,
    +        PAGE_RESULT, BLOB_LIST_OPTIONS, BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS,
    +        BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS, BUCKET_TARGET_OPTIONS};
         for (Serializable obj : objects) {
           Object copy = serializeAndDeserialize(obj);
           assertEquals(obj, obj);
    @@ -109,29 +114,30 @@ public void testModelAndRequests() throws Exception {
       public void testReadChannelState() throws IOException, ClassNotFoundException {
         StorageOptions options = StorageOptions.builder()
             .projectId("p2")
    -        .retryParams(RetryParams.getDefaultInstance())
    -        .authCredentials(AuthCredentials.noCredentials())
    +        .retryParams(RetryParams.defaultInstance())
             .build();
    -    BlobReadChannel reader =
    -        new BlobReadChannelImpl(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS);
    -    RestorableState state = reader.capture();
    -    RestorableState deserializedState = serializeAndDeserialize(state);
    +    ReadChannel reader =
    +        new BlobReadChannel(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS);
    +    RestorableState state = reader.capture();
    +    RestorableState deserializedState = serializeAndDeserialize(state);
         assertEquals(state, deserializedState);
         assertEquals(state.hashCode(), deserializedState.hashCode());
         assertEquals(state.toString(), deserializedState.toString());
    +    reader.close();
       }
     
       @Test
       public void testWriteChannelState() throws IOException, ClassNotFoundException {
         StorageOptions options = StorageOptions.builder()
             .projectId("p2")
    -        .retryParams(RetryParams.getDefaultInstance())
    -        .authCredentials(AuthCredentials.noCredentials())
    +        .retryParams(RetryParams.defaultInstance())
             .build();
    -    BlobWriteChannelImpl writer = new BlobWriteChannelImpl(
    -        options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id");
    -    RestorableState state = writer.capture();
    -    RestorableState deserializedState = serializeAndDeserialize(state);
    +    // avoid closing when you don't want partial writes to GCS upon failure
    +    @SuppressWarnings("resource")
    +    BlobWriteChannel writer =
    +        new BlobWriteChannel(options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id");
    +    RestorableState state = writer.capture();
    +    RestorableState deserializedState = serializeAndDeserialize(state);
         assertEquals(state, deserializedState);
         assertEquals(state.hashCode(), deserializedState.hashCode());
         assertEquals(state.toString(), deserializedState.toString());
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java
    new file mode 100644
    index 000000000000..cf1d4b394e57
    --- /dev/null
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java
    @@ -0,0 +1,125 @@
    +/*
    + * Copyright 2015 Google Inc. All Rights Reserved.
    + *
    + * Licensed under the Apache License, Version 2.0 (the "License");
    + * you may not use this file except in compliance with the License.
    + * You may obtain a copy of the License at
    + *
    + *       http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package com.google.gcloud.storage;
    +
    +import static org.easymock.EasyMock.createMock;
    +import static org.easymock.EasyMock.expect;
    +import static org.easymock.EasyMock.replay;
    +import static org.easymock.EasyMock.verify;
    +import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertFalse;
    +import static org.junit.Assert.assertNull;
    +import static org.junit.Assert.assertTrue;
    +
    +import com.google.api.client.googleapis.json.GoogleJsonError;
    +import com.google.gcloud.BaseServiceException;
    +import com.google.gcloud.RetryHelper.RetryHelperException;
    +
    +import org.junit.Test;
    +
    +import java.io.IOException;
    +import java.net.SocketTimeoutException;
    +
    +public class StorageExceptionTest {
    +
    +  @Test
    +  public void testStorageException() {
    +    StorageException exception = new StorageException(500, "message");
    +    assertEquals(500, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(502, "message");
    +    assertEquals(502, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(503, "message");
    +    assertEquals(503, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(504, "message");
    +    assertEquals(504, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(429, "message");
    +    assertEquals(429, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(408, "message");
    +    assertEquals(408, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    exception = new StorageException(400, "message");
    +    assertEquals(400, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertNull(exception.reason());
    +    assertFalse(exception.retryable());
    +    assertTrue(exception.idempotent());
    +
    +    IOException cause = new SocketTimeoutException();
    +    exception = new StorageException(cause);
    +    assertNull(exception.reason());
    +    assertNull(exception.getMessage());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +    assertEquals(cause, exception.getCause());
    +
    +    GoogleJsonError error = new GoogleJsonError();
    +    error.setCode(503);
    +    error.setMessage("message");
    +    exception = new StorageException(error);
    +    assertEquals(503, exception.code());
    +    assertEquals("message", exception.getMessage());
    +    assertTrue(exception.retryable());
    +    assertTrue(exception.idempotent());
    +  }
    +
    +  @Test
    +  public void testTranslateAndThrow() throws Exception {
    +    StorageException cause = new StorageException(503, "message");
    +    RetryHelperException exceptionMock = createMock(RetryHelperException.class);
    +    expect(exceptionMock.getCause()).andReturn(cause).times(2);
    +    replay(exceptionMock);
    +    try {
    +      StorageException.translateAndThrow(exceptionMock);
    +    } catch (BaseServiceException ex) {
    +      assertEquals(503, ex.code());
    +      assertEquals("message", ex.getMessage());
    +      assertTrue(ex.retryable());
    +      assertTrue(ex.idempotent());
    +    } finally {
    +      verify(exceptionMock);
    +    }
    +  }
    +}
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
    index bdac54bcef2d..38b4bb58e77f 100644
    --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java
    @@ -31,14 +31,16 @@
     import com.google.common.collect.Iterables;
     import com.google.common.collect.Maps;
     import com.google.common.io.BaseEncoding;
    -import com.google.gcloud.AuthCredentials;
     import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials;
    +import com.google.gcloud.Page;
    +import com.google.gcloud.ReadChannel;
     import com.google.gcloud.RetryParams;
     import com.google.gcloud.ServiceOptions;
    -import com.google.gcloud.spi.StorageRpc;
    -import com.google.gcloud.spi.StorageRpc.Tuple;
    -import com.google.gcloud.spi.StorageRpcFactory;
    +import com.google.gcloud.WriteChannel;
     import com.google.gcloud.storage.Storage.CopyRequest;
    +import com.google.gcloud.storage.spi.StorageRpc;
    +import com.google.gcloud.storage.spi.StorageRpc.Tuple;
    +import com.google.gcloud.storage.spi.StorageRpcFactory;
     
     import org.easymock.Capture;
     import org.easymock.EasyMock;
    @@ -88,8 +90,8 @@ public class StorageImplTest {
       private static final BucketInfo BUCKET_INFO2 = BucketInfo.builder(BUCKET_NAME2).build();
     
       // BlobInfo objects
    -  private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1)
    -      .metageneration(42L).generation(24L).contentType("application/json").md5("md5string").build();
    +  private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1, 24L)
    +      .metageneration(42L).contentType("application/json").md5("md5string").build();
       private static final BlobInfo BLOB_INFO2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).build();
       private static final BlobInfo BLOB_INFO3 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME3).build();
     
    @@ -137,17 +139,40 @@ public class StorageImplTest {
       private static final Storage.BlobWriteOption BLOB_WRITE_CRC2C =
           Storage.BlobWriteOption.crc32cMatch();
     
    -  // Bucket source options
    +  // Bucket get/source options
       private static final Storage.BucketSourceOption BUCKET_SOURCE_METAGENERATION =
           Storage.BucketSourceOption.metagenerationMatch(BUCKET_INFO1.metageneration());
        private static final Map<StorageRpc.Option, ?> BUCKET_SOURCE_OPTIONS = ImmutableMap.of(
           StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value());
    +  private static final Storage.BucketGetOption BUCKET_GET_METAGENERATION =
    +      Storage.BucketGetOption.metagenerationMatch(BUCKET_INFO1.metageneration());
    +  private static final Storage.BucketGetOption BUCKET_GET_FIELDS =
    +      Storage.BucketGetOption.fields(Storage.BucketField.LOCATION, Storage.BucketField.ACL);
    +  private static final Storage.BucketGetOption BUCKET_GET_EMPTY_FIELDS =
    +      Storage.BucketGetOption.fields();
     +  private static final Map<StorageRpc.Option, ?> BUCKET_GET_OPTIONS = ImmutableMap.of(
    +      StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value());
     
    -  // Blob source options
    +  // Blob get/source options
    +  private static final Storage.BlobGetOption BLOB_GET_METAGENERATION =
    +      Storage.BlobGetOption.metagenerationMatch(BLOB_INFO1.metageneration());
    +  private static final Storage.BlobGetOption BLOB_GET_GENERATION =
    +      Storage.BlobGetOption.generationMatch(BLOB_INFO1.generation());
    +  private static final Storage.BlobGetOption BLOB_GET_GENERATION_FROM_BLOB_ID =
    +      Storage.BlobGetOption.generationMatch();
    +  private static final Storage.BlobGetOption BLOB_GET_FIELDS =
    +      Storage.BlobGetOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.CRC32C);
    +  private static final Storage.BlobGetOption BLOB_GET_EMPTY_FIELDS =
    +      Storage.BlobGetOption.fields();
     +  private static final Map<StorageRpc.Option, ?> BLOB_GET_OPTIONS = ImmutableMap.of(
    +      StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_GET_METAGENERATION.value(),
    +      StorageRpc.Option.IF_GENERATION_MATCH, BLOB_GET_GENERATION.value());
       private static final Storage.BlobSourceOption BLOB_SOURCE_METAGENERATION =
           Storage.BlobSourceOption.metagenerationMatch(BLOB_INFO1.metageneration());
       private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION =
           Storage.BlobSourceOption.generationMatch(BLOB_INFO1.generation());
    +  private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION_FROM_BLOB_ID =
    +      Storage.BlobSourceOption.generationMatch();
        private static final Map<StorageRpc.Option, ?> BLOB_SOURCE_OPTIONS = ImmutableMap.of(
           StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_SOURCE_METAGENERATION.value(),
           StorageRpc.Option.IF_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value());
    @@ -156,22 +181,33 @@ public class StorageImplTest {
           StorageRpc.Option.IF_SOURCE_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value());
     
       // Bucket list options
    -  private static final Storage.BucketListOption BUCKET_LIST_MAX_RESULT =
    -      Storage.BucketListOption.maxResults(42L);
    +  private static final Storage.BucketListOption BUCKET_LIST_PAGE_SIZE =
    +      Storage.BucketListOption.pageSize(42L);
       private static final Storage.BucketListOption BUCKET_LIST_PREFIX =
           Storage.BucketListOption.prefix("prefix");
    +  private static final Storage.BucketListOption BUCKET_LIST_FIELDS =
    +      Storage.BucketListOption.fields(Storage.BucketField.LOCATION, Storage.BucketField.ACL);
    +  private static final Storage.BucketListOption BUCKET_LIST_EMPTY_FIELDS =
    +      Storage.BucketListOption.fields();
        private static final Map<StorageRpc.Option, ?> BUCKET_LIST_OPTIONS = ImmutableMap.of(
    -      StorageRpc.Option.MAX_RESULTS, BUCKET_LIST_MAX_RESULT.value(),
    +      StorageRpc.Option.MAX_RESULTS, BUCKET_LIST_PAGE_SIZE.value(),
           StorageRpc.Option.PREFIX, BUCKET_LIST_PREFIX.value());
     
       // Blob list options
    -  private static final Storage.BlobListOption BLOB_LIST_MAX_RESULT =
    -      Storage.BlobListOption.maxResults(42L);
    +  private static final Storage.BlobListOption BLOB_LIST_PAGE_SIZE =
    +      Storage.BlobListOption.pageSize(42L);
       private static final Storage.BlobListOption BLOB_LIST_PREFIX =
           Storage.BlobListOption.prefix("prefix");
    +  private static final Storage.BlobListOption BLOB_LIST_FIELDS =
    +      Storage.BlobListOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.MD5HASH);
    +  private static final Storage.BlobListOption BLOB_LIST_VERSIONS =
    +      Storage.BlobListOption.versions(false);
    +  private static final Storage.BlobListOption BLOB_LIST_EMPTY_FIELDS =
    +      Storage.BlobListOption.fields();
        private static final Map<StorageRpc.Option, ?> BLOB_LIST_OPTIONS = ImmutableMap.of(
    -      StorageRpc.Option.MAX_RESULTS, BLOB_LIST_MAX_RESULT.value(),
    -      StorageRpc.Option.PREFIX, BLOB_LIST_PREFIX.value());
    +      StorageRpc.Option.MAX_RESULTS, BLOB_LIST_PAGE_SIZE.value(),
    +      StorageRpc.Option.PREFIX, BLOB_LIST_PREFIX.value(),
    +      StorageRpc.Option.VERSIONS, BLOB_LIST_VERSIONS.value());
     
       private static final String PRIVATE_KEY_STRING = "MIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoG"
               + "BAL2xolH1zrISQ8+GzOV29BNjjzq4/HIP8Psd1+cZb81vDklSF+95wB250MSE0BDc81pvIMwj5OmIfLg1NY6uB"
    @@ -205,6 +241,9 @@ public long millis() {
       private StorageRpc storageRpcMock;
       private Storage storage;
     
    +  private Blob expectedBlob1, expectedBlob2, expectedBlob3;
    +  private Bucket expectedBucket1, expectedBucket2;
    +
       @Rule
       public ExpectedException thrown = ExpectedException.none();
     
    @@ -220,7 +259,7 @@ public static void beforeClass() throws NoSuchAlgorithmException, InvalidKeySpec
       }
     
       @Before
    -  public void setUp() throws IOException, InterruptedException {
    +  public void setUp() {
         rpcFactoryMock = EasyMock.createMock(StorageRpcFactory.class);
         storageRpcMock = EasyMock.createMock(StorageRpc.class);
         EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(StorageOptions.class)))
    @@ -228,9 +267,9 @@ public void setUp() throws IOException, InterruptedException {
         EasyMock.replay(rpcFactoryMock);
         options = StorageOptions.builder()
             .projectId("projectId")
    -        .authCredentials(AuthCredentials.noCredentials())
             .clock(TIME_SOURCE)
             .serviceRpcFactory(rpcFactoryMock)
    +        .retryParams(RetryParams.noRetries())
             .build();
       }
     
    @@ -239,10 +278,23 @@ public void tearDown() throws Exception {
         EasyMock.verify(rpcFactoryMock, storageRpcMock);
       }
     
    +  private void initializeService() {
    +    storage = options.service();
    +    initializeServiceDependentObjects();
    +  }
    +
    +  private void initializeServiceDependentObjects() {
    +    expectedBlob1 = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO1));
    +    expectedBlob2 = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO2));
    +    expectedBlob3 = new Blob(storage, new BlobInfo.BuilderImpl(BLOB_INFO3));
    +    expectedBucket1 = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO1));
    +    expectedBucket2 = new Bucket(storage, new BucketInfo.BuilderImpl(BUCKET_INFO2));
    +  }
    +
       @Test
       public void testGetOptions() {
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         assertSame(options, storage.options());
       }
     
    @@ -251,9 +303,9 @@ public void testCreateBucket() {
         EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(BUCKET_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket = storage.create(BUCKET_INFO1);
    -    assertEquals(BUCKET_INFO1.toPb(), bucket.toPb());
    +    initializeService();
    +    Bucket bucket = storage.create(BUCKET_INFO1);
    +    assertEquals(expectedBucket1, bucket);
       }
     
       @Test
    @@ -261,10 +313,10 @@ public void testCreateBucketWithOptions() {
         EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), BUCKET_TARGET_OPTIONS))
             .andReturn(BUCKET_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket =
    +    initializeService();
    +    Bucket bucket =
             storage.create(BUCKET_INFO1, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL);
    -    assertEquals(BUCKET_INFO1, bucket);
    +    assertEquals(expectedBucket1, bucket);
       }
     
       @Test
    @@ -276,9 +328,9 @@ public void testCreateBlob() throws IOException {
             EasyMock.eq(EMPTY_RPC_OPTIONS)))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.create(BLOB_INFO1, BLOB_CONTENT);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.create(BLOB_INFO1, BLOB_CONTENT);
    +    assertEquals(expectedBlob1, blob);
         ByteArrayInputStream byteStream = capturedStream.getValue();
         byte[] streamBytes = new byte[BLOB_CONTENT.length];
         assertEquals(BLOB_CONTENT.length, byteStream.read(streamBytes));
    @@ -299,9 +351,9 @@ public void testCreateEmptyBlob() throws IOException {
             EasyMock.eq(EMPTY_RPC_OPTIONS)))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.create(BLOB_INFO1);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.create(BLOB_INFO1);
    +    assertEquals(expectedBlob1, blob);
         ByteArrayInputStream byteStream = capturedStream.getValue();
         byte[] streamBytes = new byte[BLOB_CONTENT.length];
         assertEquals(-1, byteStream.read(streamBytes));
    @@ -320,11 +372,11 @@ public void testCreateBlobWithOptions() throws IOException {
             EasyMock.eq(BLOB_TARGET_OPTIONS_CREATE)))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob =
    +    initializeService();
    +    Blob blob =
             storage.create(BLOB_INFO1, BLOB_CONTENT, BLOB_TARGET_METAGENERATION, BLOB_TARGET_NOT_EXIST,
                 BLOB_TARGET_PREDEFINED_ACL);
    -    assertEquals(BLOB_INFO1, blob);
    +    assertEquals(expectedBlob1, blob);
         ByteArrayInputStream byteStream = capturedStream.getValue();
         byte[] streamBytes = new byte[BLOB_CONTENT.length];
         assertEquals(BLOB_CONTENT.length, byteStream.read(streamBytes));
    @@ -333,7 +385,7 @@ public void testCreateBlobWithOptions() throws IOException {
       }
     
       @Test
    -  public void testCreateBlobFromStream() throws IOException {
    +  public void testCreateBlobFromStream() {
         ByteArrayInputStream fileStream = new ByteArrayInputStream(BLOB_CONTENT);
         BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder();
         BlobInfo infoWithHashes = infoBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build();
    @@ -341,9 +393,9 @@ public void testCreateBlobFromStream() throws IOException {
         EasyMock.expect(storageRpcMock.create(infoWithoutHashes.toPb(), fileStream, EMPTY_RPC_OPTIONS))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.create(infoWithHashes, fileStream);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.create(infoWithHashes, fileStream);
    +    assertEquals(expectedBlob1, blob);
       }
     
       @Test
    @@ -351,21 +403,54 @@ public void testGetBucket() {
         EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(BUCKET_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket = storage.get(BUCKET_NAME1);
    -    assertEquals(BUCKET_INFO1, bucket);
    +    initializeService();
    +    Bucket bucket = storage.get(BUCKET_NAME1);
    +    assertEquals(expectedBucket1, bucket);
       }
     
       @Test
       public void testGetBucketWithOptions() {
    -    EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_SOURCE_OPTIONS))
    +    EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_GET_OPTIONS))
             .andReturn(BUCKET_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket =
    -        storage.get(BUCKET_NAME1,
    -            Storage.BucketSourceOption.metagenerationMatch(BUCKET_INFO1.metageneration()));
    -    assertEquals(BUCKET_INFO1, bucket);
    +    initializeService();
    +    Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION);
    +    assertEquals(expectedBucket1, bucket);
    +  }
    +
    +  @Test
    +  public void testGetBucketWithSelectedFields() {
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
    +    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()),
    +        EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb());
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, BUCKET_GET_FIELDS);
    +    assertEquals(BUCKET_GET_METAGENERATION.value(),
    +        capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("location"));
    +    assertTrue(selector.contains("acl"));
    +    assertEquals(17, selector.length());
    +    assertEquals(BUCKET_INFO1.name(), bucket.name());
    +  }
    +
    +  @Test
    +  public void testGetBucketWithEmptyFields() {
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
    +    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()),
    +        EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb());
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    Bucket bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION,
    +        BUCKET_GET_EMPTY_FIELDS);
    +    assertEquals(BUCKET_GET_METAGENERATION.value(),
    +        capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
    +    assertTrue(selector.contains("name"));
    +    assertEquals(4, selector.length());
    +    assertEquals(BUCKET_INFO1.name(), bucket.name());
       }
     
       @Test
    @@ -374,35 +459,91 @@ public void testGetBlob() {
             storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1);
    +    assertEquals(expectedBlob1, blob);
       }
     
       @Test
       public void testGetBlobWithOptions() {
         EasyMock.expect(
    -        storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS))
    +        storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_GET_OPTIONS))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob =
    -        storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_METAGENERATION, BLOB_SOURCE_GENERATION);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob =
    +        storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, BLOB_GET_GENERATION);
    +    assertEquals(expectedBlob1, blob);
    +  }
    +
    +  @Test
    +  public void testGetBlobWithOptionsFromBlobId() {
    +    EasyMock.expect(
    +        storageRpcMock.get(BLOB_INFO1.blobId().toPb(), BLOB_GET_OPTIONS))
    +        .andReturn(BLOB_INFO1.toPb());
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    Blob blob =
    +        storage.get(BLOB_INFO1.blobId(), BLOB_GET_METAGENERATION, BLOB_GET_GENERATION_FROM_BLOB_ID);
    +    assertEquals(expectedBlob1, blob);
    +  }
    +
    +  @Test
    +  public void testGetBlobWithSelectedFields() {
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
    +    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()),
    +        EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb());
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    Blob blob = storage.get(
    +        BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION,
    +        BLOB_GET_GENERATION, BLOB_GET_FIELDS);
    +    assertEquals(BLOB_GET_METAGENERATION.value(),
    +        capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption()));
    +    assertEquals(BLOB_GET_GENERATION.value(),
    +        capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
    +    assertTrue(selector.contains("bucket"));
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("contentType"));
    +    assertTrue(selector.contains("crc32c"));
    +    assertEquals(30, selector.length());
    +    assertEquals(expectedBlob1, blob);
    +  }
    +
    +  @Test
    +  public void testGetBlobWithEmptyFields() {
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
    +    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()),
    +        EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb());
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    Blob blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION,
    +        BLOB_GET_GENERATION, BLOB_GET_EMPTY_FIELDS);
    +    assertEquals(BLOB_GET_METAGENERATION.value(),
    +        capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption()));
    +    assertEquals(BLOB_GET_GENERATION.value(),
    +        capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
    +    assertTrue(selector.contains("bucket"));
    +    assertTrue(selector.contains("name"));
    +    assertEquals(11, selector.length());
    +    assertEquals(expectedBlob1, blob);
       }
     
       @Test
       public void testListBuckets() {
         String cursor = "cursor";
     -    ImmutableList<BucketInfo> bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     +    ImmutableList<BucketInfo> bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
          Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
    -        Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION));
    +        Tuple.of(cursor, Iterables.transform(bucketInfoList, BucketInfo.TO_PB_FUNCTION));
         EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn(result);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
     -    ListResult<BucketInfo> listResult = storage.list();
    -    assertEquals(cursor, listResult.nextPageCursor());
    -    assertArrayEquals(bucketList.toArray(), Iterables.toArray(listResult, BucketInfo.class));
    +    initializeService();
     +    ImmutableList<Bucket> bucketList = ImmutableList.of(expectedBucket1, expectedBucket2);
     +    Page<Bucket> page = storage.list();
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class));
       }
     
       @Test
    @@ -410,67 +551,188 @@ public void testListBucketsEmpty() {
         EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn(
              Tuple.<String, Iterable<com.google.api.services.storage.model.Bucket>>of(null, null));
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
     -    ListResult<BucketInfo> listResult = storage.list();
    -    assertNull(listResult.nextPageCursor());
    -    assertArrayEquals(ImmutableList.of().toArray(),
    -        Iterables.toArray(listResult, BucketInfo.class));
    +    initializeService();
     +    Page<Bucket> page = storage.list();
    +    assertNull(page.nextPageCursor());
    +    assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(page.values(), Bucket.class));
       }
     
       @Test
       public void testListBucketsWithOptions() {
         String cursor = "cursor";
     -    ImmutableList<BucketInfo> bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     +    ImmutableList<BucketInfo> bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
          Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
    -        Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION));
    +        Tuple.of(cursor, Iterables.transform(bucketInfoList, BucketInfo.TO_PB_FUNCTION));
         EasyMock.expect(storageRpcMock.list(BUCKET_LIST_OPTIONS)).andReturn(result);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
     -    ListResult<BucketInfo> listResult = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX);
    -    assertEquals(cursor, listResult.nextPageCursor());
    -    assertArrayEquals(bucketList.toArray(), Iterables.toArray(listResult, BucketInfo.class));
    +    initializeService();
     +    ImmutableList<Bucket> bucketList = ImmutableList.of(expectedBucket1, expectedBucket2);
     +    Page<Bucket> page = storage.list(BUCKET_LIST_PAGE_SIZE, BUCKET_LIST_PREFIX);
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class));
    +  }
    +
    +  @Test
    +  public void testListBucketsWithSelectedFields() {
    +    String cursor = "cursor";
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
     +    ImmutableList<BucketInfo> bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     +    Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
    +        Tuple.of(cursor, Iterables.transform(bucketInfoList, BucketInfo.TO_PB_FUNCTION));
    +    EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
     +    ImmutableList<Bucket> bucketList = ImmutableList.of(expectedBucket1, expectedBucket2);
     +    Page<Bucket> page = storage.list(BUCKET_LIST_FIELDS);
    +    String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption());
    +    assertTrue(selector.contains("items"));
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("acl"));
    +    assertTrue(selector.contains("location"));
    +    assertTrue(selector.contains("nextPageToken"));
    +    assertEquals(38, selector.length());
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class));
    +  }
    +
    +  @Test
    +  public void testListBucketsWithEmptyFields() {
    +    String cursor = "cursor";
     +    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
     +    ImmutableList<BucketInfo> bucketInfoList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     +    Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
    +        Tuple.of(cursor, Iterables.transform(bucketInfoList, BucketInfo.TO_PB_FUNCTION));
    +    EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
     +    ImmutableList<Bucket> bucketList = ImmutableList.of(expectedBucket1, expectedBucket2);
     +    Page<Bucket> page = storage.list(BUCKET_LIST_EMPTY_FIELDS);
    +    String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption());
    +    assertTrue(selector.contains("items"));
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("nextPageToken"));
    +    assertEquals(25, selector.length());
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), Bucket.class));
       }
     
       @Test
       public void testListBlobs() {
         String cursor = "cursor";
     -    ImmutableList<BlobInfo> blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
     +    ImmutableList<BlobInfo> blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
          Tuple<String, Iterable<com.google.api.services.storage.model.StorageObject>> result =
    -        Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION));
    +        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
         EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, EMPTY_RPC_OPTIONS)).andReturn(result);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
     -    ListResult<BlobInfo> listResult = storage.list(BUCKET_NAME1);
    -    assertEquals(cursor, listResult.nextPageCursor());
    -    assertArrayEquals(blobList.toArray(), Iterables.toArray(listResult, BlobInfo.class));
    +    initializeService();
     +    ImmutableList<Blob> blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
     +    Page<Blob> page = storage.list(BUCKET_NAME1);
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
       }
     
       @Test
       public void testListBlobsEmpty() {
         EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, EMPTY_RPC_OPTIONS))
    -        .andReturn(
     -            Tuple.<String, Iterable<com.google.api.services.storage.model.StorageObject>>of(null,
     -                null));
     +        .andReturn(Tuple.<String, Iterable<com.google.api.services.storage.model.StorageObject>>of(
     +            null, null));
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
     -    ListResult<BlobInfo> listResult = storage.list(BUCKET_NAME1);
    -    assertNull(listResult.nextPageCursor());
    -    assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(listResult, BlobInfo.class));
    +    initializeService();
     +    Page<Blob> page = storage.list(BUCKET_NAME1);
    +    assertNull(page.nextPageCursor());
    +    assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(page.values(), Blob.class));
       }
     
       @Test
       public void testListBlobsWithOptions() {
         String cursor = "cursor";
     -    ImmutableList<BlobInfo> blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
     +    ImmutableList<BlobInfo> blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
          Tuple<String, Iterable<com.google.api.services.storage.model.StorageObject>> result =
    -        Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION));
    +        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
         EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, BLOB_LIST_OPTIONS)).andReturn(result);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    ListResult listResult =
    -        storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX);
    -    assertEquals(cursor, listResult.nextPageCursor());
    -    assertArrayEquals(blobList.toArray(), Iterables.toArray(listResult, BlobInfo.class));
    +    initializeService();
    +    ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
    +    Page page =
    +        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_VERSIONS);
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
    +  }
    +
    +  @Test
    +  public void testListBlobsWithSelectedFields() {
    +    String cursor = "cursor";
    +    Capture> capturedOptions = Capture.newInstance();
    +    ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
    +    Tuple> result =
    +        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
    +    EasyMock.expect(
    +        storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions)))
    +        .andReturn(result);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
    +    Page page =
    +        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_FIELDS);
    +    assertEquals(BLOB_LIST_PAGE_SIZE.value(),
    +        capturedOptions.getValue().get(BLOB_LIST_PAGE_SIZE.rpcOption()));
    +    assertEquals(BLOB_LIST_PREFIX.value(),
    +        capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption());
    +    assertTrue(selector.contains("items"));
    +    assertTrue(selector.contains("bucket"));
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("contentType"));
    +    assertTrue(selector.contains("md5Hash"));
    +    assertTrue(selector.contains("nextPageToken"));
    +    assertEquals(52, selector.length());
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
    +  }
    +
    +  @Test
    +  public void testListBlobsWithEmptyFields() {
    +    String cursor = "cursor";
    +    Capture> capturedOptions = Capture.newInstance();
    +    ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
    +    Tuple> result =
    +        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
    +    EasyMock.expect(
    +        storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions)))
    +        .andReturn(result);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
    +    Page page =
    +        storage.list(BUCKET_NAME1, BLOB_LIST_PAGE_SIZE, BLOB_LIST_PREFIX, BLOB_LIST_EMPTY_FIELDS);
    +    assertEquals(BLOB_LIST_PAGE_SIZE.value(),
    +        capturedOptions.getValue().get(BLOB_LIST_PAGE_SIZE.rpcOption()));
    +    assertEquals(BLOB_LIST_PREFIX.value(),
    +        capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption()));
    +    String selector = (String) capturedOptions.getValue().get(BLOB_LIST_EMPTY_FIELDS.rpcOption());
    +    assertTrue(selector.contains("items"));
    +    assertTrue(selector.contains("bucket"));
    +    assertTrue(selector.contains("name"));
    +    assertTrue(selector.contains("nextPageToken"));
    +    assertEquals(32, selector.length());
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
    +  }
    +
    +  @Test
    +  public void testListBlobsCurrentDirectory() {
    +    String cursor = "cursor";
    +    Map options = ImmutableMap.of(StorageRpc.Option.DELIMITER, "/");
    +    ImmutableList blobInfoList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2);
    +    Tuple> result =
    +        Tuple.of(cursor, Iterables.transform(blobInfoList, BlobInfo.INFO_TO_PB_FUNCTION));
    +    EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, options)).andReturn(result);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    ImmutableList blobList = ImmutableList.of(expectedBlob1, expectedBlob2);
    +    Page page = storage.list(BUCKET_NAME1, Storage.BlobListOption.currentDirectory());
    +    assertEquals(cursor, page.nextPageCursor());
    +    assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), Blob.class));
       }
     
       @Test
    @@ -479,9 +741,9 @@ public void testUpdateBucket() {
         EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(updatedBucketInfo.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket = storage.update(updatedBucketInfo);
    -    assertEquals(updatedBucketInfo, bucket);
    +    initializeService();
    +    Bucket bucket = storage.update(updatedBucketInfo);
    +    assertEquals(new Bucket(storage, new BucketInfo.BuilderImpl(updatedBucketInfo)), bucket);
       }
     
       @Test
    @@ -490,11 +752,11 @@ public void testUpdateBucketWithOptions() {
         EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), BUCKET_TARGET_OPTIONS))
             .andReturn(updatedBucketInfo.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BucketInfo bucket =
    +    initializeService();
    +    Bucket bucket =
             storage.update(updatedBucketInfo, BUCKET_TARGET_METAGENERATION,
                 BUCKET_TARGET_PREDEFINED_ACL);
    -    assertEquals(updatedBucketInfo, bucket);
    +    assertEquals(new Bucket(storage, new BucketInfo.BuilderImpl(updatedBucketInfo)), bucket);
       }
     
       @Test
    @@ -503,9 +765,9 @@ public void testUpdateBlob() {
         EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(updatedBlobInfo.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.update(updatedBlobInfo);
    -    assertEquals(updatedBlobInfo, blob);
    +    initializeService();
    +    Blob blob = storage.update(updatedBlobInfo);
    +    assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(updatedBlobInfo)), blob);
       }
     
       @Test
    @@ -514,10 +776,10 @@ public void testUpdateBlobWithOptions() {
         EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), BLOB_TARGET_OPTIONS_UPDATE))
             .andReturn(updatedBlobInfo.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob =
    +    initializeService();
    +    Blob blob =
             storage.update(updatedBlobInfo, BLOB_TARGET_METAGENERATION, BLOB_TARGET_PREDEFINED_ACL);
    -    assertEquals(updatedBlobInfo, blob);
    +    assertEquals(new Blob(storage, new BlobInfo.BuilderImpl(updatedBlobInfo)), blob);
       }
     
       @Test
    @@ -525,7 +787,7 @@ public void testDeleteBucket() {
         EasyMock.expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(true);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         assertTrue(storage.delete(BUCKET_NAME1));
       }
     
    @@ -535,7 +797,7 @@ public void testDeleteBucketWithOptions() {
             .expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_SOURCE_OPTIONS))
             .andReturn(true);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         assertTrue(storage.delete(BUCKET_NAME1, BUCKET_SOURCE_METAGENERATION));
       }
     
    @@ -545,7 +807,7 @@ public void testDeleteBlob() {
             storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(true);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1));
       }
     
    @@ -555,11 +817,22 @@ public void testDeleteBlobWithOptions() {
             storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS))
             .andReturn(true);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION,
             BLOB_SOURCE_METAGENERATION));
       }
     
    +  @Test
    +  public void testDeleteBlobWithOptionsFromBlobId() {
    +    EasyMock.expect(
    +        storageRpcMock.delete(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS))
    +        .andReturn(true);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    assertTrue(storage.delete(BLOB_INFO1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID,
    +        BLOB_SOURCE_METAGENERATION));
    +  }
    +
       @Test
       public void testCompose() {
         Storage.ComposeRequest req = Storage.ComposeRequest.builder()
    @@ -569,9 +842,9 @@ public void testCompose() {
         EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()),
             BLOB_INFO1.toPb(), EMPTY_RPC_OPTIONS)).andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.compose(req);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.compose(req);
    +    assertEquals(expectedBlob1, blob);
       }
     
       @Test
    @@ -584,9 +857,9 @@ public void testComposeWithOptions() {
         EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()),
             BLOB_INFO1.toPb(), BLOB_TARGET_OPTIONS_COMPOSE)).andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobInfo blob = storage.compose(req);
    -    assertEquals(BLOB_INFO1, blob);
    +    initializeService();
    +    Blob blob = storage.compose(req);
    +    assertEquals(expectedBlob1, blob);
       }
     
       @Test
    @@ -598,7 +871,7 @@ public void testCopy() {
             false, "token", 21L);
         EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         CopyWriter writer = storage.copy(request);
         assertEquals(42L, writer.blobSize());
         assertEquals(21L, writer.totalBytesCopied());
    @@ -618,7 +891,27 @@ public void testCopyWithOptions() {
             false, "token", 21L);
         EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
    +    CopyWriter writer = storage.copy(request);
    +    assertEquals(42L, writer.blobSize());
    +    assertEquals(21L, writer.totalBytesCopied());
    +    assertTrue(!writer.isDone());
    +  }
    +
    +  @Test
    +  public void testCopyWithOptionsFromBlobId() {
    +    CopyRequest request = Storage.CopyRequest.builder()
    +        .source(BLOB_INFO1.blobId())
    +        .sourceOptions(BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION)
    +        .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
    +        .build();
    +    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
    +        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
    +    StorageRpc.RewriteResponse rpcResponse =
    +        new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L);
    +    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
         CopyWriter writer = storage.copy(request);
         assertEquals(42L, writer.blobSize());
         assertEquals(21L, writer.totalBytesCopied());
    @@ -637,7 +930,7 @@ public void testCopyMultipleRequests() {
         EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse1);
         EasyMock.expect(storageRpcMock.continueRewrite(rpcResponse1)).andReturn(rpcResponse2);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         CopyWriter writer = storage.copy(request);
         assertEquals(42L, writer.blobSize());
         assertEquals(21L, writer.totalBytesCopied());
    @@ -654,7 +947,7 @@ public void testReadAllBytes() {
             storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS))
             .andReturn(BLOB_CONTENT);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1);
         assertArrayEquals(BLOB_CONTENT, readBytes);
       }
    @@ -665,12 +958,24 @@ public void testReadAllBytesWithOptions() {
             storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS))
             .andReturn(BLOB_CONTENT);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION,
             BLOB_SOURCE_METAGENERATION);
         assertArrayEquals(BLOB_CONTENT, readBytes);
       }
     
    +  @Test
    +  public void testReadAllBytesWithOptionsFromBlobId() {
    +    EasyMock.expect(
    +        storageRpcMock.load(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS))
    +        .andReturn(BLOB_CONTENT);
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    byte[] readBytes = storage.readAllBytes(BLOB_INFO1.blobId(),
    +        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
    +    assertArrayEquals(BLOB_CONTENT, readBytes);
    +  }
    +
       @Test
       public void testApply() {
         BatchRequest req = BatchRequest.builder()
    @@ -712,12 +1017,11 @@ public Tuple apply(StorageObject f) {
         StorageRpc.BatchResponse res =
             new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);
     
    -
         Capture capturedBatchRequest = Capture.newInstance();
         EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BatchResponse batchResponse = storage.apply(req);
    +    initializeService();
    +    BatchResponse batchResponse = storage.submit(req);
     
         // Verify captured StorageRpc.BatchRequest
         List>> capturedToDelete =
    @@ -754,8 +1058,8 @@ public Tuple apply(StorageObject f) {
       @Test
       public void testReader() {
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1);
    +    initializeService();
    +    ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1);
         assertNotNull(channel);
         assertTrue(channel.isOpen());
       }
    @@ -765,16 +1069,31 @@ public void testReaderWithOptions() throws IOException {
         byte[] result = new byte[DEFAULT_CHUNK_SIZE];
         EasyMock.expect(
             storageRpcMock.read(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    -        .andReturn(result);
    +        .andReturn(StorageRpc.Tuple.of("etag", result));
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION,
    +    initializeService();
    +    ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION,
             BLOB_SOURCE_METAGENERATION);
         assertNotNull(channel);
         assertTrue(channel.isOpen());
         channel.read(ByteBuffer.allocate(42));
       }
     
    +  @Test
    +  public void testReaderWithOptionsFromBlobId() throws IOException {
    +    byte[] result = new byte[DEFAULT_CHUNK_SIZE];
    +    EasyMock.expect(
    +        storageRpcMock.read(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
    +        .andReturn(StorageRpc.Tuple.of("etag", result));
    +    EasyMock.replay(storageRpcMock);
    +    initializeService();
    +    ReadChannel channel = storage.reader(BLOB_INFO1.blobId(),
    +        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
    +    assertNotNull(channel);
    +    assertTrue(channel.isOpen());
    +    channel.read(ByteBuffer.allocate(42));
    +  }
    +
       @Test
       public void testWriter() {
         BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder();
    @@ -783,8 +1102,8 @@ public void testWriter() {
         EasyMock.expect(storageRpcMock.open(infoWithoutHashes.toPb(), EMPTY_RPC_OPTIONS))
             .andReturn("upload-id");
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobWriteChannel channel = storage.writer(infoWithHashes);
    +    initializeService();
    +    WriteChannel channel = storage.writer(infoWithHashes);
         assertNotNull(channel);
         assertTrue(channel.isOpen());
       }
    @@ -795,8 +1114,8 @@ public void testWriterWithOptions() {
         EasyMock.expect(storageRpcMock.open(info.toPb(), BLOB_TARGET_OPTIONS_CREATE))
             .andReturn("upload-id");
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    BlobWriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST,
    +    initializeService();
    +    WriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST,
             BLOB_WRITE_PREDEFINED_ACL, BLOB_WRITE_CRC2C, BLOB_WRITE_MD5_HASH);
         assertNotNull(channel);
         assertTrue(channel.isOpen());
    @@ -881,12 +1200,11 @@ public Tuple apply(StorageObject f) {
         StorageRpc.BatchResponse res =
             new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);
     
    -
         Capture capturedBatchRequest = Capture.newInstance();
         EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    List resultBlobs = storage.get(blobId1, blobId2);
    +    initializeService();
    +    List resultBlobs = storage.get(blobId1, blobId2);
     
         // Verify captured StorageRpc.BatchRequest
         List>> capturedToGet =
    @@ -924,12 +1242,11 @@ public Tuple apply(StorageObject f) {
         StorageRpc.BatchResponse res =
             new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);
     
    -
         Capture capturedBatchRequest = Capture.newInstance();
         EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    -    List resultBlobs = storage.update(blobInfo1, blobInfo2);
    +    initializeService();
    +    List resultBlobs = storage.update(blobInfo1, blobInfo2);
     
         // Verify captured StorageRpc.BatchRequest
         List>> capturedToUpdate =
    @@ -970,7 +1287,7 @@ public Tuple apply(StorageObject f) {
         Capture capturedBatchRequest = Capture.newInstance();
         EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
         EasyMock.replay(storageRpcMock);
    -    storage = options.service();
    +    initializeService();
         List deleteResults = storage.delete(blobInfo1.blobId(), blobInfo2.blobId());
     
         // Verify captured StorageRpc.BatchRequest
    @@ -993,12 +1310,13 @@ public Tuple apply(StorageObject f) {
       public void testRetryableException() {
         BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1);
         EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
    -        .andThrow(new StorageException(500, "InternalError", true))
    +        .andThrow(new StorageException(500, "internalError"))
             .andReturn(BLOB_INFO1.toPb());
         EasyMock.replay(storageRpcMock);
    -    storage = options.toBuilder().retryParams(RetryParams.getDefaultInstance()).build().service();
    -    BlobInfo readBlob = storage.get(blob);
    -    assertEquals(BLOB_INFO1, readBlob);
    +    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
    +    initializeServiceDependentObjects();
    +    Blob readBlob = storage.get(blob);
    +    assertEquals(expectedBlob1, readBlob);
       }
     
       @Test
    @@ -1006,9 +1324,10 @@ public void testNonRetryableException() {
         BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1);
         String exceptionMessage = "Not Implemented";
         EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
    -        .andThrow(new StorageException(501, exceptionMessage, false));
    +        .andThrow(new StorageException(501, exceptionMessage));
         EasyMock.replay(storageRpcMock);
    -    storage = options.toBuilder().retryParams(RetryParams.getDefaultInstance()).build().service();
    +    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
    +    initializeServiceDependentObjects();
         thrown.expect(StorageException.class);
         thrown.expectMessage(exceptionMessage);
         storage.get(blob);
    @@ -1021,7 +1340,7 @@ public void testRuntimeException() {
         EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
             .andThrow(new RuntimeException(exceptionMessage));
         EasyMock.replay(storageRpcMock);
    -    storage = options.toBuilder().retryParams(RetryParams.getDefaultInstance()).build().service();
    +    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
         thrown.expect(StorageException.class);
         thrown.expectMessage(exceptionMessage);
         storage.get(blob);
    diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java
    new file mode 100644
    index 000000000000..563a621c48fb
    --- /dev/null
    +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/it/ITStorageTest.java
    @@ -0,0 +1,1152 @@
    +/*
    + * Copyright 2015 Google Inc. All Rights Reserved.
    + *
    + * Licensed under the Apache License, Version 2.0 (the "License");
    + * you may not use this file except in compliance with the License.
    + * You may obtain a copy of the License at
    + *
    + *       http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package com.google.gcloud.storage.it;
    +
    +import static java.nio.charset.StandardCharsets.UTF_8;
    +import static org.junit.Assert.assertArrayEquals;
    +import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertFalse;
    +import static org.junit.Assert.assertNotNull;
    +import static org.junit.Assert.assertNull;
    +import static org.junit.Assert.assertTrue;
    +import static org.junit.Assert.fail;
    +
    +import com.google.common.collect.ImmutableList;
    +import com.google.common.collect.ImmutableMap;
    +import com.google.common.collect.ImmutableSet;
    +import com.google.common.collect.Iterators;
    +import com.google.common.collect.Lists;
    +import com.google.gcloud.Page;
    +import com.google.gcloud.ReadChannel;
    +import com.google.gcloud.RestorableState;
    +import com.google.gcloud.WriteChannel;
    +import com.google.gcloud.storage.BatchRequest;
    +import com.google.gcloud.storage.BatchResponse;
    +import com.google.gcloud.storage.Blob;
    +import com.google.gcloud.storage.BlobId;
    +import com.google.gcloud.storage.BlobInfo;
    +import com.google.gcloud.storage.Bucket;
    +import com.google.gcloud.storage.BucketInfo;
    +import com.google.gcloud.storage.CopyWriter;
    +import com.google.gcloud.storage.HttpMethod;
    +import com.google.gcloud.storage.Storage;
    +import com.google.gcloud.storage.Storage.BlobField;
    +import com.google.gcloud.storage.Storage.BucketField;
    +import com.google.gcloud.storage.StorageException;
    +import com.google.gcloud.storage.testing.RemoteGcsHelper;
    +
    +import org.junit.AfterClass;
    +import org.junit.BeforeClass;
    +import org.junit.Test;
    +
    +import java.io.ByteArrayInputStream;
    +import java.io.ByteArrayOutputStream;
    +import java.io.IOException;
    +import java.io.InputStream;
    +import java.net.URL;
    +import java.net.URLConnection;
    +import java.nio.ByteBuffer;
    +import java.util.Arrays;
    +import java.util.HashMap;
    +import java.util.Iterator;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Random;
    +import java.util.Set;
    +import java.util.concurrent.ExecutionException;
    +import java.util.concurrent.TimeUnit;
    +import java.util.logging.Level;
    +import java.util.logging.Logger;
    +
    +public class ITStorageTest {
    +
    +  private static Storage storage;
    +
    +  private static final Logger log = Logger.getLogger(ITStorageTest.class.getName());
    +  private static final String BUCKET = RemoteGcsHelper.generateBucketName();
    +  private static final String CONTENT_TYPE = "text/plain";
    +  private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD};
    +  private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!";
    +  private static final int MAX_BATCH_DELETES = 100;
    +
    +  @BeforeClass
    +  public static void beforeClass() {
    +    RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
    +    storage = gcsHelper.options().service();
    +    storage.create(BucketInfo.of(BUCKET));
    +  }
    +
    +  @AfterClass
    +  public static void afterClass() throws ExecutionException, InterruptedException {
    +    if (storage != null) {
    +      boolean wasDeleted = RemoteGcsHelper.forceDelete(storage, BUCKET, 5, TimeUnit.SECONDS);
    +      if (!wasDeleted && log.isLoggable(Level.WARNING)) {
    +        log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET);
    +      }
    +    }
    +  }
    +
    +  @Test(timeout = 5000)
    +  public void testListBuckets() throws InterruptedException {
    +    Iterator bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET),
    +        Storage.BucketListOption.fields()).iterateAll();
    +    while (!bucketIterator.hasNext()) {
    +      Thread.sleep(500);
    +      bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET),
    +          Storage.BucketListOption.fields()).iterateAll();
    +    }
    +    while (bucketIterator.hasNext()) {
    +      Bucket remoteBucket = bucketIterator.next();
    +      assertTrue(remoteBucket.name().startsWith(BUCKET));
    +      assertNull(remoteBucket.createTime());
    +      assertNull(remoteBucket.selfLink());
    +    }
    +  }
    +
    +  @Test
    +  public void testGetBucketSelectedFields() {
    +    Bucket remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields(BucketField.ID));
    +    assertEquals(BUCKET, remoteBucket.name());
    +    assertNull(remoteBucket.createTime());
    +    assertNotNull(remoteBucket.id());
    +  }
    +
    +  @Test
    +  public void testGetBucketAllSelectedFields() {
    +    Bucket remoteBucket = storage.get(BUCKET,
    +        Storage.BucketGetOption.fields(BucketField.values()));
    +    assertEquals(BUCKET, remoteBucket.name());
    +    assertNotNull(remoteBucket.createTime());
    +    assertNotNull(remoteBucket.selfLink());
    +  }
    +
    +  @Test
    +  public void testGetBucketEmptyFields() {
    +    Bucket remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields());
    +    assertEquals(BUCKET, remoteBucket.name());
    +    assertNull(remoteBucket.createTime());
    +    assertNull(remoteBucket.selfLink());
    +  }
    +
    +  @Test
    +  public void testCreateBlob() {
    +    String blobName = "test-create-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteBlob);
    +    assertEquals(blob.bucket(), remoteBlob.bucket());
    +    assertEquals(blob.name(), remoteBlob.name());
    +    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    +    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testCreateEmptyBlob() {
    +    String blobName = "test-create-empty-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    assertEquals(blob.bucket(), remoteBlob.bucket());
    +    assertEquals(blob.name(), remoteBlob.name());
    +    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    +    assertArrayEquals(new byte[0], readBytes);
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testCreateBlobStream() {
    +    String blobName = "test-create-blob-stream";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build();
    +    ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
    +    Blob remoteBlob = storage.create(blob, stream);
    +    assertNotNull(remoteBlob);
    +    assertEquals(blob.bucket(), remoteBlob.bucket());
    +    assertEquals(blob.name(), remoteBlob.name());
    +    assertEquals(blob.contentType(), remoteBlob.contentType());
    +    byte[] readBytes = storage.readAllBytes(BUCKET, blobName);
    +    assertEquals(BLOB_STRING_CONTENT, new String(readBytes, UTF_8));
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testCreateBlobFail() {
    +    // Creating with generationMatch() against a generation (-1) that was never
    +    // created must be rejected by the service with a StorageException.
    +    String blobName = "test-create-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L).build();
    +    try {
    +      storage.create(wrongGenerationBlob, BLOB_BYTE_CONTENT,
    +          Storage.BlobTargetOption.generationMatch());
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testCreateBlobMd5Fail() {
    +    // Uploading with md5Match() and an MD5 that does not match the streamed
    +    // content must fail with a StorageException.
    +    // NOTE(review): if the create unexpectedly succeeds, the blob is not cleaned
    +    // up here — presumably the shared bucket teardown removes it; verify.
    +    String blobName = "test-create-blob-md5-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .md5("O1R4G1HJSDUISJjoIYmVhQ==")
    +        .build();
    +    ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8));
    +    try {
    +      storage.create(blob, stream, Storage.BlobWriteOption.md5Match());
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +  }
    +
    +  @Test
    +  public void testGetBlobEmptySelectedFields() {
    +    // Getting a blob with an empty fields() selection returns the identity
    +    // (blobId) but leaves unselected fields such as contentType null.
    +    String blobName = "test-get-empty-selected-fields-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build();
    +    assertNotNull(storage.create(blob));
    +    Blob remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields());
    +    assertEquals(blob.blobId(), remoteBlob.blobId());
    +    assertNull(remoteBlob.contentType());
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobSelectedFields() {
    +    // Selecting only METADATA populates the metadata map while other fields
    +    // (contentType) stay null even though they were set at creation.
    +    String blobName = "test-get-selected-fields-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(ImmutableMap.of("k", "v"))
    +        .build();
    +    assertNotNull(storage.create(blob));
    +    Blob remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields(
    +        BlobField.METADATA));
    +    assertEquals(blob.blobId(), remoteBlob.blobId());
    +    assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata());
    +    assertNull(remoteBlob.contentType());
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobAllSelectedFields() {
    +    // Selecting every BlobField returns a fully-populated blob, including
    +    // server-assigned fields such as id and selfLink.
    +    String blobName = "test-get-all-selected-fields-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(ImmutableMap.of("k", "v"))
    +        .build();
    +    assertNotNull(storage.create(blob));
    +    Blob remoteBlob = storage.get(blob.blobId(),
    +        Storage.BlobGetOption.fields(BlobField.values()));
    +    assertEquals(blob.bucket(), remoteBlob.bucket());
    +    assertEquals(blob.name(), remoteBlob.name());
    +    assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata());
    +    assertNotNull(remoteBlob.id());
    +    assertNotNull(remoteBlob.selfLink());
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobFail() {
    +    // get() with generationMatch(-1) on an existing blob must fail, since the
    +    // blob's actual generation cannot be -1.
    +    String blobName = "test-get-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName);
    +    try {
    +      storage.get(wrongGenerationBlob, Storage.BlobGetOption.generationMatch(-1));
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobFailNonExistingGeneration() {
    +    // Fetching an existing blob by a generation that was never created returns null.
    +    String name = "test-get-blob-fail-non-existing-generation";
    +    Blob created = storage.create(BlobInfo.builder(BUCKET, name).build());
    +    assertNotNull(created);
    +    BlobId missingGeneration = BlobId.of(BUCKET, name, -1L);
    +    assertNull(storage.get(missingGeneration));
    +    assertTrue(created.delete());
    +  }
    +
    +  @Test(timeout = 5000)
    +  public void testListBlobsSelectedFields() throws InterruptedException {
    +    // Lists blobs requesting only the METADATA field: metadata must be returned
    +    // while unselected fields such as contentType come back null.
    +    String[] blobNames = {"test-list-blobs-selected-fields-blob1",
    +        "test-list-blobs-selected-fields-blob2"};
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k", "v");
    +    BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0])
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1])
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Blob remoteBlob1 = storage.create(blob1);
    +    Blob remoteBlob2 = storage.create(blob2);
    +    assertNotNull(remoteBlob1);
    +    assertNotNull(remoteBlob2);
    +    Page<Blob> page = storage.list(BUCKET,
    +        Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"),
    +        Storage.BlobListOption.fields(BlobField.METADATA));
    +    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
    +    // test fails if timeout is reached.
    +    while (Iterators.size(page.iterateAll()) != 2) {
    +      Thread.sleep(500);
    +      page = storage.list(BUCKET,
    +          Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"),
    +          Storage.BlobListOption.fields(BlobField.METADATA));
    +    }
    +    Set<String> blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
    +    Iterator<Blob> iterator = page.iterateAll();
    +    while (iterator.hasNext()) {
    +      Blob remoteBlob = iterator.next();
    +      assertEquals(BUCKET, remoteBlob.bucket());
    +      assertTrue(blobSet.contains(remoteBlob.name()));
    +      assertEquals(metadata, remoteBlob.metadata());
    +      assertNull(remoteBlob.contentType());
    +    }
    +    assertTrue(remoteBlob1.delete());
    +    assertTrue(remoteBlob2.delete());
    +  }
    +
    +  @Test(timeout = 5000)
    +  public void testListBlobsEmptySelectedFields() throws InterruptedException {
    +    // With an empty fields() selection only the identity (bucket/name) is
    +    // populated; contentType must be null even though it was set at creation.
    +    String[] blobNames = {"test-list-blobs-empty-selected-fields-blob1",
    +        "test-list-blobs-empty-selected-fields-blob2"};
    +    BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0])
    +        .contentType(CONTENT_TYPE)
    +        .build();
    +    BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1])
    +        .contentType(CONTENT_TYPE)
    +        .build();
    +    Blob remoteBlob1 = storage.create(blob1);
    +    Blob remoteBlob2 = storage.create(blob2);
    +    assertNotNull(remoteBlob1);
    +    assertNotNull(remoteBlob2);
    +    Page<Blob> page = storage.list(BUCKET,
    +        Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"),
    +        Storage.BlobListOption.fields());
    +    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
    +    // test fails if timeout is reached.
    +    while (Iterators.size(page.iterateAll()) != 2) {
    +      Thread.sleep(500);
    +      page = storage.list(BUCKET,
    +          Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"),
    +          Storage.BlobListOption.fields());
    +    }
    +    Set<String> blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
    +    Iterator<Blob> iterator = page.iterateAll();
    +    while (iterator.hasNext()) {
    +      Blob remoteBlob = iterator.next();
    +      assertEquals(BUCKET, remoteBlob.bucket());
    +      assertTrue(blobSet.contains(remoteBlob.name()));
    +      assertNull(remoteBlob.contentType());
    +    }
    +    assertTrue(remoteBlob1.delete());
    +    assertTrue(remoteBlob2.delete());
    +  }
    +
    +  @Test(timeout = 15000)
    +  public void testListBlobsVersioned() throws ExecutionException, InterruptedException {
    +    // In a versioning-enabled bucket, recreating blob2 yields a second
    +    // generation, so listing with versions(true) must return 3 entries.
    +    String bucketName = RemoteGcsHelper.generateBucketName();
    +    Bucket bucket = storage.create(BucketInfo.builder(bucketName).versioningEnabled(true).build());
    +    try {
    +      String[] blobNames = {"test-list-blobs-versioned-blob1", "test-list-blobs-versioned-blob2"};
    +      BlobInfo blob1 = BlobInfo.builder(bucket, blobNames[0])
    +          .contentType(CONTENT_TYPE)
    +          .build();
    +      BlobInfo blob2 = BlobInfo.builder(bucket, blobNames[1])
    +          .contentType(CONTENT_TYPE)
    +          .build();
    +      Blob remoteBlob1 = storage.create(blob1);
    +      Blob remoteBlob2 = storage.create(blob2);
    +      Blob remoteBlob3 = storage.create(blob2);
    +      assertNotNull(remoteBlob1);
    +      assertNotNull(remoteBlob2);
    +      assertNotNull(remoteBlob3);
    +      Page<Blob> page = storage.list(bucketName,
    +          Storage.BlobListOption.prefix("test-list-blobs-versioned-blob"),
    +          Storage.BlobListOption.versions(true));
    +      // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
    +      // test fails if timeout is reached.
    +      while (Iterators.size(page.iterateAll()) != 3) {
    +        Thread.sleep(500);
    +        page = storage.list(bucketName,
    +            Storage.BlobListOption.prefix("test-list-blobs-versioned-blob"),
    +            Storage.BlobListOption.versions(true));
    +      }
    +      Set<String> blobSet = ImmutableSet.of(blobNames[0], blobNames[1]);
    +      Iterator<Blob> iterator = page.iterateAll();
    +      while (iterator.hasNext()) {
    +        Blob remoteBlob = iterator.next();
    +        assertEquals(bucketName, remoteBlob.bucket());
    +        assertTrue(blobSet.contains(remoteBlob.name()));
    +        assertNotNull(remoteBlob.generation());
    +      }
    +      assertTrue(remoteBlob1.delete());
    +      assertTrue(remoteBlob2.delete());
    +      assertTrue(remoteBlob3.delete());
    +    } finally {
    +      // Always remove the temporary bucket, even if an assertion above failed.
    +      RemoteGcsHelper.forceDelete(storage, bucketName, 5, TimeUnit.SECONDS);
    +    }
    +  }
    +
    +  @Test(timeout = 5000)
    +  public void testListBlobsCurrentDirectory() throws InterruptedException {
    +    // With currentDirectory(), a blob inside a subdirectory is collapsed into a
    +    // synthetic zero-size "directory" entry, while direct children list normally.
    +    String directoryName = "test-list-blobs-current-directory/";
    +    String subdirectoryName = "subdirectory/";
    +    String[] blobNames = {directoryName + subdirectoryName + "blob1",
    +        directoryName + "blob2"};
    +    BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0])
    +        .contentType(CONTENT_TYPE)
    +        .build();
    +    BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1])
    +        .contentType(CONTENT_TYPE)
    +        .build();
    +    Blob remoteBlob1 = storage.create(blob1, BLOB_BYTE_CONTENT);
    +    Blob remoteBlob2 = storage.create(blob2, BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteBlob1);
    +    assertNotNull(remoteBlob2);
    +    Page<Blob> page = storage.list(BUCKET,
    +        Storage.BlobListOption.prefix("test-list-blobs-current-directory/"),
    +        Storage.BlobListOption.currentDirectory());
    +    // Listing blobs is eventually consistent, we loop until the list is of the expected size. The
    +    // test fails if timeout is reached.
    +    while (Iterators.size(page.iterateAll()) != 2) {
    +      Thread.sleep(500);
    +      page = storage.list(BUCKET,
    +          Storage.BlobListOption.prefix("test-list-blobs-current-directory/"),
    +          Storage.BlobListOption.currentDirectory());
    +    }
    +    Iterator<Blob> iterator = page.iterateAll();
    +    while (iterator.hasNext()) {
    +      Blob remoteBlob = iterator.next();
    +      assertEquals(BUCKET, remoteBlob.bucket());
    +      if (remoteBlob.name().equals(blobNames[1])) {
    +        assertEquals(CONTENT_TYPE, remoteBlob.contentType());
    +        assertEquals(BLOB_BYTE_CONTENT.length, (long) remoteBlob.size());
    +        assertFalse(remoteBlob.isDirectory());
    +      } else if (remoteBlob.name().equals(directoryName + subdirectoryName)) {
    +        assertEquals(0L, (long) remoteBlob.size());
    +        assertTrue(remoteBlob.isDirectory());
    +      } else {
    +        fail("Unexpected blob with name " + remoteBlob.name());
    +      }
    +    }
    +    assertTrue(remoteBlob1.delete());
    +    assertTrue(remoteBlob2.delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlob() {
    +    // update() applied via a rebuilt Blob must persist the new contentType while
    +    // keeping the blob's identity (bucket/name) unchanged.
    +    String blobName = "test-update-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    Blob updatedBlob = remoteBlob.toBuilder().contentType(CONTENT_TYPE).build().update();
    +    assertNotNull(updatedBlob);
    +    assertEquals(blob.name(), updatedBlob.name());
    +    assertEquals(blob.bucket(), updatedBlob.bucket());
    +    assertEquals(CONTENT_TYPE, updatedBlob.contentType());
    +    assertTrue(updatedBlob.delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlobReplaceMetadata() {
    +    // Setting metadata to null clears it entirely; a subsequent update with a
    +    // fresh map replaces (not merges) the previous metadata.
    +    String blobName = "test-update-blob-replace-metadata";
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a");
    +    ImmutableMap<String, String> newMetadata = ImmutableMap.of("k2", "b");
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    Blob updatedBlob = remoteBlob.toBuilder().metadata(null).build().update();
    +    assertNotNull(updatedBlob);
    +    assertNull(updatedBlob.metadata());
    +    updatedBlob = remoteBlob.toBuilder().metadata(newMetadata).build().update();
    +    assertEquals(blob.name(), updatedBlob.name());
    +    assertEquals(blob.bucket(), updatedBlob.bucket());
    +    assertEquals(newMetadata, updatedBlob.metadata());
    +    assertTrue(updatedBlob.delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlobMergeMetadata() {
    +    // Updating with a map containing only new keys merges them with the existing
    +    // metadata rather than replacing it.
    +    String blobName = "test-update-blob-merge-metadata";
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a");
    +    ImmutableMap<String, String> newMetadata = ImmutableMap.of("k2", "b");
    +    ImmutableMap<String, String> expectedMetadata = ImmutableMap.of("k1", "a", "k2", "b");
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    Blob updatedBlob = remoteBlob.toBuilder().metadata(newMetadata).build().update();
    +    assertNotNull(updatedBlob);
    +    assertEquals(blob.name(), updatedBlob.name());
    +    assertEquals(blob.bucket(), updatedBlob.bucket());
    +    assertEquals(expectedMetadata, updatedBlob.metadata());
    +    assertTrue(updatedBlob.delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlobUnsetMetadata() {
    +    // A null value for an existing metadata key unsets that key on update; a
    +    // HashMap is required here because ImmutableMap rejects null values.
    +    String blobName = "test-update-blob-unset-metadata";
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k1", "a", "k2", "b");
    +    Map<String, String> newMetadata = new HashMap<>();
    +    newMetadata.put("k1", "a");
    +    newMetadata.put("k2", null);
    +    ImmutableMap<String, String> expectedMetadata = ImmutableMap.of("k1", "a");
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    Blob updatedBlob = remoteBlob.toBuilder().metadata(newMetadata).build().update();
    +    assertNotNull(updatedBlob);
    +    assertEquals(blob.name(), updatedBlob.name());
    +    assertEquals(blob.bucket(), updatedBlob.bucket());
    +    assertEquals(expectedMetadata, updatedBlob.metadata());
    +    assertTrue(updatedBlob.delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlobFail() {
    +    // update() with generationMatch() and a bogus generation (-1) must fail
    +    // the precondition and throw.
    +    String blobName = "test-update-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L)
    +        .contentType(CONTENT_TYPE)
    +        .build();
    +    try {
    +      storage.update(wrongGenerationBlob, Storage.BlobTargetOption.generationMatch());
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testDeleteNonExistingBlob() {
    +    // Deleting a blob that was never created reports false rather than throwing.
    +    assertFalse(storage.delete(BUCKET, "test-delete-non-existing-blob"));
    +  }
    +
    +  @Test
    +  public void testDeleteBlobNonExistingGeneration() {
    +    // Deleting an existing blob by a generation that never existed reports false.
    +    String name = "test-delete-blob-non-existing-generation";
    +    assertNotNull(storage.create(BlobInfo.builder(BUCKET, name).build()));
    +    assertFalse(storage.delete(BlobId.of(BUCKET, name, -1L)));
    +  }
    +
    +  @Test
    +  public void testDeleteBlobFail() {
    +    // delete() with generationMatch(-1L) on an existing blob must fail the
    +    // precondition and throw, leaving the blob in place.
    +    String blobName = "test-delete-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    try {
    +      storage.delete(BUCKET, blob.name(), Storage.BlobSourceOption.generationMatch(-1L));
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testComposeBlob() {
    +    // Composes two source blobs (each holding BLOB_BYTE_CONTENT) into a target
    +    // and verifies the target's content is the source bytes concatenated twice.
    +    String sourceBlobName1 = "test-compose-blob-source-1";
    +    String sourceBlobName2 = "test-compose-blob-source-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    Blob remoteSourceBlob1 = storage.create(sourceBlob1, BLOB_BYTE_CONTENT);
    +    Blob remoteSourceBlob2 = storage.create(sourceBlob2, BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteSourceBlob1);
    +    assertNotNull(remoteSourceBlob2);
    +    String targetBlobName = "test-compose-blob-target";
    +    BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build();
    +    Storage.ComposeRequest req =
    +        Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob);
    +    Blob remoteTargetBlob = storage.compose(req);
    +    assertNotNull(remoteTargetBlob);
    +    assertEquals(targetBlob.name(), remoteTargetBlob.name());
    +    assertEquals(targetBlob.bucket(), remoteTargetBlob.bucket());
    +    byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName);
    +    // Build the expected concatenation: copyOf pads to double length, then the
    +    // second half is filled with another copy of the source bytes.
    +    byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2);
    +    System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length,
    +        BLOB_BYTE_CONTENT.length);
    +    assertArrayEquals(composedBytes, readBytes);
    +    assertTrue(remoteSourceBlob1.delete());
    +    assertTrue(remoteSourceBlob2.delete());
    +    assertTrue(remoteTargetBlob.delete());
    +  }
    +
    +  @Test
    +  public void testComposeBlobFail() {
    +    // compose() with per-source generation preconditions of -1 must be rejected,
    +    // since the real sources cannot have that generation.
    +    String sourceBlobName1 = "test-compose-blob-fail-source-1";
    +    String sourceBlobName2 = "test-compose-blob-fail-source-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    Blob remoteSourceBlob1 = storage.create(sourceBlob1);
    +    Blob remoteSourceBlob2 = storage.create(sourceBlob2);
    +    assertNotNull(remoteSourceBlob1);
    +    assertNotNull(remoteSourceBlob2);
    +    String targetBlobName = "test-compose-blob-fail-target";
    +    BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build();
    +    Storage.ComposeRequest req = Storage.ComposeRequest.builder()
    +        .addSource(sourceBlobName1, -1L)
    +        .addSource(sourceBlobName2, -1L)
    +        .target(targetBlob)
    +        .build();
    +    try {
    +      storage.compose(req);
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteSourceBlob1.delete());
    +    assertTrue(remoteSourceBlob2.delete());
    +  }
    +
    +  @Test
    +  public void testCopyBlob() {
    +    // A plain copy (no target overrides) must carry the source's contentType
    +    // and metadata over to the copied blob.
    +    String sourceBlobName = "test-copy-blob-source";
    +    BlobId source = BlobId.of(BUCKET, sourceBlobName);
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k", "v");
    +    BlobInfo blob = BlobInfo.builder(source)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteBlob);
    +    String targetBlobName = "test-copy-blob-target";
    +    Storage.CopyRequest req = Storage.CopyRequest.of(source, BlobId.of(BUCKET, targetBlobName));
    +    CopyWriter copyWriter = storage.copy(req);
    +    assertEquals(BUCKET, copyWriter.result().bucket());
    +    assertEquals(targetBlobName, copyWriter.result().name());
    +    assertEquals(CONTENT_TYPE, copyWriter.result().contentType());
    +    assertEquals(metadata, copyWriter.result().metadata());
    +    assertTrue(copyWriter.isDone());
    +    assertTrue(remoteBlob.delete());
    +    assertTrue(storage.delete(BUCKET, targetBlobName));
    +  }
    +
    +  @Test
    +  public void testCopyBlobUpdateMetadata() {
    +    // Copying with a target BlobInfo that sets contentType/metadata must apply
    +    // those values to the copy, overriding the (empty) source attributes.
    +    String sourceBlobName = "test-copy-blob-update-metadata-source";
    +    BlobId source = BlobId.of(BUCKET, sourceBlobName);
    +    Blob remoteSourceBlob = storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteSourceBlob);
    +    String targetBlobName = "test-copy-blob-update-metadata-target";
    +    ImmutableMap<String, String> metadata = ImmutableMap.of("k", "v");
    +    BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName)
    +        .contentType(CONTENT_TYPE)
    +        .metadata(metadata)
    +        .build();
    +    Storage.CopyRequest req = Storage.CopyRequest.of(source, target);
    +    CopyWriter copyWriter = storage.copy(req);
    +    assertEquals(BUCKET, copyWriter.result().bucket());
    +    assertEquals(targetBlobName, copyWriter.result().name());
    +    assertEquals(CONTENT_TYPE, copyWriter.result().contentType());
    +    assertEquals(metadata, copyWriter.result().metadata());
    +    assertTrue(copyWriter.isDone());
    +    assertTrue(remoteSourceBlob.delete());
    +    assertTrue(storage.delete(BUCKET, targetBlobName));
    +  }
    +
    +  @Test
    +  public void testCopyBlobFail() {
    +    // Two failure modes: an explicit generationMatch(-1L) precondition, and a
    +    // bare generationMatch() that takes the bogus generation (-1) from the
    +    // source BlobId itself. Both copies must throw.
    +    String sourceBlobName = "test-copy-blob-source-fail";
    +    BlobId source = BlobId.of(BUCKET, sourceBlobName, -1L);
    +    Blob remoteSourceBlob = storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteSourceBlob);
    +    String targetBlobName = "test-copy-blob-target-fail";
    +    BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build();
    +    Storage.CopyRequest req = Storage.CopyRequest.builder()
    +        .source(BUCKET, sourceBlobName)
    +        .sourceOptions(Storage.BlobSourceOption.generationMatch(-1L))
    +        .target(target)
    +        .build();
    +    try {
    +      storage.copy(req);
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    Storage.CopyRequest req2 = Storage.CopyRequest.builder()
    +        .source(source)
    +        .sourceOptions(Storage.BlobSourceOption.generationMatch())
    +        .target(target)
    +        .build();
    +    try {
    +      storage.copy(req2);
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteSourceBlob.delete());
    +  }
    +
    +  @Test
    +  public void testBatchRequest() {
    +    // Exercises all three batch operation kinds (update, get, delete) in turn,
    +    // checking that each response bucket contains only its own operation type.
    +    String sourceBlobName1 = "test-batch-request-blob-1";
    +    String sourceBlobName2 = "test-batch-request-blob-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    assertNotNull(storage.create(sourceBlob2));
    +
    +    // Batch update request
    +    BlobInfo updatedBlob1 = sourceBlob1.toBuilder().contentType(CONTENT_TYPE).build();
    +    BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build();
    +    BatchRequest updateRequest = BatchRequest.builder()
    +        .update(updatedBlob1)
    +        .update(updatedBlob2)
    +        .build();
    +    BatchResponse updateResponse = storage.submit(updateRequest);
    +    assertEquals(2, updateResponse.updates().size());
    +    assertEquals(0, updateResponse.deletes().size());
    +    assertEquals(0, updateResponse.gets().size());
    +    BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get();
    +    BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get();
    +    assertEquals(sourceBlob1.bucket(), remoteUpdatedBlob1.bucket());
    +    assertEquals(sourceBlob1.name(), remoteUpdatedBlob1.name());
    +    assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket());
    +    assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name());
    +    assertEquals(updatedBlob1.contentType(), remoteUpdatedBlob1.contentType());
    +    assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
    +
    +    // Batch get request: results must reflect the state after the batch update.
    +    BatchRequest getRequest = BatchRequest.builder()
    +        .get(BUCKET, sourceBlobName1)
    +        .get(BUCKET, sourceBlobName2)
    +        .build();
    +    BatchResponse getResponse = storage.submit(getRequest);
    +    assertEquals(2, getResponse.gets().size());
    +    assertEquals(0, getResponse.deletes().size());
    +    assertEquals(0, getResponse.updates().size());
    +    BlobInfo remoteBlob1 = getResponse.gets().get(0).get();
    +    BlobInfo remoteBlob2 = getResponse.gets().get(1).get();
    +    assertEquals(remoteUpdatedBlob1, remoteBlob1);
    +    assertEquals(remoteUpdatedBlob2, remoteBlob2);
    +
    +    // Batch delete request: both deletes succeed, which also cleans up the test.
    +    BatchRequest deleteRequest = BatchRequest.builder()
    +        .delete(BUCKET, sourceBlobName1)
    +        .delete(BUCKET, sourceBlobName2)
    +        .build();
    +    BatchResponse deleteResponse = storage.submit(deleteRequest);
    +    assertEquals(2, deleteResponse.deletes().size());
    +    assertEquals(0, deleteResponse.gets().size());
    +    assertEquals(0, deleteResponse.updates().size());
    +    assertTrue(deleteResponse.deletes().get(0).get());
    +    assertTrue(deleteResponse.deletes().get(1).get());
    +  }
    +
    +  @Test
    +  public void testBatchRequestManyDeletes() {
    +    // A single batch mixing 2 * MAX_BATCH_DELETES deletes (of blobs that do not
    +    // exist, so each returns false) with one get and one update must still
    +    // report every sub-request in its response.
    +    List<BlobId> blobsToDelete = Lists.newArrayListWithCapacity(2 * MAX_BATCH_DELETES);
    +    for (int i = 0; i < 2 * MAX_BATCH_DELETES; i++) {
    +      blobsToDelete.add(BlobId.of(BUCKET, "test-batch-request-many-deletes-blob-" + i));
    +    }
    +    BatchRequest.Builder builder = BatchRequest.builder();
    +    for (BlobId blob : blobsToDelete) {
    +      builder.delete(blob);
    +    }
    +    String sourceBlobName1 = "test-batch-request-many-deletes-source-blob-1";
    +    String sourceBlobName2 = "test-batch-request-many-deletes-source-blob-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    assertNotNull(storage.create(sourceBlob2));
    +    BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build();
    +
    +    BatchRequest updateRequest = builder
    +        .get(BUCKET, sourceBlobName1)
    +        .update(updatedBlob2)
    +        .build();
    +    BatchResponse response = storage.submit(updateRequest);
    +    assertEquals(2 * MAX_BATCH_DELETES, response.deletes().size());
    +    assertEquals(1, response.updates().size());
    +    assertEquals(1, response.gets().size());
    +
    +    // Check deletes: none failed, but each returns false (blob did not exist).
    +    for (BatchResponse.Result<Boolean> deleteResult : response.deletes()) {
    +      assertFalse(deleteResult.failed());
    +      assertFalse(deleteResult.get());
    +    }
    +
    +    // Check updates
    +    Blob remoteUpdatedBlob2 = response.updates().get(0).get();
    +    assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket());
    +    assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name());
    +    assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType());
    +
    +    // Check gets
    +    Blob remoteBlob1 = response.gets().get(0).get();
    +    assertEquals(sourceBlob1.bucket(), remoteBlob1.bucket());
    +    assertEquals(sourceBlob1.name(), remoteBlob1.name());
    +
    +    assertTrue(remoteBlob1.delete());
    +    assertTrue(remoteUpdatedBlob2.delete());
    +  }
    +
    +  @Test
    +  public void testBatchRequestFail() {
    +    // Mixes failing and succeeding sub-requests in one batch: precondition
    +    // mismatches fail, while lookups/deletes of a non-existent generation
    +    // succeed with null/false results respectively.
    +    String blobName = "test-batch-request-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    BlobInfo updatedBlob = BlobInfo.builder(BUCKET, blobName, -1L).build();
    +    BatchRequest batchRequest = BatchRequest.builder()
    +        .update(updatedBlob, Storage.BlobTargetOption.generationMatch())
    +        .delete(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L))
    +        .delete(BlobId.of(BUCKET, blobName, -1L))
    +        .get(BUCKET, blobName, Storage.BlobGetOption.generationMatch(-1L))
    +        .get(BlobId.of(BUCKET, blobName, -1L))
    +        .build();
    +    BatchResponse batchResponse = storage.submit(batchRequest);
    +    assertEquals(1, batchResponse.updates().size());
    +    assertEquals(2, batchResponse.deletes().size());
    +    assertEquals(2, batchResponse.gets().size());
    +    assertTrue(batchResponse.updates().get(0).failed());
    +    assertTrue(batchResponse.gets().get(0).failed());
    +    assertFalse(batchResponse.gets().get(1).failed());
    +    assertNull(batchResponse.gets().get(1).get());
    +    assertTrue(batchResponse.deletes().get(0).failed());
    +    assertFalse(batchResponse.deletes().get(1).failed());
    +    assertFalse(batchResponse.deletes().get(1).get());
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testReadAndWriteChannels() throws IOException {
    +    // Writes two buffers through a WriteChannel and reads them back through a
    +    // ReadChannel, verifying both segments round-trip in order.
    +    String blobName = "test-read-and-write-channels-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    byte[] stringBytes;
    +    try (WriteChannel writer = storage.writer(blob)) {
    +      stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    +      writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
    +      writer.write(ByteBuffer.wrap(stringBytes));
    +    }
    +    ByteBuffer readBytes;
    +    ByteBuffer readStringBytes;
    +    try (ReadChannel reader = storage.reader(blob.blobId())) {
    +      readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
    +      readStringBytes = ByteBuffer.allocate(stringBytes.length);
    +      reader.read(readBytes);
    +      reader.read(readStringBytes);
    +    }
    +    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
    +    assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
    +    assertTrue(storage.delete(BUCKET, blobName));
    +  }
    +
    +  @Test
    +  public void testReadAndWriteChannelsWithDifferentFileSize() throws IOException {
    +    // Round-trips random content through write/read channels at sizes chosen to
    +    // straddle chunking boundaries (0, sub-chunk, multi-chunk, chunk-size + 1).
    +    String blobNamePrefix = "test-read-and-write-channels-blob-";
    +    int[] blobSizes = {0, 700, 1024 * 256, 2 * 1024 * 1024, 4 * 1024 * 1024, 4 * 1024 * 1024 + 1};
    +    Random rnd = new Random();
    +    for (int blobSize : blobSizes) {
    +      String blobName = blobNamePrefix + blobSize;
    +      BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +      byte[] bytes = new byte[blobSize];
    +      rnd.nextBytes(bytes);
    +      try (WriteChannel writer = storage.writer(blob)) {
    +        writer.write(ByteBuffer.wrap(bytes));
    +      }
    +      ByteArrayOutputStream output = new ByteArrayOutputStream();
    +      try (ReadChannel reader = storage.reader(blob.blobId())) {
    +        ByteBuffer buffer = ByteBuffer.allocate(64 * 1024);
    +        // NOTE(review): the loop exits on a zero-byte read as well as on EOF (-1);
    +        // assumes ReadChannel never returns 0 before EOF — confirm against its contract.
    +        while (reader.read(buffer) > 0) {
    +          buffer.flip();
    +          output.write(buffer.array(), 0, buffer.limit());
    +          buffer.clear();
    +        }
    +      }
    +      assertArrayEquals(bytes, output.toByteArray());
    +      assertTrue(storage.delete(BUCKET, blobName));
    +    }
    +  }
    +
    +  @Test
    +  public void testReadAndWriteCaptureChannels() throws IOException {
    +    String blobName = "test-read-and-write-capture-channels-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    byte[] stringBytes;
    +    WriteChannel writer = storage.writer(blob);
    +    stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    +    writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT));
    +    RestorableState writerState = writer.capture();
    +    WriteChannel secondWriter = writerState.restore();
    +    secondWriter.write(ByteBuffer.wrap(stringBytes));
    +    secondWriter.close();
    +    ByteBuffer readBytes;
    +    ByteBuffer readStringBytes;
    +    ReadChannel reader = storage.reader(blob.blobId());
    +    reader.chunkSize(BLOB_BYTE_CONTENT.length);
    +    readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length);
    +    reader.read(readBytes);
    +    RestorableState readerState = reader.capture();
    +    ReadChannel secondReader = readerState.restore();
    +    readStringBytes = ByteBuffer.allocate(stringBytes.length);
    +    secondReader.read(readStringBytes);
    +    reader.close();
    +    secondReader.close();
    +    assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array());
    +    assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8));
    +    assertTrue(storage.delete(BUCKET, blobName));
    +  }
    +
    +  @Test
    +  public void testReadChannelFail() throws IOException {
    +    String blobName = "test-read-channel-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob);
    +    assertNotNull(remoteBlob);
    +    try (ReadChannel reader =
    +        storage.reader(blob.blobId(), Storage.BlobSourceOption.metagenerationMatch(-1L))) {
    +      reader.read(ByteBuffer.allocate(42));
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    try (ReadChannel reader =
    +             storage.reader(blob.blobId(), Storage.BlobSourceOption.generationMatch(-1L))) {
    +      reader.read(ByteBuffer.allocate(42));
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    BlobId blobIdWrongGeneration = BlobId.of(BUCKET, blobName, -1L);
    +    try (ReadChannel reader =
    +             storage.reader(blobIdWrongGeneration, Storage.BlobSourceOption.generationMatch())) {
    +      reader.read(ByteBuffer.allocate(42));
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testReadChannelFailUpdatedGeneration() throws IOException {
    +    String blobName = "test-read-blob-fail-updated-generation";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Random random = new Random();
    +    int chunkSize = 1024;
    +    int blobSize = 2 * chunkSize;
    +    byte[] content = new byte[blobSize];
    +    random.nextBytes(content);
    +    Blob remoteBlob = storage.create(blob, content);
    +    assertNotNull(remoteBlob);
    +    assertEquals(blobSize, (long) remoteBlob.size());
    +    try (ReadChannel reader = storage.reader(blob.blobId())) {
    +      reader.chunkSize(chunkSize);
    +      ByteBuffer readBytes = ByteBuffer.allocate(chunkSize);
    +      int numReadBytes = reader.read(readBytes);
    +      assertEquals(chunkSize, numReadBytes);
    +      assertArrayEquals(Arrays.copyOf(content, chunkSize), readBytes.array());
    +      try (WriteChannel writer = storage.writer(blob)) {
    +        byte[] newContent = new byte[blobSize];
    +        random.nextBytes(newContent);
    +        int numWrittenBytes = writer.write(ByteBuffer.wrap(newContent));
    +        assertEquals(blobSize, numWrittenBytes);
    +      }
    +      readBytes = ByteBuffer.allocate(chunkSize);
    +      reader.read(readBytes);
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      StringBuilder messageBuilder = new StringBuilder();
    +      messageBuilder.append("Blob ").append(blob.blobId()).append(" was updated while reading");
    +      assertEquals(messageBuilder.toString(), ex.getMessage());
    +    }
    +    assertTrue(storage.delete(BUCKET, blobName));
    +  }
    +
    +  @Test
    +  public void testWriteChannelFail() throws IOException {
    +    String blobName = "test-write-channel-blob-fail";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName, -1L).build();
    +    try {
    +      try (WriteChannel writer = storage.writer(blob, Storage.BlobWriteOption.generationMatch())) {
    +        writer.write(ByteBuffer.allocate(42));
    +      }
    +      fail("StorageException was expected");
    +    } catch (StorageException ex) {
    +      // expected
    +    }
    +  }
    +
    +  @Test
    +  public void testWriteChannelExistingBlob() throws IOException {
    +    String blobName = "test-write-channel-existing-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    storage.create(blob);
    +    byte[] stringBytes;
    +    try (WriteChannel writer = storage.writer(blob)) {
    +      stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8);
    +      writer.write(ByteBuffer.wrap(stringBytes));
    +    }
    +    assertArrayEquals(stringBytes, storage.readAllBytes(blob.blobId()));
    +    assertTrue(storage.delete(BUCKET, blobName));
    +  }
    +
    +  @Test
    +  public void testGetSignedUrl() throws IOException {
    +    String blobName = "test-get-signed-url-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    Blob remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT);
    +    assertNotNull(remoteBlob);
    +    URL url = storage.signUrl(blob, 1, TimeUnit.HOURS);
    +    URLConnection connection = url.openConnection();
    +    byte[] readBytes = new byte[BLOB_BYTE_CONTENT.length];
    +    try (InputStream responseStream = connection.getInputStream()) {
    +      assertEquals(BLOB_BYTE_CONTENT.length, responseStream.read(readBytes));
    +      assertArrayEquals(BLOB_BYTE_CONTENT, readBytes);
    +      assertTrue(remoteBlob.delete());
    +    }
    +  }
    +
    +  @Test
    +  public void testPostSignedUrl() throws IOException {
    +    String blobName = "test-post-signed-url-blob";
    +    BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build();
    +    assertNotNull(storage.create(blob));
    +    URL url =
    +        storage.signUrl(blob, 1, TimeUnit.HOURS, Storage.SignUrlOption.httpMethod(HttpMethod.POST));
    +    URLConnection connection = url.openConnection();
    +    connection.setDoOutput(true);
    +    connection.connect();
    +    Blob remoteBlob = storage.get(BUCKET, blobName);
    +    assertNotNull(remoteBlob);
    +    assertEquals(blob.bucket(), remoteBlob.bucket());
    +    assertEquals(blob.name(), remoteBlob.name());
    +    assertTrue(remoteBlob.delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobs() {
    +    String sourceBlobName1 = "test-get-blobs-1";
    +    String sourceBlobName2 = "test-get-blobs-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    assertNotNull(storage.create(sourceBlob2));
    +    List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
    +    assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
    +    assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
    +    assertEquals(sourceBlob2.bucket(), remoteBlobs.get(1).bucket());
    +    assertEquals(sourceBlob2.name(), remoteBlobs.get(1).name());
    +    assertTrue(remoteBlobs.get(0).delete());
    +    assertTrue(remoteBlobs.get(1).delete());
    +  }
    +
    +  @Test
    +  public void testGetBlobsFail() {
    +    String sourceBlobName1 = "test-get-blobs-fail-1";
    +    String sourceBlobName2 = "test-get-blobs-fail-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    List remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
    +    assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
    +    assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
    +    assertNull(remoteBlobs.get(1));
    +    assertTrue(remoteBlobs.get(0).delete());
    +  }
    +
    +  @Test
    +  public void testDeleteBlobs() {
    +    String sourceBlobName1 = "test-delete-blobs-1";
    +    String sourceBlobName2 = "test-delete-blobs-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    assertNotNull(storage.create(sourceBlob2));
    +    List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
    +    assertTrue(deleteStatus.get(0));
    +    assertTrue(deleteStatus.get(1));
    +  }
    +
    +  @Test
    +  public void testDeleteBlobsFail() {
    +    String sourceBlobName1 = "test-delete-blobs-fail-1";
    +    String sourceBlobName2 = "test-delete-blobs-fail-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    assertNotNull(storage.create(sourceBlob1));
    +    List deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
    +    assertTrue(deleteStatus.get(0));
    +    assertFalse(deleteStatus.get(1));
    +  }
    +
    +  @Test
    +  public void testUpdateBlobs() {
    +    String sourceBlobName1 = "test-update-blobs-1";
    +    String sourceBlobName2 = "test-update-blobs-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    Blob remoteBlob1 = storage.create(sourceBlob1);
    +    Blob remoteBlob2 = storage.create(sourceBlob2);
    +    assertNotNull(remoteBlob1);
    +    assertNotNull(remoteBlob2);
    +    List updatedBlobs = storage.update(
    +        remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
    +        remoteBlob2.toBuilder().contentType(CONTENT_TYPE).build());
    +    assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket());
    +    assertEquals(sourceBlob1.name(), updatedBlobs.get(0).name());
    +    assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
    +    assertEquals(sourceBlob2.bucket(), updatedBlobs.get(1).bucket());
    +    assertEquals(sourceBlob2.name(), updatedBlobs.get(1).name());
    +    assertEquals(CONTENT_TYPE, updatedBlobs.get(1).contentType());
    +    assertTrue(updatedBlobs.get(0).delete());
    +    assertTrue(updatedBlobs.get(1).delete());
    +  }
    +
    +  @Test
    +  public void testUpdateBlobsFail() {
    +    String sourceBlobName1 = "test-update-blobs-fail-1";
    +    String sourceBlobName2 = "test-update-blobs-fail-2";
    +    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
    +    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
    +    BlobInfo remoteBlob1 = storage.create(sourceBlob1);
    +    assertNotNull(remoteBlob1);
    +    List updatedBlobs = storage.update(
    +        remoteBlob1.toBuilder().contentType(CONTENT_TYPE).build(),
    +        sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build());
    +    assertEquals(sourceBlob1.bucket(), updatedBlobs.get(0).bucket());
    +    assertEquals(sourceBlob1.name(), updatedBlobs.get(0).name());
    +    assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType());
    +    assertNull(updatedBlobs.get(1));
    +    assertTrue(updatedBlobs.get(0).delete());
    +  }
    +}
    diff --git a/gcloud-java/README.md b/gcloud-java/README.md
    index 7e2eee84a8c4..e296d0c0c565 100644
    --- a/gcloud-java/README.md
    +++ b/gcloud-java/README.md
    @@ -6,6 +6,8 @@ Java idiomatic client for [Google Cloud Platform][cloud-platform] services.
     [![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java)
     [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master)
     [![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java.svg)
    +[![Codacy Badge](https://api.codacy.com/project/badge/grade/9da006ad7c3a4fe1abd142e77c003917)](https://www.codacy.com/app/mziccard/gcloud-java)
    +[![Dependency Status](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969/badge.svg?style=flat)](https://www.versioneye.com/user/projects/56bd8ee72a29ed002d2b0969)
     
     -  [Homepage] (https://googlecloudplatform.github.io/gcloud-java/)
     -  [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs)
    @@ -20,14 +22,27 @@ This client supports the following Google Cloud Platform services:
     
     Quickstart
     ----------
    -Add this to your pom.xml file
    +If you are using Maven, add this to your pom.xml file
     ```xml
     
       com.google.gcloud
       gcloud-java
    -  0.0.10
    +  0.1.5
     
     ```
    +If you are using Gradle, add this to your dependencies
    +```Groovy
    +compile 'com.google.gcloud:gcloud-java:0.1.5'
    +```
    +If you are using SBT, add this to your dependencies
    +```Scala
    +libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.5"
    +```
    +
    +Troubleshooting
    +---------------
    +
    +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
     
     Java Versions
     -------------
    @@ -48,7 +63,9 @@ Contributing
     
     Contributions to this library are always welcome and highly encouraged.
     
    -See [CONTRIBUTING] for more information on how to get started.
    +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
    +
    +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.
     
     License
     -------
    @@ -57,6 +74,7 @@ Apache 2.0 - See [LICENSE] for more information.
     
     
     [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
    +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
     [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
     [cloud-platform]: https://cloud.google.com/
     [cloud-datastore]: https://cloud.google.com/datastore/docs
    diff --git a/gcloud-java/pom.xml b/gcloud-java/pom.xml
    index 6ad0fce8f1b1..19536faa8c8d 100644
    --- a/gcloud-java/pom.xml
    +++ b/gcloud-java/pom.xml
    @@ -1,7 +1,6 @@
     
     
       4.0.0
    -  com.google.gcloud
       gcloud-java
       jar
       GCloud Java
    @@ -11,9 +10,14 @@
       
         com.google.gcloud
         gcloud-java-pom
    -    0.0.11-SNAPSHOT
    +    0.1.6-SNAPSHOT
       
       
    +    
    +      ${project.groupId}
    +      gcloud-java-bigquery
    +      ${project.version}
    +    
         
           ${project.groupId}
           gcloud-java-core
    @@ -24,6 +28,11 @@
           gcloud-java-datastore
           ${project.version}
         
    +    
    +      ${project.groupId}
    +      gcloud-java-resourcemanager
    +      ${project.version}
    +    
         
           ${project.groupId}
           gcloud-java-storage
    diff --git a/pom.xml b/pom.xml
    index 44bdccc273a4..b9f48b580bfd 100644
    --- a/pom.xml
    +++ b/pom.xml
    @@ -4,13 +4,31 @@
       com.google.gcloud
       gcloud-java-pom
       pom
    -  0.0.11-SNAPSHOT
    +  0.1.6-SNAPSHOT
       GCloud Java
       https://github.com/GoogleCloudPlatform/gcloud-java
       
         Java idiomatic client for Google Cloud Platform services.
       
       
    +    
    +      derka
    +      Martin Derka
    +      derka@google.com
    +      Google
    +      
    +        Developer
    +      
    +    
    +    
    +      ajaykannan
    +      Ajay Kannan
    +      ajaykannan@google.com
    +      Google
    +      
    +        Developer
    +      
    +    
         
           ozarov
           Arie Ozarov
    @@ -20,6 +38,15 @@
             Developer
           
         
    +    
    +      mziccardi
    +      Marco Ziccardi
    +      mziccardi@google.com
    +      Google
    +      
    +        Developer
    +      
    +    
       
       
         Google
    @@ -66,13 +93,16 @@
         gcloud-java
       
       
    +    gcloud-java
    +    gcloud-java-bigquery
    +    gcloud-java-contrib
         gcloud-java-core
         gcloud-java-gax
         gcloud-java-datastore
    -    gcloud-java-storage
         gcloud-java-pubsub
    -    gcloud-java
         gcloud-java-examples
    +    gcloud-java-resourcemanager
    +    gcloud-java-storage
       
       
         
    @@ -105,7 +135,7 @@
           
             org.apache.maven.plugins
             maven-surefire-plugin
    -        2.18
    +        2.19.1
             
               
               
    @@ -116,7 +146,7 @@
           
             org.apache.maven.plugins
             maven-enforcer-plugin
    -        1.4
    +        1.4.1
             
               
                 enforce-maven
    @@ -131,7 +161,7 @@
                     
                       [1.7,)
                     
    -                  
    +              
                 
               
             
    @@ -158,7 +188,7 @@
           
             org.apache.maven.plugins
             maven-failsafe-plugin
    -        2.18.1
    +        2.19.1
             
               
                 
    @@ -190,7 +220,7 @@
           
           
             maven-compiler-plugin
    -        3.2
    +        3.5.1
             
               1.7
               1.7
    @@ -201,7 +231,7 @@
           
             org.apache.maven.plugins
             maven-source-plugin
    -        2.4
    +        3.0.0
             
               
                 attach-sources
    @@ -241,7 +271,7 @@
           
             org.sonatype.plugins
             nexus-staging-maven-plugin
    -        1.6.5
    +        1.6.6
             true
             
               sonatype-nexus-staging
    @@ -252,7 +282,7 @@
           
             org.eluder.coveralls
             coveralls-maven-plugin
    -        3.1.0
    +        4.1.0
             
               
                 ${basedir}/target/coverage.xml
    @@ -285,12 +315,12 @@
           
             org.apache.maven.plugins
             maven-checkstyle-plugin
    -        2.16
    +        2.17
             
               
                 com.puppycrawl.tools
                 checkstyle
    -            6.8.1
    +            6.15
               
             
           
    @@ -309,7 +339,7 @@
                 
                   org.apache.maven.plugins
                   maven-project-info-reports-plugin
    -              2.8
    +              2.8.1
                   
                     
                       
    @@ -361,17 +391,35 @@
                     protected
                     true
                     ${project.build.directory}/javadoc
    +                
    +                  
    +                    API packages
    +                    com.google.gcloud*
    +                  
    +                  
    +                    Test helpers packages
    +                    com.google.gcloud.bigquery.testing:com.google.gcloud.datastore.testing:com.google.gcloud.resourcemanager.testing:com.google.gcloud.storage.testing
    +                  
    +                  
    +                    Example packages
    +                    com.google.gcloud.examples*
    +                  
    +                  
    +                    SPI packages
    +                    com.google.gcloud.spi:com.google.gcloud.bigquery.spi:com.google.gcloud.datastore.spi:com.google.gcloud.resourcemanager.spi:com.google.gcloud.storage.spi
    +                  
    +                
                   
                 
                 
                   org.apache.maven.plugins
                   maven-surefire-report-plugin
    -              2.18.1
    +              2.19.1
                 
                 
                   org.apache.maven.plugins
                   maven-checkstyle-plugin
    -              2.16
    +              2.17
                   
                     checkstyle.xml
                     false
    diff --git a/src/site/resources/index.html b/src/site/resources/index.html
    index 25c769db2fbf..0e0933e7b68c 100644
    --- a/src/site/resources/index.html
    +++ b/src/site/resources/index.html
    @@ -122,8 +122,8 @@ 

    What is it?

    gcloud is configured to access Google Cloud Platform services and authorize (OAuth 2.0) automatically on your behalf. Add the gcloud dependency to your project and get a private key to be - up and ready to go. Better yet, if you are running on a Google - Compute Engine instance, the private key is automatically detected. + up and ready to go. Better yet, if you are running on Google + App Engine or Compute Engine, the private key is automatically detected.

    @@ -143,8 +143,7 @@

    Example: Retrieve Datastore Entries

    // Authentication is automatic inside Google Compute Engine // and Google App Engine. -DatastoreOptions options = DatastoreOptions.builder().build(); -Datastore datastore = options.service(); +Datastore datastore = DatastoreOptions.defaultInstance().service(); KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND); Key key = keyFactory.newKey(keyName); Entity entity = datastore.get(key); @@ -173,6 +172,20 @@

    Example: Retrieve Datastore Entries

    +
    +
    +

    Examples

    + +
      +
    • + SparkJava demo - Uses gcloud-java with App Engine Managed VMs, Datastore, and SparkJava. +
    • +
    • + Bookshelf - An App Engine app that manages a virtual bookshelf using gcloud-java libraries for Datastore and Storage. +
    +
    +
    +

    FAQ

    diff --git a/src/site/site.xml b/src/site/site.xml index 55047ce85c54..6279179eb389 100644 --- a/src/site/site.xml +++ b/src/site/site.xml @@ -20,7 +20,7 @@ org.apache.maven.skins maven-fluido-skin - 1.3.1 + 1.4 diff --git a/utilities/after_success.sh b/utilities/after_success.sh index 26405bcd9db3..be7484806c46 100755 --- a/utilities/after_success.sh +++ b/utilities/after_success.sh @@ -1,5 +1,4 @@ #!/bin/bash -source ./utilities/integration_test_env.sh # This script is used by Travis-CI to publish artifacts (binary, sorce and javadoc jars) when releasing snapshots. # This script is referenced in .travis.yml. @@ -7,35 +6,40 @@ source ./utilities/integration_test_env.sh echo "Travis branch: " ${TRAVIS_BRANCH} echo "Travis pull request: " ${TRAVIS_PULL_REQUEST} echo "Travis JDK version: " ${TRAVIS_JDK_VERSION} -if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" -a "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then - mvn cobertura:cobertura coveralls:report - SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)') - if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then - # Deploy site if not a SNAPSHOT - git config --global user.name "travis-ci" - git config --global user.email "travis@travis-ci.org" - git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages - mkdir -p tmp_gh-pages/$SITE_VERSION - mvn site -DskipTests=true - mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/ - cd tmp_gh-pages - cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/ - sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change - git add $SITE_VERSION - echo "" > index.html - git add index.html - echo "" > apidocs/index.html - git add apidocs/index.html - git commit -m "Added a new site for version $SITE_VERSION and updated 
the root directory's redirect." - git config --global push.default simple - git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1 +if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" ]; then + mvn clean cobertura:cobertura coveralls:report + if [ "${TRAVIS_PULL_REQUEST}" == "false" -a "${TRAVIS_BRANCH}" == "master" ]; then + source ./utilities/integration_test_env.sh + SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)') + if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then + # Deploy site if not a SNAPSHOT + git config --global user.name "travis-ci" + git config --global user.email "travis@travis-ci.org" + git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages + mkdir -p tmp_gh-pages/$SITE_VERSION + mvn site -DskipTests=true + mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/ + cd tmp_gh-pages + cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/ + sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change + git add $SITE_VERSION + echo "" > index.html + git add index.html + echo "" > apidocs/index.html + git add apidocs/index.html + git commit -m "Added a new site for version $SITE_VERSION and updated the root directory's redirect." + git config --global push.default simple + git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1 - cd .. - utilities/update_docs_version.sh # Update version in READMEs - mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy + cd .. 
+ utilities/update_docs_version.sh # Update version in READMEs + mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy + else + mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml + fi else - mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml + echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds." fi else echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds." diff --git a/utilities/integration_test_env.sh b/utilities/integration_test_env.sh index f7aca1a8a623..a1bebe4dcb69 100755 --- a/utilities/integration_test_env.sh +++ b/utilities/integration_test_env.sh @@ -1,3 +1,3 @@ # Export test env variables -export GCLOUD_TESTS_PROJECT_ID="gcloud-devel" -export GCLOUD_TESTS_KEY=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json +export GCLOUD_PROJECT="gcloud-devel" +export GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json diff --git a/utilities/update_docs_version.sh b/utilities/update_docs_version.sh index d7e7bdbfb830..4fc0aa772963 100755 --- a/utilities/update_docs_version.sh +++ b/utilities/update_docs_version.sh @@ -14,11 +14,13 @@ if [ "${RELEASED_VERSION##*-}" != "SNAPSHOT" ]; then for item in ${module_folders[*]} do sed -ri "s/[0-9]+\.[0-9]+\.[0-9]+<\/version>/${RELEASED_VERSION}<\/version>/g" ${item}/README.md + sed -ri "s/:[0-9]+\.[0-9]+\.[0-9]+'/:${RELEASED_VERSION}'/g" ${item}/README.md + sed -ri "s/\"[0-9]+\.[0-9]+\.[0-9]+\"/\"${RELEASED_VERSION}\"/g" ${item}/README.md done git add README.md */README.md git config --global user.name "travis-ci" git config --global user.email "travis@travis-ci.org" - git commit -m "Updating version in README files." + git commit -m "Updating version in README files. 
[ci skip]" git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" HEAD:master > /dev/null 2>&1 fi diff --git a/utilities/verify.sh b/utilities/verify.sh index 463180415e98..b29ab8d8f747 100755 --- a/utilities/verify.sh +++ b/utilities/verify.sh @@ -1,10 +1,9 @@ #!/bin/bash -source ./utilities/integration_test_env.sh - # This script is used by Travis-CI to run tests. # This script is referenced in .travis.yml. -if [ "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then +if [ "${TRAVIS_PULL_REQUEST}" == "false" ]; then + source ./utilities/integration_test_env.sh # Get signing tools and API keyfile openssl aes-256-cbc -K $encrypted_631490ecae8f_key -iv $encrypted_631490ecae8f_iv -in target/travis/signing-tools.tar.enc -out $TRAVIS_BUILD_DIR/signing-tools.tar -d mkdir $TRAVIS_BUILD_DIR/signing-tools