chore(samples): Retail Tutorials. Product Setup/Cleanup test resources (#291)

* Configure modules settings.

* Add resources files.

* Product setup/cleanup impl.

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* Format code.

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* Replace PROJECT_NUMBER with PROJECT_ID

* kokoro files updated

* Change branch.

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: tetiana-karasova <tetiana.karasova@gmail.com>
Co-authored-by: Karl Weinmeister <11586922+kweinmeister@users.noreply.github.com>
4 people authored and Shabirmean committed Nov 15, 2022
1 parent 5bcd3c0 commit 7de32ef
Showing 16 changed files with 1,448 additions and 0 deletions.
@@ -0,0 +1,59 @@
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package events.setup;

import static events.setup.EventsCreateGcsBucket.eventsCreateGcsBucketAndUploadJsonFiles;
import static setup.SetupCleanup.createBqDataset;
import static setup.SetupCleanup.createBqTable;
import static setup.SetupCleanup.getGson;
import static setup.SetupCleanup.uploadDataToBqTable;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.stream.Collectors;

public class EventsCreateBigQueryTable {

public static void createBqTableWithEvents() throws IOException {
eventsCreateGcsBucketAndUploadJsonFiles();

String dataset = "user_events";
String validEventsTable = "events";
String invalidEventsTable = "events_some_invalid";
String eventsSchemaFilePath = "src/main/resources/events_schema.json";
String validEventsSourceFile =
String.format("gs://%s/user_events.json", EventsCreateGcsBucket.getBucketName());
String invalidEventsSourceFile =
String.format(
"gs://%s/user_events_some_invalid.json", EventsCreateGcsBucket.getBucketName());

String jsonToString;
try (BufferedReader bufferedReader = new BufferedReader(new FileReader(eventsSchemaFilePath))) {
jsonToString = bufferedReader.lines().collect(Collectors.joining());
}
// Rename "fields" to "subFields" so that Gson maps nested schema fields onto the BigQuery Field class.
jsonToString = jsonToString.replace("\"fields\"", "\"subFields\"");
Field[] fields = getGson().fromJson(jsonToString, Field[].class);
Schema eventsSchema = Schema.of(fields);

createBqDataset(dataset);
createBqTable(dataset, validEventsTable, eventsSchema);
uploadDataToBqTable(dataset, validEventsTable, validEventsSourceFile, eventsSchema);
createBqTable(dataset, invalidEventsTable, eventsSchema);
uploadDataToBqTable(dataset, invalidEventsTable, invalidEventsSourceFile, eventsSchema);
}
}
@@ -0,0 +1,40 @@
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package events.setup;

import static setup.SetupCleanup.createBucket;
import static setup.SetupCleanup.uploadObject;

import java.io.IOException;

public class EventsCreateGcsBucket {

private static final String BUCKET_NAME = System.getenv("BUCKET_NAME");

public static String getBucketName() {
return BUCKET_NAME;
}

public static void eventsCreateGcsBucketAndUploadJsonFiles() throws IOException {
createBucket(BUCKET_NAME);
uploadObject(BUCKET_NAME, "user_events.json", "src/main/resources/user_events.json");
uploadObject(
BUCKET_NAME,
"user_events_some_invalid.json",
"src/main/resources/user_events_some_invalid.json");
}
}
@@ -0,0 +1,114 @@
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package init;

import static events.setup.EventsCreateBigQueryTable.createBqTableWithEvents;
import static events.setup.EventsCreateGcsBucket.eventsCreateGcsBucketAndUploadJsonFiles;
import static product.setup.ProductsCreateBigqueryTable.createBqTableWithProducts;
import static product.setup.ProductsCreateGcsBucket.productsCreateGcsBucketAndUploadJsonFiles;

import com.google.cloud.retail.v2.GcsSource;
import com.google.cloud.retail.v2.ImportErrorsConfig;
import com.google.cloud.retail.v2.ImportMetadata;
import com.google.cloud.retail.v2.ImportProductsRequest;
import com.google.cloud.retail.v2.ImportProductsRequest.ReconciliationMode;
import com.google.cloud.retail.v2.ImportProductsResponse;
import com.google.cloud.retail.v2.ProductInputConfig;
import com.google.cloud.retail.v2.ProductServiceClient;
import com.google.longrunning.Operation;
import com.google.longrunning.OperationsClient;
import java.io.IOException;
import java.util.Collections;

public class CreateTestResources {
private static final String PROJECT_ID = System.getenv("PROJECT_ID");
private static final String BUCKET_NAME = System.getenv("BUCKET_NAME");
private static final String GCS_BUCKET = String.format("gs://%s", BUCKET_NAME);
private static final String GCS_ERROR_BUCKET = String.format("%s/errors", GCS_BUCKET);
private static final String DEFAULT_CATALOG =
String.format(
"projects/%s/locations/global/catalogs/default_catalog/" + "branches/0", PROJECT_ID);

public static void main(String[] args) throws IOException, InterruptedException {
productsCreateGcsBucketAndUploadJsonFiles();
eventsCreateGcsBucketAndUploadJsonFiles();
importProductsFromGcs();
createBqTableWithProducts();
createBqTableWithEvents();
}

public static ImportProductsRequest getImportProductsGcsRequest(String gcsObjectName) {
GcsSource gcsSource =
GcsSource.newBuilder()
.addAllInputUris(
Collections.singleton(String.format("gs://%s/%s", BUCKET_NAME, gcsObjectName)))
.build();
ProductInputConfig inputConfig =
ProductInputConfig.newBuilder().setGcsSource(gcsSource).build();
System.out.println("GCS source: " + gcsSource.getInputUrisList());

ImportErrorsConfig errorsConfig =
ImportErrorsConfig.newBuilder().setGcsPrefix(GCS_ERROR_BUCKET).build();
ImportProductsRequest importRequest =
ImportProductsRequest.newBuilder()
.setParent(DEFAULT_CATALOG)
.setReconciliationMode(ReconciliationMode.INCREMENTAL)
.setInputConfig(inputConfig)
.setErrorsConfig(errorsConfig)
.build();
System.out.println("Import products from GCS source request: " + importRequest);

return importRequest;
}

public static void importProductsFromGcs() throws IOException, InterruptedException {
ImportProductsRequest importGcsRequest = getImportProductsGcsRequest("products.json");

try (ProductServiceClient serviceClient = ProductServiceClient.create()) {
String operationName =
serviceClient.importProductsCallable().call(importGcsRequest).getName();
System.out.printf("OperationName = %s\n", operationName);

OperationsClient operationsClient = serviceClient.getOperationsClient();
Operation operation = operationsClient.getOperation(operationName);

while (!operation.getDone()) {
System.out.println("Please wait until the operation is completed.");
// Keep polling the operation periodically until the import task is done.
int awaitDuration = 30000;
Thread.sleep(awaitDuration);
operation = operationsClient.getOperation(operationName);
}

System.out.println("Import products operation is completed.");

if (operation.hasMetadata()) {
ImportMetadata metadata = operation.getMetadata().unpack(ImportMetadata.class);
System.out.printf(
"Number of successfully imported products: %s\n", metadata.getSuccessCount());
System.out.printf(
"Number of failures during the import: %s\n", metadata.getFailureCount());
}

if (operation.hasResponse()) {
ImportProductsResponse response =
operation.getResponse().unpack(ImportProductsResponse.class);
System.out.printf("Operation result: %s", response);
}
}
}
}
@@ -0,0 +1,72 @@
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package init;

import static setup.SetupCleanup.deleteBucket;
import static setup.SetupCleanup.deleteDataset;

import com.google.api.gax.rpc.PermissionDeniedException;
import com.google.cloud.retail.v2.DeleteProductRequest;
import com.google.cloud.retail.v2.ListProductsRequest;
import com.google.cloud.retail.v2.Product;
import com.google.cloud.retail.v2.ProductServiceClient;
import com.google.cloud.retail.v2.ProductServiceClient.ListProductsPagedResponse;
import java.io.IOException;

public class RemoveTestResources {

private static final String PROJECT_ID = System.getenv("PROJECT_ID");
private static final String BUCKET_NAME = System.getenv("BUCKET_NAME");
private static final String DEFAULT_CATALOG =
String.format(
"projects/%s/locations/global/catalogs/default_catalog/" + "branches/0", PROJECT_ID);

public static void main(String[] args) throws IOException {
deleteBucket(BUCKET_NAME);
deleteAllProducts();
deleteDataset(PROJECT_ID, "products");
deleteDataset(PROJECT_ID, "user_events");
}

public static void deleteAllProducts() throws IOException {
System.out.println("Deleting products in progress, please wait...");

try (ProductServiceClient productServiceClient = ProductServiceClient.create()) {
ListProductsRequest listRequest =
ListProductsRequest.newBuilder().setParent(DEFAULT_CATALOG).build();
ListProductsPagedResponse products = productServiceClient.listProducts(listRequest);

int deleteCount = 0;

for (Product product : products.iterateAll()) {
DeleteProductRequest deleteRequest =
DeleteProductRequest.newBuilder().setName(product.getName()).build();

try {
productServiceClient.deleteProduct(deleteRequest);
deleteCount++;
} catch (PermissionDeniedException e) {
System.out.println(
"Ignoring PermissionDenied in case the product no longer exists "
+ "at the time of deletion.");
}
}

System.out.printf("%s products were deleted from %s%n", deleteCount, DEFAULT_CATALOG);
}
}
}
@@ -0,0 +1,49 @@
# How to set up and tear down the test resources

## Required environment variables

To successfully import the catalog data for tests, the following environment variables should be
set:

- PROJECT_ID
- PROJECT_NUMBER
- BUCKET_NAME

The Secret Manager key names are set in the `.kokoro/presubmit/common.cfg` file, in the `SECRET_MANAGER_KEYS` variable.
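
For local runs, the variables can be exported in the shell before running the tutorials. The variable names come from the tutorials above; the values below are placeholders only:

```
# Placeholder values; replace them with your own project and bucket.
export PROJECT_ID=my-test-project
export PROJECT_NUMBER=123456789012
export BUCKET_NAME=my-test-bucket
```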

## Import catalog data

There is a JSON file with valid products prepared in the `product` directory:
`resources/products.json`.

Run `CreateTestResources` to perform the following actions:

- create the GCS bucket <BUCKET_NAME>;
- upload the product data from the `resources/products.json` file to the bucket;
- import the products to the default branch of the Retail catalog;
- upload the user events data from the `resources/user_events.json` file to the bucket;
- create a BigQuery dataset `products` with a `products` table;
- insert the products from `resources/products.json` into the created `products` table;
- create a BigQuery dataset `user_events` with an `events` table;
- insert the user events from `resources/user_events.json` into the created `events` table.

```
mvn compile exec:java -Dexec.mainClass="init.CreateTestResources"
```

As a result, 316 products should be created in the test project catalog.
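
To spot-check the BigQuery side of the setup, the created datasets and tables can be listed with the `bq` CLI (this assumes the Cloud SDK is installed and authenticated against the same project):

```
# Assumes the bq default project is set to $PROJECT_ID.
bq ls                # should show the products and user_events datasets
bq ls products       # should show the products and products_some_invalid tables
bq ls user_events    # should show the events and events_some_invalid tables
```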

## Remove catalog data

Run `RemoveTestResources` to perform the following actions:

- remove all objects from the GCS bucket <BUCKET_NAME>;
- remove the <BUCKET_NAME> bucket;
- delete all products from the Retail catalog;
- remove the `products` dataset along with its tables;
- remove the `user_events` dataset along with its tables.

```
mvn compile exec:java -Dexec.mainClass="init.RemoveTestResources"
```
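
An optional way to confirm the cleanup is to check that the bucket and datasets are gone. The commands below are illustrative and assume `gsutil` and `bq` are available:

```
gsutil ls gs://$BUCKET_NAME   # expected to fail because the bucket was deleted
bq ls                         # the products and user_events datasets should no longer be listed
```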
@@ -0,0 +1,59 @@
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package product.setup;

import static product.setup.ProductsCreateGcsBucket.productsCreateGcsBucketAndUploadJsonFiles;
import static setup.SetupCleanup.createBqDataset;
import static setup.SetupCleanup.createBqTable;
import static setup.SetupCleanup.getGson;
import static setup.SetupCleanup.uploadDataToBqTable;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.stream.Collectors;

public class ProductsCreateBigqueryTable {

public static void createBqTableWithProducts() throws IOException {
productsCreateGcsBucketAndUploadJsonFiles();

String dataset = "products";
String validProductsTable = "products";
String invalidProductsTable = "products_some_invalid";
String productSchemaFilePath = "src/main/resources/product_schema.json";
String validProductsSourceFile =
String.format("gs://%s/products.json", ProductsCreateGcsBucket.getBucketName());
String invalidProductsSourceFile =
String.format(
"gs://%s/products_some_invalid.json", ProductsCreateGcsBucket.getBucketName());

String jsonToString;
try (BufferedReader bufferedReader = new BufferedReader(new FileReader(productSchemaFilePath))) {
jsonToString = bufferedReader.lines().collect(Collectors.joining());
}
// Rename "fields" to "subFields" so that Gson maps nested schema fields onto the BigQuery Field class.
jsonToString = jsonToString.replace("\"fields\"", "\"subFields\"");
Field[] fields = getGson().fromJson(jsonToString, Field[].class);
Schema productSchema = Schema.of(fields);

createBqDataset(dataset);
createBqTable(dataset, validProductsTable, productSchema);
uploadDataToBqTable(dataset, validProductsTable, validProductsSourceFile, productSchema);
createBqTable(dataset, invalidProductsTable, productSchema);
uploadDataToBqTable(dataset, invalidProductsTable, invalidProductsSourceFile, productSchema);
}
}
