diff --git a/aiplatform/snippets/pom.xml b/aiplatform/snippets/pom.xml
index 3fc4e24afa6..83953def3f0 100644
--- a/aiplatform/snippets/pom.xml
+++ b/aiplatform/snippets/pom.xml
@@ -62,6 +62,10 @@
       <artifactId>proto-google-cloud-aiplatform-v1beta1</artifactId>
       <version>0.17.0</version>
     </dependency>
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>google-cloud-bigquery</artifactId>
+      <version>2.13.6</version>
+    </dependency>
diff --git a/aiplatform/snippets/src/main/java/aiplatform/BatchCreateFeaturesSample.java b/aiplatform/snippets/src/main/java/aiplatform/BatchCreateFeaturesSample.java
new file mode 100644
index 00000000000..8b948092798
--- /dev/null
+++ b/aiplatform/snippets/src/main/java/aiplatform/BatchCreateFeaturesSample.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *
+ * Create features in bulk for an existing entity type. See
+ * https://cloud.google.com/vertex-ai/docs/featurestore/setup
+ * before running the code snippet
+ */
+
+package aiplatform;
+
+// [START aiplatform_batch_create_features_sample]
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.aiplatform.v1.BatchCreateFeaturesOperationMetadata;
+import com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest;
+import com.google.cloud.aiplatform.v1.BatchCreateFeaturesResponse;
+import com.google.cloud.aiplatform.v1.CreateFeatureRequest;
+import com.google.cloud.aiplatform.v1.EntityTypeName;
+import com.google.cloud.aiplatform.v1.Feature;
+import com.google.cloud.aiplatform.v1.Feature.ValueType;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceClient;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceSettings;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+public class BatchCreateFeaturesSample {
+
+ public static void main(String[] args)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // TODO(developer): Replace these variables before running the sample.
+ String project = "YOUR_PROJECT_ID";
+ String featurestoreId = "YOUR_FEATURESTORE_ID";
+ String entityTypeId = "YOUR_ENTITY_TYPE_ID";
+ String location = "us-central1";
+ String endpoint = "us-central1-aiplatform.googleapis.com:443";
+ int timeout = 300;
+ batchCreateFeaturesSample(project, featurestoreId, entityTypeId, location, endpoint, timeout);
+ }
+
+ static void batchCreateFeaturesSample(
+ String project,
+ String featurestoreId,
+ String entityTypeId,
+ String location,
+ String endpoint,
+ int timeout)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ FeaturestoreServiceSettings featurestoreServiceSettings =
+ FeaturestoreServiceSettings.newBuilder().setEndpoint(endpoint).build();
+
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests. After completing all of your requests, call
+ // the "close" method on the client to safely clean up any remaining background resources.
+ try (FeaturestoreServiceClient featurestoreServiceClient =
+ FeaturestoreServiceClient.create(featurestoreServiceSettings)) {
+
+      List<CreateFeatureRequest> createFeatureRequests = new ArrayList<>();
+
+ Feature titleFeature =
+ Feature.newBuilder()
+ .setDescription("The title of the movie")
+ .setValueType(ValueType.STRING)
+ .build();
+ Feature genresFeature =
+ Feature.newBuilder()
+ .setDescription("The genres of the movie")
+ .setValueType(ValueType.STRING)
+ .build();
+ Feature averageRatingFeature =
+ Feature.newBuilder()
+ .setDescription("The average rating for the movie, range is [1.0-5.0]")
+ .setValueType(ValueType.DOUBLE)
+ .build();
+
+ createFeatureRequests.add(
+ CreateFeatureRequest.newBuilder().setFeature(titleFeature).setFeatureId("title").build());
+
+ createFeatureRequests.add(
+ CreateFeatureRequest.newBuilder()
+ .setFeature(genresFeature)
+ .setFeatureId("genres")
+ .build());
+
+ createFeatureRequests.add(
+ CreateFeatureRequest.newBuilder()
+ .setFeature(averageRatingFeature)
+ .setFeatureId("average_rating")
+ .build());
+
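+      // Assemble the batch request; the parent is the full entity type resource name.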
+ BatchCreateFeaturesRequest batchCreateFeaturesRequest =
+ BatchCreateFeaturesRequest.newBuilder()
+ .setParent(
+ EntityTypeName.of(project, location, featurestoreId, entityTypeId).toString())
+ .addAllRequests(createFeatureRequests)
+ .build();
+
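+      // batchCreateFeaturesAsync returns a long-running operation; block until it completes.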
+      OperationFuture<BatchCreateFeaturesResponse, BatchCreateFeaturesOperationMetadata>
+          batchCreateFeaturesFuture =
+ featurestoreServiceClient.batchCreateFeaturesAsync(batchCreateFeaturesRequest);
+ System.out.format(
+ "Operation name: %s%n", batchCreateFeaturesFuture.getInitialFuture().get().getName());
+ System.out.println("Waiting for operation to finish...");
+ BatchCreateFeaturesResponse batchCreateFeaturesResponse =
+ batchCreateFeaturesFuture.get(timeout, TimeUnit.SECONDS);
+ System.out.println("Batch Create Features Response");
+ System.out.println(batchCreateFeaturesResponse);
+ featurestoreServiceClient.close();
+ }
+ }
+}
+// [END aiplatform_batch_create_features_sample]
diff --git a/aiplatform/snippets/src/main/java/aiplatform/BatchReadFeatureValuesSample.java b/aiplatform/snippets/src/main/java/aiplatform/BatchReadFeatureValuesSample.java
new file mode 100644
index 00000000000..a76c3388d1e
--- /dev/null
+++ b/aiplatform/snippets/src/main/java/aiplatform/BatchReadFeatureValuesSample.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *
+ * Batch read feature values from a featurestore, using a read-instances list
+ * file to select the entities and features to read, and export the data. See
+ * https://cloud.google.com/vertex-ai/docs/featurestore/setup before running
+ * the code snippet
+ */
+
+package aiplatform;
+
+// [START aiplatform_batch_read_feature_values_sample]
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.aiplatform.v1.BatchReadFeatureValuesOperationMetadata;
+import com.google.cloud.aiplatform.v1.BatchReadFeatureValuesRequest;
+import com.google.cloud.aiplatform.v1.BatchReadFeatureValuesRequest.EntityTypeSpec;
+import com.google.cloud.aiplatform.v1.BatchReadFeatureValuesResponse;
+import com.google.cloud.aiplatform.v1.BigQueryDestination;
+import com.google.cloud.aiplatform.v1.CsvSource;
+import com.google.cloud.aiplatform.v1.FeatureSelector;
+import com.google.cloud.aiplatform.v1.FeatureValueDestination;
+import com.google.cloud.aiplatform.v1.FeaturestoreName;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceClient;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceSettings;
+import com.google.cloud.aiplatform.v1.GcsSource;
+import com.google.cloud.aiplatform.v1.IdMatcher;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+public class BatchReadFeatureValuesSample {
+
+ public static void main(String[] args)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // TODO(developer): Replace these variables before running the sample.
+ String project = "YOUR_PROJECT_ID";
+ String featurestoreId = "YOUR_FEATURESTORE_ID";
+ String entityTypeId = "YOUR_ENTITY_TYPE_ID";
+    String inputCsvFile = "YOUR_INPUT_CSV_FILE";
+ String destinationTableUri = "YOUR_DESTINATION_TABLE_URI";
+    List<String> featureSelectorIds = Arrays.asList("title", "genres", "average_rating");
+ String location = "us-central1";
+ String endpoint = "us-central1-aiplatform.googleapis.com:443";
+ int timeout = 300;
+ batchReadFeatureValuesSample(
+ project,
+ featurestoreId,
+ entityTypeId,
+ inputCsvFile,
+ destinationTableUri,
+ featureSelectorIds,
+ location,
+ endpoint,
+ timeout);
+ }
+
+ static void batchReadFeatureValuesSample(
+ String project,
+ String featurestoreId,
+ String entityTypeId,
+ String inputCsvFile,
+ String destinationTableUri,
+      List<String> featureSelectorIds,
+ String location,
+ String endpoint,
+ int timeout)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ FeaturestoreServiceSettings featurestoreServiceSettings =
+ FeaturestoreServiceSettings.newBuilder().setEndpoint(endpoint).build();
+
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests. After completing all of your requests, call
+ // the "close" method on the client to safely clean up any remaining background resources.
+ try (FeaturestoreServiceClient featurestoreServiceClient =
+ FeaturestoreServiceClient.create(featurestoreServiceSettings)) {
+
+      List<EntityTypeSpec> entityTypeSpecs = new ArrayList<>();
+
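+      // Select the features to read by ID.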
+ FeatureSelector featureSelector =
+ FeatureSelector.newBuilder()
+ .setIdMatcher(IdMatcher.newBuilder().addAllIds(featureSelectorIds).build())
+ .build();
+ EntityTypeSpec entityTypeSpec =
+ EntityTypeSpec.newBuilder()
+ .setEntityTypeId(entityTypeId)
+ .setFeatureSelector(featureSelector)
+ .build();
+
+ entityTypeSpecs.add(entityTypeSpec);
+
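+      // Read instances come from a CSV file in Cloud Storage; results go to a BigQuery table.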
+ BigQueryDestination bigQueryDestination =
+ BigQueryDestination.newBuilder().setOutputUri(destinationTableUri).build();
+ GcsSource gcsSource = GcsSource.newBuilder().addUris(inputCsvFile).build();
+ BatchReadFeatureValuesRequest batchReadFeatureValuesRequest =
+ BatchReadFeatureValuesRequest.newBuilder()
+ .setFeaturestore(FeaturestoreName.of(project, location, featurestoreId).toString())
+ .setCsvReadInstances(CsvSource.newBuilder().setGcsSource(gcsSource))
+ .setDestination(
+ FeatureValueDestination.newBuilder().setBigqueryDestination(bigQueryDestination))
+ .addAllEntityTypeSpecs(entityTypeSpecs)
+ .build();
+
+      OperationFuture<BatchReadFeatureValuesResponse, BatchReadFeatureValuesOperationMetadata>
+          batchReadFeatureValuesFuture =
+ featurestoreServiceClient.batchReadFeatureValuesAsync(batchReadFeatureValuesRequest);
+ System.out.format(
+ "Operation name: %s%n", batchReadFeatureValuesFuture.getInitialFuture().get().getName());
+ System.out.println("Waiting for operation to finish...");
+ BatchReadFeatureValuesResponse batchReadFeatureValuesResponse =
+ batchReadFeatureValuesFuture.get(timeout, TimeUnit.SECONDS);
+ System.out.println("Batch Read Feature Values Response");
+ System.out.println(batchReadFeatureValuesResponse);
+ featurestoreServiceClient.close();
+ }
+ }
+}
+// [END aiplatform_batch_read_feature_values_sample]
diff --git a/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSample.java b/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSample.java
new file mode 100644
index 00000000000..6bb7b00d66e
--- /dev/null
+++ b/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSample.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *
+ * Bulk export feature values from a featurestore. See
+ * https://cloud.google.com/vertex-ai/docs/featurestore/setup before running
+ * the code snippet
+ */
+
+package aiplatform;
+
+// [START aiplatform_export_feature_values_sample]
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.aiplatform.v1.BigQueryDestination;
+import com.google.cloud.aiplatform.v1.EntityTypeName;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesOperationMetadata;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesRequest;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesRequest.FullExport;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesResponse;
+import com.google.cloud.aiplatform.v1.FeatureSelector;
+import com.google.cloud.aiplatform.v1.FeatureValueDestination;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceClient;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceSettings;
+import com.google.cloud.aiplatform.v1.IdMatcher;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+public class ExportFeatureValuesSample {
+
+ public static void main(String[] args)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // TODO(developer): Replace these variables before running the sample.
+ String project = "YOUR_PROJECT_ID";
+ String featurestoreId = "YOUR_FEATURESTORE_ID";
+ String entityTypeId = "YOUR_ENTITY_TYPE_ID";
+ String destinationTableUri = "YOUR_DESTINATION_TABLE_URI";
+    List<String> featureSelectorIds = Arrays.asList("title", "genres", "average_rating");
+ String location = "us-central1";
+ String endpoint = "us-central1-aiplatform.googleapis.com:443";
+ int timeout = 300;
+ exportFeatureValuesSample(
+ project,
+ featurestoreId,
+ entityTypeId,
+ destinationTableUri,
+ featureSelectorIds,
+ location,
+ endpoint,
+ timeout);
+ }
+
+ static void exportFeatureValuesSample(
+ String project,
+ String featurestoreId,
+ String entityTypeId,
+ String destinationTableUri,
+      List<String> featureSelectorIds,
+ String location,
+ String endpoint,
+ int timeout)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ FeaturestoreServiceSettings featurestoreServiceSettings =
+ FeaturestoreServiceSettings.newBuilder().setEndpoint(endpoint).build();
+
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests. After completing all of your requests, call
+ // the "close" method on the client to safely clean up any remaining background resources.
+ try (FeaturestoreServiceClient featurestoreServiceClient =
+ FeaturestoreServiceClient.create(featurestoreServiceSettings)) {
+
+ FeatureSelector featureSelector =
+ FeatureSelector.newBuilder()
+ .setIdMatcher(IdMatcher.newBuilder().addAllIds(featureSelectorIds).build())
+ .build();
+
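+      // FullExport exports all historical values of the selected features to BigQuery.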
+ ExportFeatureValuesRequest exportFeatureValuesRequest =
+ ExportFeatureValuesRequest.newBuilder()
+ .setEntityType(
+ EntityTypeName.of(project, location, featurestoreId, entityTypeId).toString())
+ .setDestination(
+ FeatureValueDestination.newBuilder()
+ .setBigqueryDestination(
+ BigQueryDestination.newBuilder().setOutputUri(destinationTableUri)))
+ .setFeatureSelector(featureSelector)
+ .setFullExport(FullExport.newBuilder())
+ .build();
+
+      OperationFuture<ExportFeatureValuesResponse, ExportFeatureValuesOperationMetadata>
+          exportFeatureValuesFuture =
+ featurestoreServiceClient.exportFeatureValuesAsync(exportFeatureValuesRequest);
+ System.out.format(
+ "Operation name: %s%n", exportFeatureValuesFuture.getInitialFuture().get().getName());
+ System.out.println("Waiting for operation to finish...");
+ ExportFeatureValuesResponse exportFeatureValuesResponse =
+ exportFeatureValuesFuture.get(timeout, TimeUnit.SECONDS);
+ System.out.println("Export Feature Values Response");
+ System.out.println(exportFeatureValuesResponse);
+ featurestoreServiceClient.close();
+ }
+ }
+}
+// [END aiplatform_export_feature_values_sample]
diff --git a/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSnapshotSample.java b/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSnapshotSample.java
new file mode 100644
index 00000000000..6d48d34d06c
--- /dev/null
+++ b/aiplatform/snippets/src/main/java/aiplatform/ExportFeatureValuesSnapshotSample.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *
+ * Export a snapshot of feature values from a featurestore. See
+ * https://cloud.google.com/vertex-ai/docs/featurestore/setup before running
+ * the code snippet
+ */
+
+package aiplatform;
+
+// [START aiplatform_export_feature_values_snapshot_sample]
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.aiplatform.v1.BigQueryDestination;
+import com.google.cloud.aiplatform.v1.EntityTypeName;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesOperationMetadata;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesRequest;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesRequest.SnapshotExport;
+import com.google.cloud.aiplatform.v1.ExportFeatureValuesResponse;
+import com.google.cloud.aiplatform.v1.FeatureSelector;
+import com.google.cloud.aiplatform.v1.FeatureValueDestination;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceClient;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceSettings;
+import com.google.cloud.aiplatform.v1.IdMatcher;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+public class ExportFeatureValuesSnapshotSample {
+
+ public static void main(String[] args)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // TODO(developer): Replace these variables before running the sample.
+ String project = "YOUR_PROJECT_ID";
+ String featurestoreId = "YOUR_FEATURESTORE_ID";
+ String entityTypeId = "YOUR_ENTITY_TYPE_ID";
+ String destinationTableUri = "YOUR_DESTINATION_TABLE_URI";
+    List<String> featureSelectorIds = Arrays.asList("title", "genres", "average_rating");
+ String location = "us-central1";
+ String endpoint = "us-central1-aiplatform.googleapis.com:443";
+ int timeout = 300;
+ exportFeatureValuesSnapshotSample(
+ project,
+ featurestoreId,
+ entityTypeId,
+ destinationTableUri,
+ featureSelectorIds,
+ location,
+ endpoint,
+ timeout);
+ }
+
+ static void exportFeatureValuesSnapshotSample(
+ String project,
+ String featurestoreId,
+ String entityTypeId,
+ String destinationTableUri,
+      List<String> featureSelectorIds,
+ String location,
+ String endpoint,
+ int timeout)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ FeaturestoreServiceSettings featurestoreServiceSettings =
+ FeaturestoreServiceSettings.newBuilder().setEndpoint(endpoint).build();
+
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests. After completing all of your requests, call
+ // the "close" method on the client to safely clean up any remaining background resources.
+ try (FeaturestoreServiceClient featurestoreServiceClient =
+ FeaturestoreServiceClient.create(featurestoreServiceSettings)) {
+
+ FeatureSelector featureSelector =
+ FeatureSelector.newBuilder()
+ .setIdMatcher(IdMatcher.newBuilder().addAllIds(featureSelectorIds).build())
+ .build();
+
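+      // SnapshotExport exports the feature values as of a single snapshot time to BigQuery.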
+ ExportFeatureValuesRequest exportFeatureValuesRequest =
+ ExportFeatureValuesRequest.newBuilder()
+ .setEntityType(
+ EntityTypeName.of(project, location, featurestoreId, entityTypeId).toString())
+ .setDestination(
+ FeatureValueDestination.newBuilder()
+ .setBigqueryDestination(
+ BigQueryDestination.newBuilder().setOutputUri(destinationTableUri)))
+ .setFeatureSelector(featureSelector)
+ .setSnapshotExport(SnapshotExport.newBuilder())
+ .build();
+
+      OperationFuture<ExportFeatureValuesResponse, ExportFeatureValuesOperationMetadata>
+          exportFeatureValuesFuture =
+ featurestoreServiceClient.exportFeatureValuesAsync(exportFeatureValuesRequest);
+ System.out.format(
+ "Operation name: %s%n", exportFeatureValuesFuture.getInitialFuture().get().getName());
+ System.out.println("Waiting for operation to finish...");
+ ExportFeatureValuesResponse exportFeatureValuesResponse =
+ exportFeatureValuesFuture.get(timeout, TimeUnit.SECONDS);
+ System.out.println("Snapshot Export Feature Values Response");
+ System.out.println(exportFeatureValuesResponse);
+ featurestoreServiceClient.close();
+ }
+ }
+}
+// [END aiplatform_export_feature_values_snapshot_sample]
diff --git a/aiplatform/snippets/src/main/java/aiplatform/ImportFeatureValuesSample.java b/aiplatform/snippets/src/main/java/aiplatform/ImportFeatureValuesSample.java
new file mode 100644
index 00000000000..405b05f54fb
--- /dev/null
+++ b/aiplatform/snippets/src/main/java/aiplatform/ImportFeatureValuesSample.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *
+ * Bulk import values into a featurestore for existing features. See
+ * https://cloud.google.com/vertex-ai/docs/featurestore/setup before running
+ * the code snippet
+ */
+
+package aiplatform;
+
+// [START aiplatform_import_feature_values_sample]
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.aiplatform.v1.AvroSource;
+import com.google.cloud.aiplatform.v1.EntityTypeName;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceClient;
+import com.google.cloud.aiplatform.v1.FeaturestoreServiceSettings;
+import com.google.cloud.aiplatform.v1.GcsSource;
+import com.google.cloud.aiplatform.v1.ImportFeatureValuesOperationMetadata;
+import com.google.cloud.aiplatform.v1.ImportFeatureValuesRequest;
+import com.google.cloud.aiplatform.v1.ImportFeatureValuesRequest.FeatureSpec;
+import com.google.cloud.aiplatform.v1.ImportFeatureValuesResponse;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+public class ImportFeatureValuesSample {
+
+ public static void main(String[] args)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // TODO(developer): Replace these variables before running the sample.
+ String project = "YOUR_PROJECT_ID";
+ String featurestoreId = "YOUR_FEATURESTORE_ID";
+ String entityTypeId = "YOUR_ENTITY_TYPE_ID";
+    String entityIdField = "YOUR_ENTITY_ID_FIELD";
+ String featureTimeField = "YOUR_FEATURE_TIME_FIELD";
+ String gcsSourceUri = "YOUR_GCS_SOURCE_URI";
+ int workerCount = 2;
+ String location = "us-central1";
+ String endpoint = "us-central1-aiplatform.googleapis.com:443";
+ int timeout = 300;
+ importFeatureValuesSample(
+ project,
+ featurestoreId,
+ entityTypeId,
+ gcsSourceUri,
+ entityIdField,
+ featureTimeField,
+ workerCount,
+ location,
+ endpoint,
+ timeout);
+ }
+
+ static void importFeatureValuesSample(
+ String project,
+ String featurestoreId,
+ String entityTypeId,
+ String gcsSourceUri,
+ String entityIdField,
+ String featureTimeField,
+ int workerCount,
+ String location,
+ String endpoint,
+ int timeout)
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ FeaturestoreServiceSettings featurestoreServiceSettings =
+ FeaturestoreServiceSettings.newBuilder().setEndpoint(endpoint).build();
+
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests. After completing all of your requests, call
+ // the "close" method on the client to safely clean up any remaining background resources.
+ try (FeaturestoreServiceClient featurestoreServiceClient =
+ FeaturestoreServiceClient.create(featurestoreServiceSettings)) {
+      List<FeatureSpec> featureSpecs = new ArrayList<>();
+
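+      // Each FeatureSpec names a feature ID; the source column of the same name is used by default.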
+ featureSpecs.add(FeatureSpec.newBuilder().setId("title").build());
+ featureSpecs.add(FeatureSpec.newBuilder().setId("genres").build());
+ featureSpecs.add(FeatureSpec.newBuilder().setId("average_rating").build());
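+      // entityIdField and featureTimeField name the source columns holding entity IDs and feature timestamps.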
+ ImportFeatureValuesRequest importFeatureValuesRequest =
+ ImportFeatureValuesRequest.newBuilder()
+ .setEntityType(
+ EntityTypeName.of(project, location, featurestoreId, entityTypeId).toString())
+ .setEntityIdField(entityIdField)
+ .setFeatureTimeField(featureTimeField)
+ .addAllFeatureSpecs(featureSpecs)
+ .setWorkerCount(workerCount)
+ .setAvroSource(
+ AvroSource.newBuilder()
+ .setGcsSource(GcsSource.newBuilder().addUris(gcsSourceUri)))
+ .build();
+      OperationFuture<ImportFeatureValuesResponse, ImportFeatureValuesOperationMetadata>
+          importFeatureValuesFuture =
+ featurestoreServiceClient.importFeatureValuesAsync(importFeatureValuesRequest);
+ System.out.format(
+ "Operation name: %s%n", importFeatureValuesFuture.getInitialFuture().get().getName());
+ System.out.println("Waiting for operation to finish...");
+ ImportFeatureValuesResponse importFeatureValuesResponse =
+ importFeatureValuesFuture.get(timeout, TimeUnit.SECONDS);
+ System.out.println("Import Feature Values Response");
+ System.out.println(importFeatureValuesResponse);
+ featurestoreServiceClient.close();
+ }
+ }
+}
+// [END aiplatform_import_feature_values_sample]
diff --git a/aiplatform/snippets/src/test/java/aiplatform/FeatureValuesSamplesTest.java b/aiplatform/snippets/src/test/java/aiplatform/FeatureValuesSamplesTest.java
new file mode 100644
index 00000000000..56852503b09
--- /dev/null
+++ b/aiplatform/snippets/src/test/java/aiplatform/FeatureValuesSamplesTest.java
@@ -0,0 +1,278 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package aiplatform;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.Dataset;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.DatasetInfo;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.UUID;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeoutException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class FeatureValuesSamplesTest {
+
+ private static final String PROJECT_ID = System.getenv("UCAIP_PROJECT_ID");
+ private static final int MIN_NODE_COUNT = 1;
+ private static final int MAX_NODE_COUNT = 5;
+ private static final String DESCRIPTION = "Test Description";
+ private static final boolean USE_FORCE = true;
+ private static final String ENTITY_ID_FIELD = "movie_id";
+ private static final String FEATURE_TIME_FIELD = "update_time";
+ private static final String GCS_SOURCE_URI =
+ "gs://cloud-samples-data-us-central1/vertex-ai/feature-store/datasets/movies.avro";
+ private static final int WORKER_COUNT = 2;
+ private static final String INPUT_CSV_FILE =
+ "gs://cloud-samples-data-us-central1/vertex-ai/feature-store/datasets/movie_prediction.csv";
+  private static final List<String> FEATURE_SELECTOR_IDS =
+ Arrays.asList("title", "genres", "average_rating");
+ private static final String LOCATION = "us-central1";
+ private static final String ENDPOINT = "us-central1-aiplatform.googleapis.com:443";
+ private static final int TIMEOUT = 900;
+ private ByteArrayOutputStream bout;
+ private PrintStream out;
+ private PrintStream originalPrintStream;
+ private String featurestoreId;
+ private String destinationTableUri;
+ private Date date;
+ private SimpleDateFormat dateFormat;
+ private String datasetName;
+ private String destinationTableName;
+
+ private static void requireEnvVar(String varName) {
+ String errorMessage =
+ String.format("Environment variable '%s' is required to perform these tests.", varName);
+ assertNotNull(errorMessage, System.getenv(varName));
+ }
+
+ @BeforeClass
+ public static void checkRequirements() {
+ requireEnvVar("GOOGLE_APPLICATION_CREDENTIALS");
+ requireEnvVar("UCAIP_PROJECT_ID");
+ }
+
+ @Before
+ public void setUp() {
+ date = new Date();
+ dateFormat = new SimpleDateFormat("yyyyMMddHHmmSSS");
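+    // Timestamp the dataset name so repeated test runs don't collide.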
+ datasetName = "movie_predictions" + dateFormat.format(date);
+ destinationTableName = "training_data";
+ bout = new ByteArrayOutputStream();
+ out = new PrintStream(bout);
+ originalPrintStream = System.out;
+ System.setOut(out);
+ }
+
+ static void createBigQueryDataset(String projectId, String datasetName, String location) {
+ try {
+      // Initialize client that will be used to send requests. This client only needs
+      // to be created once, and can be reused for multiple requests.
+ BigQuery bigquery =
+ BigQueryOptions.newBuilder()
+ .setLocation(location)
+ .setProjectId(projectId)
+ .build()
+ .getService();
+ DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+
+ Dataset newDataset = bigquery.create(datasetInfo);
+ String newDatasetName = newDataset.getDatasetId().getDataset();
+ System.out.println(newDatasetName + " created successfully");
+ } catch (BigQueryException e) {
+ System.out.format("Dataset was not created. %n%s", e.toString());
+ }
+ }
+
+ static void deleteBigQueryDataset(String projectId, String datasetName, String location) {
+ try {
+ // Initialize client that will be used to send requests. This client only needs to be created
+ // once, and can be reused for multiple requests.
+ BigQuery bigquery =
+ BigQueryOptions.newBuilder()
+ .setLocation(location)
+ .setProjectId(projectId)
+ .build()
+ .getService();
+
+ DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ boolean success = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+ if (success) {
+ System.out.println("Dataset deleted successfully");
+ } else {
+ System.out.println("Dataset was not found");
+ }
+ } catch (BigQueryException e) {
+ System.out.format("Dataset was not deleted. %n%s", e.toString());
+ }
+ }
+
+ @After
+ public void tearDown()
+ throws InterruptedException, ExecutionException, IOException, TimeoutException {
+
+ // Delete the featurestore
+ DeleteFeaturestoreSample.deleteFeaturestoreSample(
+ PROJECT_ID, featurestoreId, USE_FORCE, LOCATION, ENDPOINT, 300);
+
+ // Assert
+ String deleteFeaturestoreResponse = bout.toString();
+ assertThat(deleteFeaturestoreResponse).contains("Deleted Featurestore");
+
+ // Delete the big query dataset
+ deleteBigQueryDataset(PROJECT_ID, datasetName, LOCATION);
+
+ // Assert
+ String deleteBigQueryResponse = bout.toString();
+ assertThat(deleteBigQueryResponse).contains("Dataset deleted successfully");
+
+ System.out.flush();
+ System.setOut(originalPrintStream);
+ }
+
+ @Test
+ public void testFeatureValuesSamples()
+ throws IOException, InterruptedException, ExecutionException, TimeoutException {
+ // Create the featurestore
+ String tempUuid = UUID.randomUUID().toString().replaceAll("-", "_").substring(0, 26);
+ String id = String.format("temp_create_featurestore_test_%s", tempUuid);
+ CreateFeaturestoreSample.createFeaturestoreSample(
+ PROJECT_ID, id, MIN_NODE_COUNT, MAX_NODE_COUNT, LOCATION, ENDPOINT, 900);
+
+ // Assert
+ String createFeaturestoreResponse = bout.toString();
+ assertThat(createFeaturestoreResponse).contains("Create Featurestore Response");
+ featurestoreId =
+ createFeaturestoreResponse.split("Name: ")[1].split("featurestores/")[1].split("\n")[0]
+ .trim();
+
+ // Create the entity type
+ String entityTypeId = "movies";
+ CreateEntityTypeSample.createEntityTypeSample(
+ PROJECT_ID, featurestoreId, entityTypeId, DESCRIPTION, LOCATION, ENDPOINT, 900);
+
+ // Assert
+ String createEntityTypeResponse = bout.toString();
+ assertThat(createEntityTypeResponse).contains("Create Entity Type Response");
+
+ // Batch create features
+ BatchCreateFeaturesSample.batchCreateFeaturesSample(
+ PROJECT_ID, featurestoreId, entityTypeId, LOCATION, ENDPOINT, TIMEOUT);
+
+ // Assert
+ String batchCreateFeaturesResponse = bout.toString();
+ assertThat(batchCreateFeaturesResponse).contains("Batch Create Features Response");
+
+ // Import feature values
+ ImportFeatureValuesSample.importFeatureValuesSample(
+ PROJECT_ID,
+ featurestoreId,
+ entityTypeId,
+ GCS_SOURCE_URI,
+ ENTITY_ID_FIELD,
+ FEATURE_TIME_FIELD,
+ WORKER_COUNT,
+ LOCATION,
+ ENDPOINT,
+ TIMEOUT);
+
+ // Assert
+ String importFeatureValuesResponse = bout.toString();
+ assertThat(importFeatureValuesResponse).contains("Import Feature Values Response");
+
+ // Create the big query dataset
+ createBigQueryDataset(PROJECT_ID, datasetName, LOCATION);
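+    // Destination tables use the "bq://project.dataset.table" URI format; a suffix keeps each step's output separate.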
+ destinationTableUri =
+ String.format("bq://%s.%s.%s_full", PROJECT_ID, datasetName, destinationTableName);
+
+ // Assert
+ String createBigQueryDatasetResponse = bout.toString();
+ assertThat(createBigQueryDatasetResponse).contains(datasetName + " created successfully");
+
+ // Export feature values
+ ExportFeatureValuesSample.exportFeatureValuesSample(
+ PROJECT_ID,
+ featurestoreId,
+ entityTypeId,
+ destinationTableUri,
+ FEATURE_SELECTOR_IDS,
+ LOCATION,
+ ENDPOINT,
+ TIMEOUT);
+
+ // Assert
+ String exportFeatureValuesResponse = bout.toString();
+ assertThat(exportFeatureValuesResponse).contains("Export Feature Values Response");
+
+ destinationTableUri =
+ String.format("bq://%s.%s.%s_snapshot", PROJECT_ID, datasetName, destinationTableName);
+
+ // Snapshot export feature values
+ ExportFeatureValuesSnapshotSample.exportFeatureValuesSnapshotSample(
+ PROJECT_ID,
+ featurestoreId,
+ entityTypeId,
+ destinationTableUri,
+ FEATURE_SELECTOR_IDS,
+ LOCATION,
+ ENDPOINT,
+ TIMEOUT);
+
+ // Assert
+ String snapshotResponse = bout.toString();
+ assertThat(snapshotResponse).contains("Snapshot Export Feature Values Response");
+
+ destinationTableUri =
+ String.format("bq://%s.%s.%s_batchRead", PROJECT_ID, datasetName, destinationTableName);
+
+ // Batch read feature values
+ BatchReadFeatureValuesSample.batchReadFeatureValuesSample(
+ PROJECT_ID,
+ featurestoreId,
+ entityTypeId,
+ INPUT_CSV_FILE,
+ destinationTableUri,
+ FEATURE_SELECTOR_IDS,
+ LOCATION,
+ ENDPOINT,
+ TIMEOUT);
+
+ // Assert
+ String batchReadFeatureValuesResponse = bout.toString();
+ assertThat(batchReadFeatureValuesResponse).contains("Batch Read Feature Values Response");
+ }
+}