
Commit

samples(discoveryengine): Add Import Documents Samples for Google Cloud Sources (#11958)

* samples(discoveryengine): Add Import Documents Samples for Google Cloud sources

https://cloud.google.com/generative-ai-app-builder/docs/create-data-store-es
holtskinner authored Jul 15, 2024
1 parent b97d17e commit 47c059d
Showing 3 changed files with 567 additions and 46 deletions.
discoveryengine/create_data_store_sample.py (3 changes: 2 additions & 1 deletion)
@@ -54,8 +54,9 @@ def create_data_store_sample(
industry_vertical=discoveryengine.IndustryVertical.GENERIC,
# Options: SOLUTION_TYPE_RECOMMENDATION, SOLUTION_TYPE_SEARCH, SOLUTION_TYPE_CHAT, SOLUTION_TYPE_GENERATIVE_CHAT
solution_types=[discoveryengine.SolutionType.SOLUTION_TYPE_SEARCH],
# TODO(developer): Update content_config based on data store type.
# Options: NO_CONTENT, CONTENT_REQUIRED, PUBLIC_WEBSITE
content_config=discoveryengine.DataStore.ContentConfig.PUBLIC_WEBSITE,
content_config=discoveryengine.DataStore.ContentConfig.CONTENT_REQUIRED,
)

request = discoveryengine.CreateDataStoreRequest(
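For context, the hunk above touches only the DataStore definition inside create_data_store_sample; the rest of the function is collapsed in this view. Below is a minimal sketch of how that definition is typically wrapped up and submitted, assuming the google-cloud-discoveryengine client library; the display_name, parameter names, and surrounding code are illustrative and may differ from the committed sample.

from google.api_core.client_options import ClientOptions
from google.cloud import discoveryengine


def create_data_store_sample(
    project_id: str, location: str, data_store_id: str
) -> str:
    # Non-global locations must use the matching regional endpoint.
    client_options = (
        ClientOptions(api_endpoint=f"{location}-discoveryengine.googleapis.com")
        if location != "global"
        else None
    )
    client = discoveryengine.DataStoreServiceClient(client_options=client_options)

    data_store = discoveryengine.DataStore(
        display_name="My Data Store",  # illustrative value
        industry_vertical=discoveryengine.IndustryVertical.GENERIC,
        solution_types=[discoveryengine.SolutionType.SOLUTION_TYPE_SEARCH],
        # CONTENT_REQUIRED is the setting this commit switches to.
        content_config=discoveryengine.DataStore.ContentConfig.CONTENT_REQUIRED,
    )

    request = discoveryengine.CreateDataStoreRequest(
        # The new data store is created under the default collection.
        parent=client.collection_path(project_id, location, "default_collection"),
        data_store_id=data_store_id,
        data_store=data_store,
    )

    # create_data_store returns a long-running operation.
    operation = client.create_data_store(request=request)
    operation.result()

    return operation.operation.name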
discoveryengine/documents_sample_test.py (115 changes: 106 additions & 9 deletions)
@@ -18,18 +18,31 @@
from discoveryengine import import_documents_sample
from discoveryengine import list_documents_sample

import pytest

project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
location = "global"
data_store_id = "test-structured-data-engine"
gcs_uri = "gs://cloud-samples-data/gen-app-builder/search/empty.json"

# Empty Dataset
bigquery_dataset = "genappbuilder_test"
bigquery_table = "import_documents_test"

def test_import_documents_bigquery():
# Empty Dataset
bigquery_dataset = "genappbuilder_test"
bigquery_table = "import_documents_test"
operation_name = import_documents_sample.import_documents_bigquery_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
bigquery_dataset=bigquery_dataset,
bigquery_table=bigquery_table,
)

assert "operations/import-documents" in operation_name


def test_import_documents_gcs():
operation_name = import_documents_sample.import_documents_sample(
gcs_uri = "gs://cloud-samples-data/gen-app-builder/search/empty.json"
operation_name = import_documents_sample.import_documents_gcs_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
@@ -39,13 +52,97 @@ def test_import_documents_gcs():
assert "operations/import-documents" in operation_name


def test_import_documents_bigquery():
operation_name = import_documents_sample.import_documents_sample(
@pytest.mark.skip(reason="Permissions")
def test_import_documents_cloud_sql():
sql_project_id = project_id
sql_instance_id = "vertex-ai-search-tests"
sql_database_id = "test-db"
sql_table_id = "products"

operation_name = import_documents_sample.import_documents_cloud_sql_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
bigquery_dataset=bigquery_dataset,
bigquery_table=bigquery_table,
sql_project_id=sql_project_id,
sql_instance_id=sql_instance_id,
sql_database_id=sql_database_id,
sql_table_id=sql_table_id,
)

assert "operations/import-documents" in operation_name


def test_import_documents_spanner():
spanner_project_id = project_id
spanner_instance_id = "test-instance"
spanner_database_id = "vais-test-db"
spanner_table_id = "products"

operation_name = import_documents_sample.import_documents_spanner_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
spanner_project_id=spanner_project_id,
spanner_instance_id=spanner_instance_id,
spanner_database_id=spanner_database_id,
spanner_table_id=spanner_table_id,
)

assert "operations/import-documents" in operation_name


def test_import_documents_firestore():
firestore_project_id = project_id
firestore_database_id = "vais-tests"
firestore_collection_id = "products"

operation_name = import_documents_sample.import_documents_firestore_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
firestore_project_id=firestore_project_id,
firestore_database_id=firestore_database_id,
firestore_collection_id=firestore_collection_id,
)

assert "operations/import-documents" in operation_name


@pytest.mark.skip(reason="Timeout")
def test_import_documents_bigtable():
bigtable_project_id = project_id
bigtable_instance_id = "bigtable-test"
bigtable_table_id = "vais-test"

operation_name = import_documents_sample.import_documents_bigtable_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
bigtable_project_id=bigtable_project_id,
bigtable_instance_id=bigtable_instance_id,
bigtable_table_id=bigtable_table_id,
)

assert "operations/import-documents" in operation_name


@pytest.mark.skip(reason="Permissions")
def test_import_documents_healthcare_fhir_sample():
location = "us"
data_store_id = "healthcare-search-test"
healthcare_project_id = project_id
healthcare_location = "us-central1"
healthcare_dataset_id = "vais_testing"
healthcare_fihr_store_id = "vais_test_fihr_data"

operation_name = import_documents_sample.import_documents_healthcare_fhir_sample(
project_id=project_id,
location=location,
data_store_id=data_store_id,
healthcare_project_id=healthcare_project_id,
healthcare_location=healthcare_location,
healthcare_dataset_id=healthcare_dataset_id,
healthcare_fihr_store_id=healthcare_fihr_store_id,
)

assert "operations/import-documents" in operation_name
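The new tests above exercise BigQuery, Cloud Storage, Cloud SQL, Spanner, Firestore, Bigtable, and Healthcare FHIR variants of the import samples. The third changed file, which holds those samples (most likely discoveryengine/import_documents_sample.py), is not rendered in this capture, so the following is only a sketch of what the BigQuery variant plausibly looks like, inferred from the test's call signature and the public google-cloud-discoveryengine API; the committed sample may differ in details such as the chosen data_schema or how it reports the operation result.

from google.api_core.client_options import ClientOptions
from google.cloud import discoveryengine


def import_documents_bigquery_sample(
    project_id: str,
    location: str,
    data_store_id: str,
    bigquery_dataset: str,
    bigquery_table: str,
) -> str:
    # Regional data stores need the matching regional endpoint.
    client_options = (
        ClientOptions(api_endpoint=f"{location}-discoveryengine.googleapis.com")
        if location != "global"
        else None
    )
    client = discoveryengine.DocumentServiceClient(client_options=client_options)

    # Documents are imported into the default branch of the data store.
    parent = client.branch_path(
        project=project_id,
        location=location,
        data_store=data_store_id,
        branch="default_branch",
    )

    request = discoveryengine.ImportDocumentsRequest(
        parent=parent,
        bigquery_source=discoveryengine.BigQuerySource(
            project_id=project_id,
            dataset_id=bigquery_dataset,
            table_id=bigquery_table,
            data_schema="custom",  # assumption; "document" is the other common choice
        ),
        # INCREMENTAL keeps existing documents and upserts the imported ones.
        reconciliation_mode=discoveryengine.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL,
    )

    # import_documents returns a long-running operation; the tests assert that
    # its name contains "operations/import-documents".
    operation = client.import_documents(request=request)
    print(f"Waiting for operation to complete: {operation.operation.name}")
    operation.result()

    return operation.operation.name

The Cloud SQL, Spanner, Firestore, Bigtable, and FHIR variants called by the tests would follow the same pattern, swapping bigquery_source for the corresponding cloud_sql_source, spanner_source, firestore_source, bigtable_source, or fhir_store_source message on ImportDocumentsRequest.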
