feat: add ImportModelEvaluation in aiplatform v1 model_service.proto #1105

Merged (2 commits) on Mar 24, 2022
2 changes: 2 additions & 0 deletions google/cloud/aiplatform_v1/__init__.py
@@ -348,6 +348,7 @@
from .types.model_service import GetModelEvaluationRequest
from .types.model_service import GetModelEvaluationSliceRequest
from .types.model_service import GetModelRequest
from .types.model_service import ImportModelEvaluationRequest
from .types.model_service import ListModelEvaluationSlicesRequest
from .types.model_service import ListModelEvaluationSlicesResponse
from .types.model_service import ListModelEvaluationsRequest
@@ -720,6 +721,7 @@
"ImportFeatureValuesOperationMetadata",
"ImportFeatureValuesRequest",
"ImportFeatureValuesResponse",
"ImportModelEvaluationRequest",
"Index",
"IndexEndpoint",
"IndexEndpointServiceClient",
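For reference, a minimal usage sketch (not part of the diff) showing that the new request type is re-exported at the versioned package root; the resource name is a hypothetical placeholder:

    from google.cloud import aiplatform_v1

    # ImportModelEvaluationRequest is now importable directly from the package root.
    request = aiplatform_v1.ImportModelEvaluationRequest(
        parent="projects/my-project/locations/us-central1/models/123",  # hypothetical
    )
    print(type(request).__name__)  # ImportModelEvaluationRequest
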
10 changes: 10 additions & 0 deletions google/cloud/aiplatform_v1/gapic_metadata.json
@@ -1291,6 +1291,11 @@
"get_model_evaluation_slice"
]
},
"ImportModelEvaluation": {
"methods": [
"import_model_evaluation"
]
},
"ListModelEvaluationSlices": {
"methods": [
"list_model_evaluation_slices"
@@ -1346,6 +1351,11 @@
"get_model_evaluation_slice"
]
},
"ImportModelEvaluation": {
"methods": [
"import_model_evaluation"
]
},
"ListModelEvaluationSlices": {
"methods": [
"list_model_evaluation_slices"
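A minimal sketch (not part of the diff) of reading this mapping at runtime. It assumes gapic_metadata.json ships inside the installed package and that Python 3.9+ is available for importlib.resources.files; the outer key path ("services" / "clients" / "grpc" / "rpcs") is an assumption based on the standard gapic_metadata schema:

    import json
    from importlib import resources

    # gapic_metadata.json is packaged alongside the google.cloud.aiplatform_v1 sources (assumption).
    raw = resources.files("google.cloud.aiplatform_v1").joinpath("gapic_metadata.json").read_text()
    rpcs = json.loads(raw)["services"]["ModelService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["ImportModelEvaluation"]["methods"])  # ['import_model_evaluation']
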
103 changes: 103 additions & 0 deletions google/cloud/aiplatform_v1/services/model_service/async_client.py
@@ -40,6 +40,7 @@
from google.cloud.aiplatform_v1.types import model
from google.cloud.aiplatform_v1.types import model as gca_model
from google.cloud.aiplatform_v1.types import model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation_slice
from google.cloud.aiplatform_v1.types import model_service
from google.cloud.aiplatform_v1.types import operation as gca_operation
@@ -886,6 +887,108 @@ def sample_export_model():
# Done; return the response.
return response

async def import_model_evaluation(
self,
request: Union[model_service.ImportModelEvaluationRequest, dict] = None,
*,
parent: str = None,
model_evaluation: gca_model_evaluation.ModelEvaluation = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_model_evaluation.ModelEvaluation:
r"""Imports an externally generated ModelEvaluation.

.. code-block:: python

from google.cloud import aiplatform_v1

def sample_import_model_evaluation():
# Create a client
client = aiplatform_v1.ModelServiceClient()

# Initialize request argument(s)
request = aiplatform_v1.ImportModelEvaluationRequest(
parent="parent_value",
)

# Make the request
response = client.import_model_evaluation(request=request)

# Handle the response
print(response)

Args:
request (Union[google.cloud.aiplatform_v1.types.ImportModelEvaluationRequest, dict]):
The request object. Request message for
[ModelService.ImportModelEvaluation][google.cloud.aiplatform.v1.ModelService.ImportModelEvaluation]
parent (:class:`str`):
Required. The name of the parent model resource. Format:
``projects/{project}/locations/{location}/models/{model}``

This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model_evaluation (:class:`google.cloud.aiplatform_v1.types.ModelEvaluation`):
Required. Model evaluation resource
to be imported.

This corresponds to the ``model_evaluation`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.

Returns:
google.cloud.aiplatform_v1.types.ModelEvaluation:
A collection of metrics calculated by
comparing Model's predictions on all of
the test data against annotations from
the test data.

"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, model_evaluation])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

request = model_service.ImportModelEvaluationRequest(request)

# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if model_evaluation is not None:
request.model_evaluation = model_evaluation

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.import_model_evaluation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)

# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

# Done; return the response.
return response

async def get_model_evaluation(
self,
request: Union[model_service.GetModelEvaluationRequest, dict] = None,
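A minimal sketch (not part of the diff) of calling the new method on the async client, assuming Application Default Credentials are configured; the resource name and metrics schema URI are hypothetical placeholders:

    import asyncio

    from google.cloud import aiplatform_v1

    async def main():
        client = aiplatform_v1.ModelServiceAsyncClient()
        evaluation = aiplatform_v1.ModelEvaluation(
            metrics_schema_uri="gs://my-bucket/metrics_schema.yaml",  # hypothetical
        )
        # Flattened arguments; alternatively pass a fully built
        # ImportModelEvaluationRequest via request=...
        response = await client.import_model_evaluation(
            parent="projects/my-project/locations/us-central1/models/123",  # hypothetical
            model_evaluation=evaluation,
        )
        print(response.name)

    asyncio.run(main())
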
103 changes: 103 additions & 0 deletions google/cloud/aiplatform_v1/services/model_service/client.py
@@ -43,6 +43,7 @@
from google.cloud.aiplatform_v1.types import model
from google.cloud.aiplatform_v1.types import model as gca_model
from google.cloud.aiplatform_v1.types import model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation_slice
from google.cloud.aiplatform_v1.types import model_service
from google.cloud.aiplatform_v1.types import operation as gca_operation
@@ -1143,6 +1144,108 @@ def sample_export_model():
# Done; return the response.
return response

def import_model_evaluation(
self,
request: Union[model_service.ImportModelEvaluationRequest, dict] = None,
*,
parent: str = None,
model_evaluation: gca_model_evaluation.ModelEvaluation = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gca_model_evaluation.ModelEvaluation:
r"""Imports an externally generated ModelEvaluation.

.. code-block:: python

from google.cloud import aiplatform_v1

def sample_import_model_evaluation():
# Create a client
client = aiplatform_v1.ModelServiceClient()

# Initialize request argument(s)
request = aiplatform_v1.ImportModelEvaluationRequest(
parent="parent_value",
)

# Make the request
response = client.import_model_evaluation(request=request)

# Handle the response
print(response)

Args:
request (Union[google.cloud.aiplatform_v1.types.ImportModelEvaluationRequest, dict]):
The request object. Request message for
[ModelService.ImportModelEvaluation][google.cloud.aiplatform.v1.ModelService.ImportModelEvaluation]
parent (str):
Required. The name of the parent model resource. Format:
``projects/{project}/locations/{location}/models/{model}``

This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
model_evaluation (google.cloud.aiplatform_v1.types.ModelEvaluation):
Required. Model evaluation resource
to be imported.

This corresponds to the ``model_evaluation`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.

Returns:
google.cloud.aiplatform_v1.types.ModelEvaluation:
A collection of metrics calculated by
comparing Model's predictions on all of
the test data against annotations from
the test data.

"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, model_evaluation])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

# Minor optimization to avoid making a copy if the user passes
# in a model_service.ImportModelEvaluationRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, model_service.ImportModelEvaluationRequest):
request = model_service.ImportModelEvaluationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if model_evaluation is not None:
request.model_evaluation = model_evaluation

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.import_model_evaluation]

# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)

# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

# Done; return the response.
return response

def get_model_evaluation(
self,
request: Union[model_service.GetModelEvaluationRequest, dict] = None,
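A minimal sketch (not part of the diff) illustrating the mutual exclusion enforced above: pass either a request object or the flattened fields, never both; resource names are hypothetical placeholders:

    from google.cloud import aiplatform_v1

    client = aiplatform_v1.ModelServiceClient()
    parent = "projects/my-project/locations/us-central1/models/123"  # hypothetical

    # Flattened form: the client builds the ImportModelEvaluationRequest internally.
    response = client.import_model_evaluation(
        parent=parent,
        model_evaluation=aiplatform_v1.ModelEvaluation(),
    )
    print(response.name)

    # Passing a request object together with flattened fields raises ValueError.
    try:
        client.import_model_evaluation(
            request=aiplatform_v1.ImportModelEvaluationRequest(parent=parent),
            parent=parent,
        )
    except ValueError as exc:
        print(exc)
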
google/cloud/aiplatform_v1/services/model_service/transports/base.py
@@ -29,6 +29,7 @@
from google.cloud.aiplatform_v1.types import model
from google.cloud.aiplatform_v1.types import model as gca_model
from google.cloud.aiplatform_v1.types import model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation_slice
from google.cloud.aiplatform_v1.types import model_service
from google.longrunning import operations_pb2 # type: ignore
@@ -144,6 +145,11 @@ def _prep_wrapped_messages(self, client_info):
self.export_model: gapic_v1.method.wrap_method(
self.export_model, default_timeout=None, client_info=client_info,
),
self.import_model_evaluation: gapic_v1.method.wrap_method(
self.import_model_evaluation,
default_timeout=None,
client_info=client_info,
),
self.get_model_evaluation: gapic_v1.method.wrap_method(
self.get_model_evaluation,
default_timeout=None,
@@ -236,6 +242,18 @@ def export_model(
]:
raise NotImplementedError()

@property
def import_model_evaluation(
self,
) -> Callable[
[model_service.ImportModelEvaluationRequest],
Union[
gca_model_evaluation.ModelEvaluation,
Awaitable[gca_model_evaluation.ModelEvaluation],
],
]:
raise NotImplementedError()

@property
def get_model_evaluation(
self,
google/cloud/aiplatform_v1/services/model_service/transports/grpc.py
@@ -28,6 +28,7 @@
from google.cloud.aiplatform_v1.types import model
from google.cloud.aiplatform_v1.types import model as gca_model
from google.cloud.aiplatform_v1.types import model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation as gca_model_evaluation
from google.cloud.aiplatform_v1.types import model_evaluation_slice
from google.cloud.aiplatform_v1.types import model_service
from google.longrunning import operations_pb2 # type: ignore
@@ -413,6 +414,35 @@ def export_model(
)
return self._stubs["export_model"]

@property
def import_model_evaluation(
self,
) -> Callable[
[model_service.ImportModelEvaluationRequest],
gca_model_evaluation.ModelEvaluation,
]:
r"""Return a callable for the import model evaluation method over gRPC.

Imports an externally generated ModelEvaluation.

Returns:
Callable[[~.ImportModelEvaluationRequest],
~.ModelEvaluation]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "import_model_evaluation" not in self._stubs:
self._stubs["import_model_evaluation"] = self.grpc_channel.unary_unary(
"/google.cloud.aiplatform.v1.ModelService/ImportModelEvaluation",
request_serializer=model_service.ImportModelEvaluationRequest.serialize,
response_deserializer=gca_model_evaluation.ModelEvaluation.deserialize,
)
return self._stubs["import_model_evaluation"]

@property
def get_model_evaluation(
self,
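A minimal sketch (not part of the diff) of the routing header that accompanies the RPC, using the same google.api_core helper the generated clients call above; the resource name is a hypothetical placeholder:

    from google.api_core import gapic_v1

    parent = "projects/my-project/locations/us-central1/models/123"  # hypothetical
    metadata = gapic_v1.routing_header.to_grpc_metadata((("parent", parent),))
    # Yields an ("x-goog-request-params", "parent=...") metadata entry that lets the
    # backend route the request based on the parent resource.
    print(metadata)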