Skip to content

Commit

Permalink
feat(api): update via SDK Studio (#274)
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-app[bot] authored and stainless-bot committed Jul 23, 2024
1 parent dc7ef78 commit 2e703d3
Show file tree
Hide file tree
Showing 7 changed files with 317 additions and 3 deletions.
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1 +1 @@
configured_endpoints: 12
configured_endpoints: 13
3 changes: 2 additions & 1 deletion api.md
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,13 @@ Methods:
Types:

```python
from openlayer.types import InferencePipelineRetrieveResponse
from openlayer.types import InferencePipelineRetrieveResponse, InferencePipelineUpdateResponse
```

Methods:

- <code title="get /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">retrieve</a>(inference_pipeline_id) -> <a href="./src/openlayer/types/inference_pipeline_retrieve_response.py">InferencePipelineRetrieveResponse</a></code>
- <code title="put /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">update</a>(inference_pipeline_id, \*\*<a href="./src/openlayer/types/inference_pipeline_update_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipeline_update_response.py">InferencePipelineUpdateResponse</a></code>
- <code title="delete /inference-pipelines/{inferencePipelineId}">client.inference_pipelines.<a href="./src/openlayer/resources/inference_pipelines/inference_pipelines.py">delete</a>(inference_pipeline_id) -> None</code>

## Data
Expand Down
126 changes: 126 additions & 0 deletions src/openlayer/resources/inference_pipelines/inference_pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@

from __future__ import annotations

from typing import Optional

import httpx

from .data import (
Expand All @@ -20,7 +22,12 @@
RowsResourceWithStreamingResponse,
AsyncRowsResourceWithStreamingResponse,
)
from ...types import inference_pipeline_update_params
from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
from ..._utils import (
maybe_transform,
async_maybe_transform,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
Expand All @@ -38,6 +45,7 @@
AsyncTestResultsResourceWithStreamingResponse,
)
from ..._base_client import make_request_options
from ...types.inference_pipeline_update_response import InferencePipelineUpdateResponse
from ...types.inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse

__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]
Expand Down Expand Up @@ -99,6 +107,59 @@ def retrieve(
cast_to=InferencePipelineRetrieveResponse,
)

def update(
    self,
    inference_pipeline_id: str,
    *,
    description: Optional[str] | NotGiven = NOT_GIVEN,
    name: str | NotGiven = NOT_GIVEN,
    reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> InferencePipelineUpdateResponse:
    """
    Update inference pipeline.

    Args:
      description: The inference pipeline description.

      name: The inference pipeline name.

      reference_dataset_uri: The storage uri of your reference dataset. We recommend using the Python SDK or
          the UI to handle your reference dataset updates.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # An empty id would issue the PUT against the bare collection path, so fail fast.
    if not inference_pipeline_id:
        raise ValueError(
            f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
        )
    # Serialize the request body via the params TypedDict (which declares the
    # camelCase wire aliases, e.g. referenceDatasetUri).
    payload = maybe_transform(
        {
            "description": description,
            "name": name,
            "reference_dataset_uri": reference_dataset_uri,
        },
        inference_pipeline_update_params.InferencePipelineUpdateParams,
    )
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return self._put(
        f"/inference-pipelines/{inference_pipeline_id}",
        body=payload,
        options=request_options,
        cast_to=InferencePipelineUpdateResponse,
    )

def delete(
self,
inference_pipeline_id: str,
Expand Down Expand Up @@ -192,6 +253,59 @@ async def retrieve(
cast_to=InferencePipelineRetrieveResponse,
)

async def update(
    self,
    inference_pipeline_id: str,
    *,
    description: Optional[str] | NotGiven = NOT_GIVEN,
    name: str | NotGiven = NOT_GIVEN,
    reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> InferencePipelineUpdateResponse:
    """
    Update inference pipeline.

    Args:
      description: The inference pipeline description.

      name: The inference pipeline name.

      reference_dataset_uri: The storage uri of your reference dataset. We recommend using the Python SDK or
          the UI to handle your reference dataset updates.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # An empty id would issue the PUT against the bare collection path, so fail fast.
    if not inference_pipeline_id:
        raise ValueError(
            f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
        )
    # Serialize the request body via the params TypedDict (which declares the
    # camelCase wire aliases, e.g. referenceDatasetUri).
    payload = await async_maybe_transform(
        {
            "description": description,
            "name": name,
            "reference_dataset_uri": reference_dataset_uri,
        },
        inference_pipeline_update_params.InferencePipelineUpdateParams,
    )
    request_options = make_request_options(
        extra_headers=extra_headers,
        extra_query=extra_query,
        extra_body=extra_body,
        timeout=timeout,
    )
    return await self._put(
        f"/inference-pipelines/{inference_pipeline_id}",
        body=payload,
        options=request_options,
        cast_to=InferencePipelineUpdateResponse,
    )

async def delete(
self,
inference_pipeline_id: str,
Expand Down Expand Up @@ -236,6 +350,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.retrieve = to_raw_response_wrapper(
inference_pipelines.retrieve,
)
self.update = to_raw_response_wrapper(
inference_pipelines.update,
)
self.delete = to_raw_response_wrapper(
inference_pipelines.delete,
)
Expand All @@ -260,6 +377,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.retrieve = async_to_raw_response_wrapper(
inference_pipelines.retrieve,
)
self.update = async_to_raw_response_wrapper(
inference_pipelines.update,
)
self.delete = async_to_raw_response_wrapper(
inference_pipelines.delete,
)
Expand All @@ -284,6 +404,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.retrieve = to_streamed_response_wrapper(
inference_pipelines.retrieve,
)
self.update = to_streamed_response_wrapper(
inference_pipelines.update,
)
self.delete = to_streamed_response_wrapper(
inference_pipelines.delete,
)
Expand All @@ -308,6 +431,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.retrieve = async_to_streamed_response_wrapper(
inference_pipelines.retrieve,
)
self.update = async_to_streamed_response_wrapper(
inference_pipelines.update,
)
self.delete = async_to_streamed_response_wrapper(
inference_pipelines.delete,
)
Expand Down
2 changes: 2 additions & 0 deletions src/openlayer/types/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,6 @@
from .project_create_params import ProjectCreateParams as ProjectCreateParams
from .project_list_response import ProjectListResponse as ProjectListResponse
from .project_create_response import ProjectCreateResponse as ProjectCreateResponse
from .inference_pipeline_update_params import InferencePipelineUpdateParams as InferencePipelineUpdateParams
from .inference_pipeline_update_response import InferencePipelineUpdateResponse as InferencePipelineUpdateResponse
from .inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse as InferencePipelineRetrieveResponse
25 changes: 25 additions & 0 deletions src/openlayer/types/inference_pipeline_update_params.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Optional
from typing_extensions import Annotated, TypedDict

from .._utils import PropertyInfo

__all__ = ["InferencePipelineUpdateParams"]


class InferencePipelineUpdateParams(TypedDict, total=False):
    """Request body for `PUT /inference-pipelines/{inferencePipelineId}`.

    All keys are optional (`total=False`); keys the caller omits are simply
    not sent to the API, leaving those fields unchanged server-side —
    NOTE(review): partial-update semantics presumed from `total=False`; confirm
    against the API spec.
    """

    description: Optional[str]
    """The inference pipeline description."""

    name: str
    """The inference pipeline name."""

    reference_dataset_uri: Annotated[Optional[str], PropertyInfo(alias="referenceDatasetUri")]
    """The storage uri of your reference dataset.

    We recommend using the Python SDK or the UI to handle your reference dataset
    updates.
    """
61 changes: 61 additions & 0 deletions src/openlayer/types/inference_pipeline_update_response.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import Optional
from datetime import datetime
from typing_extensions import Literal

from pydantic import Field as FieldInfo

from .._models import BaseModel

__all__ = ["InferencePipelineUpdateResponse", "Links"]


class Links(BaseModel):
    """Related links returned with an inference pipeline resource.

    `app` appears to be a link into the Openlayer web app for this pipeline —
    NOTE(review): semantics inferred from the field name; confirm against the API.
    """

    app: str


class InferencePipelineUpdateResponse(BaseModel):
    """Response model for `PUT /inference-pipelines/{inferencePipelineId}`.

    Field aliases map the API's camelCase JSON keys onto snake_case attributes;
    fields with `default=None` are nullable/optional in the API payload.
    """

    id: str
    """The inference pipeline id."""

    date_created: datetime = FieldInfo(alias="dateCreated")
    """The creation date."""

    date_last_evaluated: Optional[datetime] = FieldInfo(alias="dateLastEvaluated", default=None)
    """The last test evaluation date."""

    date_last_sample_received: Optional[datetime] = FieldInfo(alias="dateLastSampleReceived", default=None)
    """The last data sample received date."""

    date_of_next_evaluation: Optional[datetime] = FieldInfo(alias="dateOfNextEvaluation", default=None)
    """The next test evaluation date."""

    date_updated: datetime = FieldInfo(alias="dateUpdated")
    """The last updated date."""

    description: Optional[str] = None
    """The inference pipeline description."""

    failing_goal_count: int = FieldInfo(alias="failingGoalCount")
    """The number of tests failing."""

    links: Links

    name: str
    """The inference pipeline name."""

    passing_goal_count: int = FieldInfo(alias="passingGoalCount")
    """The number of tests passing."""

    project_id: str = FieldInfo(alias="projectId")
    """The project id."""

    status: Literal["queued", "running", "paused", "failed", "completed", "unknown"]
    """The status of test evaluation for the inference pipeline."""

    status_message: Optional[str] = FieldInfo(alias="statusMessage", default=None)
    """The status message of test evaluation for the inference pipeline."""

    total_goal_count: int = FieldInfo(alias="totalGoalCount")
    """The total number of tests."""
Loading

0 comments on commit 2e703d3

Please sign in to comment.