diff --git a/.stats.yml b/.stats.yml
index 6ecfe8d4..6a8c1428 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 12
+configured_endpoints: 13
diff --git a/api.md b/api.md
index 4a8ff23b..82e9d940 100644
--- a/api.md
+++ b/api.md
@@ -55,12 +55,13 @@ Methods:
Types:
```python
-from openlayer.types import InferencePipelineRetrieveResponse
+from openlayer.types import InferencePipelineRetrieveResponse, InferencePipelineUpdateResponse
```
Methods:
- client.inference_pipelines.retrieve(inference_pipeline_id) -> InferencePipelineRetrieveResponse
+- client.inference_pipelines.update(inference_pipeline_id, \*\*params) -> InferencePipelineUpdateResponse
- client.inference_pipelines.delete(inference_pipeline_id) -> None
## Data
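A quick usage sketch of the new endpoint, for orientation; this is not part of the diff. It assumes the usual Stainless client construction, with the API key read from the OPENLAYER_API_KEY environment variable:

```python
# Hedged sketch of calling the new update endpoint; argument values are the
# examples used in the tests below, and client construction is assumed to
# follow the standard Stainless pattern.
from openlayer import Openlayer

client = Openlayer()  # assumed to read OPENLAYER_API_KEY from the environment

pipeline = client.inference_pipelines.update(
    inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    name="production",
    description="This pipeline is used for production.",
)
print(pipeline.name, pipeline.status)
```

The async client mirrors this with `await async_client.inference_pipelines.update(...)`.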
diff --git a/src/openlayer/resources/inference_pipelines/inference_pipelines.py b/src/openlayer/resources/inference_pipelines/inference_pipelines.py
index 8f473441..f64b9dea 100644
--- a/src/openlayer/resources/inference_pipelines/inference_pipelines.py
+++ b/src/openlayer/resources/inference_pipelines/inference_pipelines.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+from typing import Optional
+
import httpx
from .data import (
@@ -20,7 +22,12 @@
RowsResourceWithStreamingResponse,
AsyncRowsResourceWithStreamingResponse,
)
+from ...types import inference_pipeline_update_params
from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._utils import (
+ maybe_transform,
+ async_maybe_transform,
+)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
@@ -38,6 +45,7 @@
AsyncTestResultsResourceWithStreamingResponse,
)
from ..._base_client import make_request_options
+from ...types.inference_pipeline_update_response import InferencePipelineUpdateResponse
from ...types.inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse
__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]
@@ -99,6 +107,59 @@ def retrieve(
cast_to=InferencePipelineRetrieveResponse,
)
+ def update(
+ self,
+ inference_pipeline_id: str,
+ *,
+ description: Optional[str] | NotGiven = NOT_GIVEN,
+ name: str | NotGiven = NOT_GIVEN,
+ reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> InferencePipelineUpdateResponse:
+ """
+ Update inference pipeline.
+
+ Args:
+ description: The inference pipeline description.
+
+ name: The inference pipeline name.
+
+          reference_dataset_uri: The storage URI of your reference dataset. We recommend using the Python SDK or
+              the UI to handle your reference dataset updates.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not inference_pipeline_id:
+ raise ValueError(
+ f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
+ )
+ return self._put(
+ f"/inference-pipelines/{inference_pipeline_id}",
+ body=maybe_transform(
+ {
+ "description": description,
+ "name": name,
+ "reference_dataset_uri": reference_dataset_uri,
+ },
+ inference_pipeline_update_params.InferencePipelineUpdateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=InferencePipelineUpdateResponse,
+ )
+
def delete(
self,
inference_pipeline_id: str,
@@ -192,6 +253,59 @@ async def retrieve(
cast_to=InferencePipelineRetrieveResponse,
)
+ async def update(
+ self,
+ inference_pipeline_id: str,
+ *,
+ description: Optional[str] | NotGiven = NOT_GIVEN,
+ name: str | NotGiven = NOT_GIVEN,
+ reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> InferencePipelineUpdateResponse:
+ """
+ Update inference pipeline.
+
+ Args:
+ description: The inference pipeline description.
+
+ name: The inference pipeline name.
+
+          reference_dataset_uri: The storage URI of your reference dataset. We recommend using the Python SDK or
+              the UI to handle your reference dataset updates.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not inference_pipeline_id:
+ raise ValueError(
+ f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
+ )
+ return await self._put(
+ f"/inference-pipelines/{inference_pipeline_id}",
+ body=await async_maybe_transform(
+ {
+ "description": description,
+ "name": name,
+ "reference_dataset_uri": reference_dataset_uri,
+ },
+ inference_pipeline_update_params.InferencePipelineUpdateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=InferencePipelineUpdateResponse,
+ )
+
async def delete(
self,
inference_pipeline_id: str,
@@ -236,6 +350,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.retrieve = to_raw_response_wrapper(
inference_pipelines.retrieve,
)
+ self.update = to_raw_response_wrapper(
+ inference_pipelines.update,
+ )
self.delete = to_raw_response_wrapper(
inference_pipelines.delete,
)
@@ -260,6 +377,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.retrieve = async_to_raw_response_wrapper(
inference_pipelines.retrieve,
)
+ self.update = async_to_raw_response_wrapper(
+ inference_pipelines.update,
+ )
self.delete = async_to_raw_response_wrapper(
inference_pipelines.delete,
)
@@ -284,6 +404,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.retrieve = to_streamed_response_wrapper(
inference_pipelines.retrieve,
)
+ self.update = to_streamed_response_wrapper(
+ inference_pipelines.update,
+ )
self.delete = to_streamed_response_wrapper(
inference_pipelines.delete,
)
@@ -308,6 +431,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.retrieve = async_to_streamed_response_wrapper(
inference_pipelines.retrieve,
)
+ self.update = async_to_streamed_response_wrapper(
+ inference_pipelines.update,
+ )
self.delete = async_to_streamed_response_wrapper(
inference_pipelines.delete,
)
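A note on the signatures above: parameters default to the `NOT_GIVEN` sentinel rather than `None`, so an omitted argument is excluded from the request body entirely, while an explicit `None` (permitted by the `Optional[str]` annotations on `description` and `reference_dataset_uri`) can still be sent to clear a field. A simplified, self-contained illustration of that filtering, assuming behavior analogous to the SDK's `maybe_transform` rather than its actual implementation:

```python
# Toy illustration of sentinel-based filtering: omitted arguments disappear
# from the body, while an explicit None is kept and serialized as null.
from typing import Any, Dict


class NotGiven:
    def __repr__(self) -> str:
        return "NOT_GIVEN"


NOT_GIVEN = NotGiven()


def build_body(**params: Any) -> Dict[str, Any]:
    """Drop sentinel values; keep everything else, including explicit None."""
    return {key: value for key, value in params.items() if not isinstance(value, NotGiven)}


print(build_body(name="production", description=NOT_GIVEN))  # {'name': 'production'}
print(build_body(name="production", description=None))       # {'name': 'production', 'description': None}
```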
diff --git a/src/openlayer/types/__init__.py b/src/openlayer/types/__init__.py
index 416761d7..58883aff 100644
--- a/src/openlayer/types/__init__.py
+++ b/src/openlayer/types/__init__.py
@@ -6,4 +6,6 @@
from .project_create_params import ProjectCreateParams as ProjectCreateParams
from .project_list_response import ProjectListResponse as ProjectListResponse
from .project_create_response import ProjectCreateResponse as ProjectCreateResponse
+from .inference_pipeline_update_params import InferencePipelineUpdateParams as InferencePipelineUpdateParams
+from .inference_pipeline_update_response import InferencePipelineUpdateResponse as InferencePipelineUpdateResponse
from .inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse as InferencePipelineRetrieveResponse
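The redundant-looking `Name as Name` spelling follows the PEP 484 re-export convention, marking these names as public so type checkers accept importing them from `openlayer.types` directly:

```python
# Both import paths should resolve once the re-export above is in place.
from openlayer.types import InferencePipelineUpdateParams
from openlayer.types.inference_pipeline_update_params import InferencePipelineUpdateParams  # noqa: F811
```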
diff --git a/src/openlayer/types/inference_pipeline_update_params.py b/src/openlayer/types/inference_pipeline_update_params.py
new file mode 100644
index 00000000..29ae9076
--- /dev/null
+++ b/src/openlayer/types/inference_pipeline_update_params.py
@@ -0,0 +1,25 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Annotated, TypedDict
+
+from .._utils import PropertyInfo
+
+__all__ = ["InferencePipelineUpdateParams"]
+
+
+class InferencePipelineUpdateParams(TypedDict, total=False):
+ description: Optional[str]
+ """The inference pipeline description."""
+
+ name: str
+ """The inference pipeline name."""
+
+ reference_dataset_uri: Annotated[Optional[str], PropertyInfo(alias="referenceDatasetUri")]
+    """The storage URI of your reference dataset.
+
+ We recommend using the Python SDK or the UI to handle your reference dataset
+ updates.
+ """
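The `PropertyInfo(alias="referenceDatasetUri")` annotation is what maps the snake_case Python argument onto the camelCase wire format. A toy sketch of that renaming step, under the stated assumption that the transform simply substitutes aliased keys (the real `maybe_transform` reads the annotation metadata):

```python
# Minimal sketch of the snake_case -> camelCase alias step; an illustration,
# not the SDK's real transform.
from typing import Any, Dict

ALIASES = {"reference_dataset_uri": "referenceDatasetUri"}  # from PropertyInfo above


def to_request_body(params: Dict[str, Any]) -> Dict[str, Any]:
    """Rename aliased keys so the body matches the REST API's wire format."""
    return {ALIASES.get(key, key): value for key, value in params.items()}


print(to_request_body({"name": "production", "reference_dataset_uri": "s3://bucket/ref.csv"}))
# {'name': 'production', 'referenceDatasetUri': 's3://bucket/ref.csv'}
```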
diff --git a/src/openlayer/types/inference_pipeline_update_response.py b/src/openlayer/types/inference_pipeline_update_response.py
new file mode 100644
index 00000000..ca0e5ec2
--- /dev/null
+++ b/src/openlayer/types/inference_pipeline_update_response.py
@@ -0,0 +1,61 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["InferencePipelineUpdateResponse", "Links"]
+
+
+class Links(BaseModel):
+ app: str
+
+
+class InferencePipelineUpdateResponse(BaseModel):
+ id: str
+ """The inference pipeline id."""
+
+ date_created: datetime = FieldInfo(alias="dateCreated")
+ """The creation date."""
+
+ date_last_evaluated: Optional[datetime] = FieldInfo(alias="dateLastEvaluated", default=None)
+ """The last test evaluation date."""
+
+ date_last_sample_received: Optional[datetime] = FieldInfo(alias="dateLastSampleReceived", default=None)
+ """The last data sample received date."""
+
+ date_of_next_evaluation: Optional[datetime] = FieldInfo(alias="dateOfNextEvaluation", default=None)
+ """The next test evaluation date."""
+
+ date_updated: datetime = FieldInfo(alias="dateUpdated")
+ """The last updated date."""
+
+ description: Optional[str] = None
+ """The inference pipeline description."""
+
+ failing_goal_count: int = FieldInfo(alias="failingGoalCount")
+ """The number of tests failing."""
+
+ links: Links
+
+ name: str
+ """The inference pipeline name."""
+
+ passing_goal_count: int = FieldInfo(alias="passingGoalCount")
+ """The number of tests passing."""
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The project id."""
+
+ status: Literal["queued", "running", "paused", "failed", "completed", "unknown"]
+ """The status of test evaluation for the inference pipeline."""
+
+ status_message: Optional[str] = FieldInfo(alias="statusMessage", default=None)
+ """The status message of test evaluation for the inference pipeline."""
+
+ total_goal_count: int = FieldInfo(alias="totalGoalCount")
+ """The total number of tests."""
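Since the model is pydantic-based, a camelCase payload should validate directly into snake_case attributes. A hedged sketch, assuming pydantic v2 is installed (so `model_validate` exists and the field aliases are honored on validation), with illustrative values throughout:

```python
# Hypothetical payload with every required field; values are made up.
from openlayer.types.inference_pipeline_update_response import InferencePipelineUpdateResponse

payload = {
    "id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    "dateCreated": "2024-03-22T11:31:01Z",
    "dateUpdated": "2024-03-22T11:31:01Z",
    "failingGoalCount": 1,
    "links": {"app": "https://app.openlayer.com/example"},  # illustrative URL
    "name": "production",
    "passingGoalCount": 4,
    "projectId": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "status": "completed",
    "totalGoalCount": 5,
}

pipeline = InferencePipelineUpdateResponse.model_validate(payload)
print(pipeline.date_created.isoformat())  # aliases map back to snake_case attributes
```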
diff --git a/tests/api_resources/test_inference_pipelines.py b/tests/api_resources/test_inference_pipelines.py
index 883487c5..35de2478 100644
--- a/tests/api_resources/test_inference_pipelines.py
+++ b/tests/api_resources/test_inference_pipelines.py
@@ -9,7 +9,10 @@
from openlayer import Openlayer, AsyncOpenlayer
from tests.utils import assert_matches_type
-from openlayer.types import InferencePipelineRetrieveResponse
+from openlayer.types import (
+ InferencePipelineUpdateResponse,
+ InferencePipelineRetrieveResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -55,6 +58,54 @@ def test_path_params_retrieve(self, client: Openlayer) -> None:
"",
)
+ @parametrize
+ def test_method_update(self, client: Openlayer) -> None:
+ inference_pipeline = client.inference_pipelines.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_method_update_with_all_params(self, client: Openlayer) -> None:
+ inference_pipeline = client.inference_pipelines.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ reference_dataset_uri="referenceDatasetUri",
+ )
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_raw_response_update(self, client: Openlayer) -> None:
+ response = client.inference_pipelines.with_raw_response.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_streaming_response_update(self, client: Openlayer) -> None:
+ with client.inference_pipelines.with_streaming_response.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_update(self, client: Openlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
+ client.inference_pipelines.with_raw_response.update(
+ inference_pipeline_id="",
+ )
+
@parametrize
def test_method_delete(self, client: Openlayer) -> None:
inference_pipeline = client.inference_pipelines.delete(
@@ -135,6 +186,54 @@ async def test_path_params_retrieve(self, async_client: AsyncOpenlayer) -> None:
"",
)
+ @parametrize
+ async def test_method_update(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.inference_pipelines.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_method_update_with_all_params(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.inference_pipelines.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ reference_dataset_uri="referenceDatasetUri",
+ )
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_raw_response_update(self, async_client: AsyncOpenlayer) -> None:
+ response = await async_client.inference_pipelines.with_raw_response.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_update(self, async_client: AsyncOpenlayer) -> None:
+ async with async_client.inference_pipelines.with_streaming_response.update(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineUpdateResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_update(self, async_client: AsyncOpenlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
+ await async_client.inference_pipelines.with_raw_response.update(
+ inference_pipeline_id="",
+ )
+
@parametrize
async def test_method_delete(self, async_client: AsyncOpenlayer) -> None:
inference_pipeline = await async_client.inference_pipelines.delete(
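To exercise just the new tests locally, something like the following should work, under the assumption that the 4010 default above points at the project's usual Prism-style mock server:

```python
# Hedged sketch: run only the update tests against the local mock server.
import os

import pytest

os.environ.setdefault("TEST_API_BASE_URL", "http://127.0.0.1:4010")
raise SystemExit(pytest.main(["-k", "update", "tests/api_resources/test_inference_pipelines.py"]))
```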