
Commit

feat(api): update via SDK Studio (#262)
stainless-app[bot] authored Jul 17, 2024
1 parent 1b800ac commit 5d70200
Showing 9 changed files with 446 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 8
+configured_endpoints: 9
12 changes: 12 additions & 0 deletions api.md
@@ -64,6 +64,18 @@ Methods:

- <code title="post /inference-pipelines/{inferencePipelineId}/data-stream">client.inference_pipelines.data.<a href="./src/openlayer/resources/inference_pipelines/data.py">stream</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipelines/data_stream_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/data_stream_response.py">DataStreamResponse</a></code>

## Rows

Types:

```python
from openlayer.types.inference_pipelines import RowStreamResponse
```

Methods:

- <code title="put /inference-pipelines/{inferencePipelineId}/rows">client.inference_pipelines.rows.<a href="./src/openlayer/resources/inference_pipelines/rows.py">stream</a>(inference_pipeline_id, \*\*<a href="src/openlayer/types/inference_pipelines/row_stream_params.py">params</a>) -> <a href="./src/openlayer/types/inference_pipelines/row_stream_response.py">RowStreamResponse</a></code>

## TestResults

Types:
14 changes: 14 additions & 0 deletions src/openlayer/resources/inference_pipelines/__init__.py
@@ -8,6 +8,14 @@
    DataResourceWithStreamingResponse,
    AsyncDataResourceWithStreamingResponse,
)
from .rows import (
    RowsResource,
    AsyncRowsResource,
    RowsResourceWithRawResponse,
    AsyncRowsResourceWithRawResponse,
    RowsResourceWithStreamingResponse,
    AsyncRowsResourceWithStreamingResponse,
)
from .test_results import (
    TestResultsResource,
    AsyncTestResultsResource,
@@ -32,6 +40,12 @@
"AsyncDataResourceWithRawResponse",
"DataResourceWithStreamingResponse",
"AsyncDataResourceWithStreamingResponse",
"RowsResource",
"AsyncRowsResource",
"RowsResourceWithRawResponse",
"AsyncRowsResourceWithRawResponse",
"RowsResourceWithStreamingResponse",
"AsyncRowsResourceWithStreamingResponse",
"TestResultsResource",
"AsyncTestResultsResource",
"TestResultsResourceWithRawResponse",
32 changes: 32 additions & 0 deletions src/openlayer/resources/inference_pipelines/inference_pipelines.py
@@ -10,6 +10,14 @@
    DataResourceWithStreamingResponse,
    AsyncDataResourceWithStreamingResponse,
)
from .rows import (
    RowsResource,
    AsyncRowsResource,
    RowsResourceWithRawResponse,
    AsyncRowsResourceWithRawResponse,
    RowsResourceWithStreamingResponse,
    AsyncRowsResourceWithStreamingResponse,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from .test_results import (
@@ -29,6 +37,10 @@ class InferencePipelinesResource(SyncAPIResource):
    def data(self) -> DataResource:
        return DataResource(self._client)

    @cached_property
    def rows(self) -> RowsResource:
        return RowsResource(self._client)

    @cached_property
    def test_results(self) -> TestResultsResource:
        return TestResultsResource(self._client)
@@ -47,6 +59,10 @@ class AsyncInferencePipelinesResource(AsyncAPIResource):
    def data(self) -> AsyncDataResource:
        return AsyncDataResource(self._client)

    @cached_property
    def rows(self) -> AsyncRowsResource:
        return AsyncRowsResource(self._client)

    @cached_property
    def test_results(self) -> AsyncTestResultsResource:
        return AsyncTestResultsResource(self._client)
@@ -68,6 +84,10 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
    def data(self) -> DataResourceWithRawResponse:
        return DataResourceWithRawResponse(self._inference_pipelines.data)

    @cached_property
    def rows(self) -> RowsResourceWithRawResponse:
        return RowsResourceWithRawResponse(self._inference_pipelines.rows)

    @cached_property
    def test_results(self) -> TestResultsResourceWithRawResponse:
        return TestResultsResourceWithRawResponse(self._inference_pipelines.test_results)
@@ -81,6 +101,10 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
    def data(self) -> AsyncDataResourceWithRawResponse:
        return AsyncDataResourceWithRawResponse(self._inference_pipelines.data)

    @cached_property
    def rows(self) -> AsyncRowsResourceWithRawResponse:
        return AsyncRowsResourceWithRawResponse(self._inference_pipelines.rows)

    @cached_property
    def test_results(self) -> AsyncTestResultsResourceWithRawResponse:
        return AsyncTestResultsResourceWithRawResponse(self._inference_pipelines.test_results)
@@ -94,6 +118,10 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
    def data(self) -> DataResourceWithStreamingResponse:
        return DataResourceWithStreamingResponse(self._inference_pipelines.data)

    @cached_property
    def rows(self) -> RowsResourceWithStreamingResponse:
        return RowsResourceWithStreamingResponse(self._inference_pipelines.rows)

    @cached_property
    def test_results(self) -> TestResultsResourceWithStreamingResponse:
        return TestResultsResourceWithStreamingResponse(self._inference_pipelines.test_results)
@@ -107,6 +135,10 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
    def data(self) -> AsyncDataResourceWithStreamingResponse:
        return AsyncDataResourceWithStreamingResponse(self._inference_pipelines.data)

    @cached_property
    def rows(self) -> AsyncRowsResourceWithStreamingResponse:
        return AsyncRowsResourceWithStreamingResponse(self._inference_pipelines.rows)

    @cached_property
    def test_results(self) -> AsyncTestResultsResourceWithStreamingResponse:
        return AsyncTestResultsResourceWithStreamingResponse(self._inference_pipelines.test_results)
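
The cached properties above wire the new resource into every existing access path, so the raw-response wrapper defined in `rows.py` (below) can be reached straight from the client. A sketch, reusing the client and placeholder ids from the earlier example; the attribute names on the returned wrapper follow the SDK's usual `with_raw_response` convention and are an assumption here:

```python
# Hedged sketch: with_raw_response is assumed to return a wrapper around the HTTP
# response whose parse() yields the same RowStreamResponse as the plain call.
raw = client.inference_pipelines.rows.with_raw_response.stream(
    inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
    inference_id="cbb1ff9c-20fc-4f7c-8f4a-ff5e6b2c2f1d",
    row={"output": "42"},
)
print(raw.headers)          # response headers exposed on the wrapper
print(raw.parse().success)  # parsed RowStreamResponse
```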
184 changes: 184 additions & 0 deletions src/openlayer/resources/inference_pipelines/rows.py
@@ -0,0 +1,184 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Optional

import httpx

from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
from ..._utils import (
    maybe_transform,
    async_maybe_transform,
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
    to_raw_response_wrapper,
    to_streamed_response_wrapper,
    async_to_raw_response_wrapper,
    async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
from ...types.inference_pipelines import row_stream_params
from ...types.inference_pipelines.row_stream_response import RowStreamResponse

__all__ = ["RowsResource", "AsyncRowsResource"]


class RowsResource(SyncAPIResource):
    @cached_property
    def with_raw_response(self) -> RowsResourceWithRawResponse:
        return RowsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> RowsResourceWithStreamingResponse:
        return RowsResourceWithStreamingResponse(self)

    def stream(
        self,
        inference_pipeline_id: str,
        *,
        inference_id: str,
        row: object,
        config: Optional[row_stream_params.Config] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> RowStreamResponse:
        """
        Update an inference data point in an inference pipeline.

        Args:
          inference_id: Specify the inference id as a query param.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not inference_pipeline_id:
            raise ValueError(
                f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
            )
        return self._put(
            f"/inference-pipelines/{inference_pipeline_id}/rows",
            body=maybe_transform(
                {
                    "row": row,
                    "config": config,
                },
                row_stream_params.RowStreamParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=maybe_transform({"inference_id": inference_id}, row_stream_params.RowStreamParams),
            ),
            cast_to=RowStreamResponse,
        )


class AsyncRowsResource(AsyncAPIResource):
    @cached_property
    def with_raw_response(self) -> AsyncRowsResourceWithRawResponse:
        return AsyncRowsResourceWithRawResponse(self)

    @cached_property
    def with_streaming_response(self) -> AsyncRowsResourceWithStreamingResponse:
        return AsyncRowsResourceWithStreamingResponse(self)

    async def stream(
        self,
        inference_pipeline_id: str,
        *,
        inference_id: str,
        row: object,
        config: Optional[row_stream_params.Config] | NotGiven = NOT_GIVEN,
        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
        # The extra values given here take precedence over values defined on the client or passed to this method.
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
    ) -> RowStreamResponse:
        """
        Update an inference data point in an inference pipeline.

        Args:
          inference_id: Specify the inference id as a query param.

          extra_headers: Send extra headers

          extra_query: Add additional query parameters to the request

          extra_body: Add additional JSON properties to the request

          timeout: Override the client-level default timeout for this request, in seconds
        """
        if not inference_pipeline_id:
            raise ValueError(
                f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
            )
        return await self._put(
            f"/inference-pipelines/{inference_pipeline_id}/rows",
            body=await async_maybe_transform(
                {
                    "row": row,
                    "config": config,
                },
                row_stream_params.RowStreamParams,
            ),
            options=make_request_options(
                extra_headers=extra_headers,
                extra_query=extra_query,
                extra_body=extra_body,
                timeout=timeout,
                query=await async_maybe_transform({"inference_id": inference_id}, row_stream_params.RowStreamParams),
            ),
            cast_to=RowStreamResponse,
        )


class RowsResourceWithRawResponse:
    def __init__(self, rows: RowsResource) -> None:
        self._rows = rows

        self.stream = to_raw_response_wrapper(
            rows.stream,
        )


class AsyncRowsResourceWithRawResponse:
    def __init__(self, rows: AsyncRowsResource) -> None:
        self._rows = rows

        self.stream = async_to_raw_response_wrapper(
            rows.stream,
        )


class RowsResourceWithStreamingResponse:
    def __init__(self, rows: RowsResource) -> None:
        self._rows = rows

        self.stream = to_streamed_response_wrapper(
            rows.stream,
        )


class AsyncRowsResourceWithStreamingResponse:
    def __init__(self, rows: AsyncRowsResource) -> None:
        self._rows = rows

        self.stream = async_to_streamed_response_wrapper(
            rows.stream,
        )
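
`AsyncRowsResource.stream` mirrors the sync method. A usage sketch, assuming the async client follows the usual Stainless naming (`AsyncOpenlayer`) and is configured from the environment; the ids are placeholders:

```python
import asyncio

from openlayer import AsyncOpenlayer


async def main() -> None:
    # Assumes the async client reads its API key from the environment.
    client = AsyncOpenlayer()
    response = await client.inference_pipelines.rows.stream(
        inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
        inference_id="cbb1ff9c-20fc-4f7c-8f4a-ff5e6b2c2f1d",
        row={"output": "41", "ground_truth": "42"},
        config={"ground_truth_column_name": "ground_truth"},
    )
    print(response.success)


asyncio.run(main())
```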
2 changes: 2 additions & 0 deletions src/openlayer/types/inference_pipelines/__init__.py
@@ -2,7 +2,9 @@

from __future__ import annotations

from .row_stream_params import RowStreamParams as RowStreamParams
from .data_stream_params import DataStreamParams as DataStreamParams
from .row_stream_response import RowStreamResponse as RowStreamResponse
from .data_stream_response import DataStreamResponse as DataStreamResponse
from .test_result_list_params import TestResultListParams as TestResultListParams
from .test_result_list_response import TestResultListResponse as TestResultListResponse
44 changes: 44 additions & 0 deletions src/openlayer/types/inference_pipelines/row_stream_params.py
@@ -0,0 +1,44 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from __future__ import annotations

from typing import Optional
from typing_extensions import Required, Annotated, TypedDict

from ..._utils import PropertyInfo

__all__ = ["RowStreamParams", "Config"]


class RowStreamParams(TypedDict, total=False):
    inference_id: Required[Annotated[str, PropertyInfo(alias="inferenceId")]]
    """Specify the inference id as a query param."""

    row: Required[object]

    config: Optional[Config]


class Config(TypedDict, total=False):
    ground_truth_column_name: Annotated[Optional[str], PropertyInfo(alias="groundTruthColumnName")]
    """Name of the column with the ground truths."""

    human_feedback_column_name: Annotated[Optional[str], PropertyInfo(alias="humanFeedbackColumnName")]
    """Name of the column with human feedback."""

    inference_id_column_name: Annotated[Optional[str], PropertyInfo(alias="inferenceIdColumnName")]
    """Name of the column with the inference ids.

    This is useful if you want to update rows at a later point in time. If not
    provided, a unique id is generated by Openlayer.
    """

    latency_column_name: Annotated[Optional[str], PropertyInfo(alias="latencyColumnName")]
    """Name of the column with the latencies."""

    timestamp_column_name: Annotated[Optional[str], PropertyInfo(alias="timestampColumnName")]
    """Name of the column with the timestamps.

    Timestamps must be in UNIX sec format. If not provided, the upload timestamp is
    used.
    """
11 changes: 11 additions & 0 deletions src/openlayer/types/inference_pipelines/row_stream_response.py
@@ -0,0 +1,11 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import Literal

from ..._models import BaseModel

__all__ = ["RowStreamResponse"]


class RowStreamResponse(BaseModel):
    success: Literal[True]