Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(api): update via SDK Studio #252

Merged
merged 1 commit into from
Jul 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1 +1 @@
configured_endpoints: 6
configured_endpoints: 8
6 changes: 4 additions & 2 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@
Types:

```python
from openlayer.types import ProjectListResponse
from openlayer.types import ProjectCreateResponse, ProjectListResponse
```

Methods:

- <code title="post /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">create</a>(\*\*<a href="src/openlayer/types/project_create_params.py">params</a>) -> <a href="./src/openlayer/types/project_create_response.py">ProjectCreateResponse</a></code>
- <code title="get /projects">client.projects.<a href="./src/openlayer/resources/projects/projects.py">list</a>(\*\*<a href="src/openlayer/types/project_list_params.py">params</a>) -> <a href="./src/openlayer/types/project_list_response.py">ProjectListResponse</a></code>

## Commits
Expand All @@ -27,11 +28,12 @@ Methods:
Types:

```python
from openlayer.types.projects import InferencePipelineListResponse
from openlayer.types.projects import InferencePipelineCreateResponse, InferencePipelineListResponse
```

Methods:

- <code title="post /projects/{id}/inference-pipelines">client.projects.inference_pipelines.<a href="./src/openlayer/resources/projects/inference_pipelines.py">create</a>(id, \*\*<a href="src/openlayer/types/projects/inference_pipeline_create_params.py">params</a>) -> <a href="./src/openlayer/types/projects/inference_pipeline_create_response.py">InferencePipelineCreateResponse</a></code>
- <code title="get /projects/{id}/inference-pipelines">client.projects.inference_pipelines.<a href="./src/openlayer/resources/projects/inference_pipelines.py">list</a>(id, \*\*<a href="src/openlayer/types/projects/inference_pipeline_list_params.py">params</a>) -> <a href="./src/openlayer/types/projects/inference_pipeline_list_response.py">InferencePipelineListResponse</a></code>

# Commits
Expand Down
126 changes: 125 additions & 1 deletion src/openlayer/resources/projects/inference_pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@

from __future__ import annotations

from typing import Optional
from typing_extensions import Literal

import httpx

from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
Expand All @@ -20,8 +23,9 @@
from ..._base_client import (
make_request_options,
)
from ...types.projects import inference_pipeline_list_params
from ...types.projects import inference_pipeline_list_params, inference_pipeline_create_params
from ...types.projects.inference_pipeline_list_response import InferencePipelineListResponse
from ...types.projects.inference_pipeline_create_response import InferencePipelineCreateResponse

__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]

Expand All @@ -35,6 +39,60 @@ def with_raw_response(self) -> InferencePipelinesResourceWithRawResponse:
def with_streaming_response(self) -> InferencePipelinesResourceWithStreamingResponse:
return InferencePipelinesResourceWithStreamingResponse(self)

def create(
    self,
    id: str,
    *,
    description: Optional[str],
    name: str,
    reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
    storage_type: Literal["local", "s3", "gcs", "azure"] | NotGiven = NOT_GIVEN,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> InferencePipelineCreateResponse:
    """
    Create an inference pipeline under a project.

    Args:
      description: The inference pipeline description.

      name: The inference pipeline name.

      reference_dataset_uri: The reference dataset URI.

      storage_type: The storage type.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # An empty `id` would yield a malformed request path, so reject it up front.
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    # Shape the keyword arguments into the wire-format request body.
    request_body = maybe_transform(
        {
            "description": description,
            "name": name,
            "reference_dataset_uri": reference_dataset_uri,
            "storage_type": storage_type,
        },
        inference_pipeline_create_params.InferencePipelineCreateParams,
    )
    # Fold the per-call overrides into the request options.
    request_options = make_request_options(
        extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
    )
    return self._post(
        f"/projects/{id}/inference-pipelines",
        body=request_body,
        options=request_options,
        cast_to=InferencePipelineCreateResponse,
    )

def list(
self,
id: str,
Expand Down Expand Up @@ -98,6 +156,60 @@ def with_raw_response(self) -> AsyncInferencePipelinesResourceWithRawResponse:
def with_streaming_response(self) -> AsyncInferencePipelinesResourceWithStreamingResponse:
return AsyncInferencePipelinesResourceWithStreamingResponse(self)

async def create(
    self,
    id: str,
    *,
    description: Optional[str],
    name: str,
    reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
    storage_type: Literal["local", "s3", "gcs", "azure"] | NotGiven = NOT_GIVEN,
    # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
    # The extra values given here take precedence over values defined on the client or passed to this method.
    extra_headers: Headers | None = None,
    extra_query: Query | None = None,
    extra_body: Body | None = None,
    timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
) -> InferencePipelineCreateResponse:
    """
    Create an inference pipeline under a project.

    Args:
      description: The inference pipeline description.

      name: The inference pipeline name.

      reference_dataset_uri: The reference dataset URI.

      storage_type: The storage type.

      extra_headers: Send extra headers

      extra_query: Add additional query parameters to the request

      extra_body: Add additional JSON properties to the request

      timeout: Override the client-level default timeout for this request, in seconds
    """
    # An empty `id` would yield a malformed request path, so reject it up front.
    if not id:
        raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
    # Shape the keyword arguments into the wire-format request body (async transform).
    request_body = await async_maybe_transform(
        {
            "description": description,
            "name": name,
            "reference_dataset_uri": reference_dataset_uri,
            "storage_type": storage_type,
        },
        inference_pipeline_create_params.InferencePipelineCreateParams,
    )
    # Fold the per-call overrides into the request options.
    request_options = make_request_options(
        extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
    )
    return await self._post(
        f"/projects/{id}/inference-pipelines",
        body=request_body,
        options=request_options,
        cast_to=InferencePipelineCreateResponse,
    )

async def list(
self,
id: str,
Expand Down Expand Up @@ -156,6 +268,9 @@ class InferencePipelinesResourceWithRawResponse:
def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines

self.create = to_raw_response_wrapper(
inference_pipelines.create,
)
self.list = to_raw_response_wrapper(
inference_pipelines.list,
)
Expand All @@ -165,6 +280,9 @@ class AsyncInferencePipelinesResourceWithRawResponse:
def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines

self.create = async_to_raw_response_wrapper(
inference_pipelines.create,
)
self.list = async_to_raw_response_wrapper(
inference_pipelines.list,
)
Expand All @@ -174,6 +292,9 @@ class InferencePipelinesResourceWithStreamingResponse:
def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines

self.create = to_streamed_response_wrapper(
inference_pipelines.create,
)
self.list = to_streamed_response_wrapper(
inference_pipelines.list,
)
Expand All @@ -183,6 +304,9 @@ class AsyncInferencePipelinesResourceWithStreamingResponse:
def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines

self.create = async_to_streamed_response_wrapper(
inference_pipelines.create,
)
self.list = async_to_streamed_response_wrapper(
inference_pipelines.list,
)
Loading