
Commit a918b43

feat(api): removing openai/v1
1 parent 859d318 commit a918b43

34 files changed: 239 additions, 1,255 deletions

.stats.yml

Lines changed: 4 additions & 4 deletions
@@ -1,4 +1,4 @@
-configured_endpoints: 111
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-f252873ea1e1f38fd207331ef2621c511154d5be3f4076e59cc15754fc58eee4.yml
-openapi_spec_hash: 10cbb4337a06a9fdd7d08612dd6044c3
-config_hash: 0358112cc0f3d880b4d55debdbe1cfa3
+configured_endpoints: 107
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-1eddf141208c131ee4a64ef996f8f419b444f60450de6807a9f6bc711ed8b661.yml
+openapi_spec_hash: 94765c67ea99b1358169d41d810dd395
+config_hash: 7ec5a583f9c26b38993013bdfb0e7d46

README.md

Lines changed: 42 additions & 0 deletions
@@ -118,6 +118,48 @@ Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typ
 
 Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`.
 
+## Nested params
+
+Nested parameters are dictionaries, typed using `TypedDict`, for example:
+
+```python
+from llama_stack_client import LlamaStackClient
+
+client = LlamaStackClient()
+
+chat_completion_response = client.inference.chat_completion(
+    messages=[
+        {
+            "content": "string",
+            "role": "user",
+        }
+    ],
+    model_id="model_id",
+    logprobs={},
+)
+print(chat_completion_response.logprobs)
+```
+
+## File uploads
+
+Request parameters that correspond to file uploads can be passed as `bytes`, or a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance or a tuple of `(filename, contents, media type)`.
+
+```python
+from pathlib import Path
+from llama_stack_client import LlamaStackClient
+
+client = LlamaStackClient()
+
+client.files.create(
+    expires_after_anchor="expires_after_anchor",
+    expires_after_seconds=0,
+    file=Path("/path/to/file"),
+    purpose="assistants",
+)
+```
+
+The async client uses the exact same interface. If you pass a [`PathLike`](https://docs.python.org/3/library/os.html#os.PathLike) instance, the file contents will be read asynchronously automatically.
+
 ## Handling errors
 
 When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `llama_stack_client.APIConnectionError` is raised.
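A minimal sketch of catching that error (an illustration, not part of this commit): `APIConnectionError` is imported from the package root named above, and `client.files.list()` stands in for any API call.

```python
import llama_stack_client
from llama_stack_client import LlamaStackClient

client = LlamaStackClient()

try:
    client.files.list()  # any API call can raise APIConnectionError
except llama_stack_client.APIConnectionError as exc:
    print("The server could not be reached")
    print(exc.__cause__)  # the underlying exception, likely raised within httpx
```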

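The "File uploads" section added above notes that the async client shares the same interface; here is a minimal async sketch, assuming the async client is exported as `AsyncLlamaStackClient` and reusing the parameter values from the README example in the diff:

```python
import asyncio
from pathlib import Path

from llama_stack_client import AsyncLlamaStackClient  # assumed export name for the async client

client = AsyncLlamaStackClient()


async def main() -> None:
    # A PathLike argument is read asynchronously by the SDK before upload.
    await client.files.create(
        expires_after_anchor="expires_after_anchor",
        expires_after_seconds=0,
        file=Path("/path/to/file"),
        purpose="assistants",
    )


asyncio.run(main())
```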
api.md

Lines changed: 29 additions & 35 deletions
Large diffs are not rendered by default.

src/llama_stack_client/resources/chat/completions.py

Lines changed: 6 additions & 6 deletions
@@ -372,7 +372,7 @@ def create(
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
     ) -> CompletionCreateResponse | Stream[ChatCompletionChunk]:
         return self._post(
-            "/v1/openai/v1/chat/completions",
+            "/v1/chat/completions",
             body=maybe_transform(
                 {
                     "messages": messages,
@@ -439,7 +439,7 @@ def retrieve(
         if not completion_id:
             raise ValueError(f"Expected a non-empty value for `completion_id` but received {completion_id!r}")
         return self._get(
-            f"/v1/openai/v1/chat/completions/{completion_id}",
+            f"/v1/chat/completions/{completion_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -481,7 +481,7 @@ def list(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return self._get_api_list(
-            "/v1/openai/v1/chat/completions",
+            "/v1/chat/completions",
             page=SyncOpenAICursorPage[CompletionListResponse],
             options=make_request_options(
                 extra_headers=extra_headers,
@@ -845,7 +845,7 @@ async def create(
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
     ) -> CompletionCreateResponse | AsyncStream[ChatCompletionChunk]:
         return await self._post(
-            "/v1/openai/v1/chat/completions",
+            "/v1/chat/completions",
             body=await async_maybe_transform(
                 {
                     "messages": messages,
@@ -912,7 +912,7 @@ async def retrieve(
         if not completion_id:
             raise ValueError(f"Expected a non-empty value for `completion_id` but received {completion_id!r}")
         return await self._get(
-            f"/v1/openai/v1/chat/completions/{completion_id}",
+            f"/v1/chat/completions/{completion_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -954,7 +954,7 @@ def list(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return self._get_api_list(
-            "/v1/openai/v1/chat/completions",
+            "/v1/chat/completions",
             page=AsyncOpenAICursorPage[CompletionListResponse],
             options=make_request_options(
                 extra_headers=extra_headers,
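For SDK users the call surface is unchanged by this rename; only the underlying request URL loses its `/openai/v1` segment. A minimal usage sketch, with the `model` parameter name assumed (only `messages` is visible in the hunks above):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient()

# Before this commit the SDK issued POST {base_url}/v1/openai/v1/chat/completions;
# it now issues POST {base_url}/v1/chat/completions for the same call.
response = client.chat.completions.create(
    model="model_id",  # assumed OpenAI-compatible parameter name
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)
```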

src/llama_stack_client/resources/completions.py

Lines changed: 2 additions & 2 deletions
@@ -326,7 +326,7 @@ def create(
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
     ) -> CompletionCreateResponse | Stream[CompletionCreateResponse]:
         return self._post(
-            "/v1/openai/v1/completions",
+            "/v1/completions",
             body=maybe_transform(
                 {
                     "model": model,
@@ -664,7 +664,7 @@ async def create(
         timeout: float | httpx.Timeout | None | NotGiven = not_given,
     ) -> CompletionCreateResponse | AsyncStream[CompletionCreateResponse]:
         return await self._post(
-            "/v1/openai/v1/completions",
+            "/v1/completions",
             body=await async_maybe_transform(
                 {
                     "model": model,

src/llama_stack_client/resources/embeddings.py

Lines changed: 2 additions & 2 deletions
@@ -87,7 +87,7 @@ def create(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return self._post(
-            "/v1/openai/v1/embeddings",
+            "/v1/embeddings",
             body=maybe_transform(
                 {
                     "input": input,
@@ -169,7 +169,7 @@ async def create(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return await self._post(
-            "/v1/openai/v1/embeddings",
+            "/v1/embeddings",
             body=await async_maybe_transform(
                 {
                     "input": input,

src/llama_stack_client/resources/files.py

Lines changed: 25 additions & 11 deletions
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from typing import Mapping, cast
+from typing import Mapping, Optional, cast
 from typing_extensions import Literal
 
 import httpx
@@ -49,6 +49,8 @@ def with_streaming_response(self) -> FilesResourceWithStreamingResponse:
     def create(
         self,
         *,
+        expires_after_anchor: Optional[str],
+        expires_after_seconds: Optional[int],
         file: FileTypes,
         purpose: Literal["assistants", "batch"],
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -65,6 +67,9 @@ def create(
 
         - file: The File object (not file name) to be uploaded.
         - purpose: The intended purpose of the uploaded file.
+        - expires_after: Optional form values describing expiration for the file.
+          Expected expires_after[anchor] = "created_at", expires_after[seconds] =
+          {integer}. Seconds must be between 3600 and 2592000 (1 hour to 30 days).
 
         Args:
           purpose: Valid purpose values for OpenAI Files API.
@@ -79,6 +84,8 @@ def create(
         """
         body = deepcopy_minimal(
             {
+                "expires_after_anchor": expires_after_anchor,
+                "expires_after_seconds": expires_after_seconds,
                 "file": file,
                 "purpose": purpose,
             }
@@ -89,7 +96,7 @@ def create(
         # multipart/form-data; boundary=---abc--
         extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
         return self._post(
-            "/v1/openai/v1/files",
+            "/v1/files",
             body=maybe_transform(body, file_create_params.FileCreateParams),
             files=files,
             options=make_request_options(
@@ -124,7 +131,7 @@ def retrieve(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return self._get(
-            f"/v1/openai/v1/files/{file_id}",
+            f"/v1/files/{file_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -171,7 +178,7 @@ def list(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return self._get_api_list(
-            "/v1/openai/v1/files",
+            "/v1/files",
             page=SyncOpenAICursorPage[File],
             options=make_request_options(
                 extra_headers=extra_headers,
@@ -217,7 +224,7 @@ def delete(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return self._delete(
-            f"/v1/openai/v1/files/{file_id}",
+            f"/v1/files/{file_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -250,7 +257,7 @@ def content(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return self._get(
-            f"/v1/openai/v1/files/{file_id}/content",
+            f"/v1/files/{file_id}/content",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -281,6 +288,8 @@ def with_streaming_response(self) -> AsyncFilesResourceWithStreamingResponse:
     async def create(
         self,
         *,
+        expires_after_anchor: Optional[str],
+        expires_after_seconds: Optional[int],
         file: FileTypes,
         purpose: Literal["assistants", "batch"],
         # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -297,6 +306,9 @@ async def create(
 
         - file: The File object (not file name) to be uploaded.
         - purpose: The intended purpose of the uploaded file.
+        - expires_after: Optional form values describing expiration for the file.
+          Expected expires_after[anchor] = "created_at", expires_after[seconds] =
+          {integer}. Seconds must be between 3600 and 2592000 (1 hour to 30 days).
 
         Args:
           purpose: Valid purpose values for OpenAI Files API.
@@ -311,6 +323,8 @@ async def create(
         """
         body = deepcopy_minimal(
             {
+                "expires_after_anchor": expires_after_anchor,
+                "expires_after_seconds": expires_after_seconds,
                 "file": file,
                 "purpose": purpose,
             }
@@ -321,7 +335,7 @@ async def create(
         # multipart/form-data; boundary=---abc--
         extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
         return await self._post(
-            "/v1/openai/v1/files",
+            "/v1/files",
             body=await async_maybe_transform(body, file_create_params.FileCreateParams),
             files=files,
             options=make_request_options(
@@ -356,7 +370,7 @@ async def retrieve(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return await self._get(
-            f"/v1/openai/v1/files/{file_id}",
+            f"/v1/files/{file_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -403,7 +417,7 @@ def list(
           timeout: Override the client-level default timeout for this request, in seconds
         """
         return self._get_api_list(
-            "/v1/openai/v1/files",
+            "/v1/files",
             page=AsyncOpenAICursorPage[File],
             options=make_request_options(
                 extra_headers=extra_headers,
@@ -449,7 +463,7 @@ async def delete(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return await self._delete(
-            f"/v1/openai/v1/files/{file_id}",
+            f"/v1/files/{file_id}",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
@@ -482,7 +496,7 @@ async def content(
         if not file_id:
             raise ValueError(f"Expected a non-empty value for `file_id` but received {file_id!r}")
         return await self._get(
-            f"/v1/openai/v1/files/{file_id}/content",
+            f"/v1/files/{file_id}/content",
             options=make_request_options(
                 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
             ),
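The new `expires_after_anchor` / `expires_after_seconds` arguments correspond to the `expires_after[anchor]` and `expires_after[seconds]` form values described in the docstring above. A minimal sketch using values inside the documented range:

```python
from pathlib import Path

from llama_stack_client import LlamaStackClient

client = LlamaStackClient()

# Per the docstring: the anchor is expected to be "created_at" and seconds must
# fall between 3600 and 2592000 (1 hour to 30 days); 86400 = 24 hours.
uploaded = client.files.create(
    expires_after_anchor="created_at",
    expires_after_seconds=86400,
    file=Path("/path/to/file"),
    purpose="assistants",
)
print(uploaded)
```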
