@@ -20,8 +20,8 @@
 from ...._streaming import Stream, AsyncStream
 from ...._base_client import make_request_options
 from ....types.agents.chat import completion_create_params
-from ....types.agents.chat.chat_completion_chunk import ChatCompletionChunk
 from ....types.agents.chat.completion_create_response import CompletionCreateResponse
+from ....types.agents.chat.agent_chat_completion_chunk import AgentChatCompletionChunk

 __all__ = ["CompletionsResource", "AsyncCompletionsResource"]

@@ -186,7 +186,7 @@ def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> Stream[ChatCompletionChunk]:
+    ) -> Stream[AgentChatCompletionChunk]:
         """
         Creates a model response for the given chat conversation.

@@ -299,7 +299,7 @@ def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionCreateResponse | Stream[ChatCompletionChunk]:
+    ) -> CompletionCreateResponse | Stream[AgentChatCompletionChunk]:
         """
         Creates a model response for the given chat conversation.

@@ -412,7 +412,7 @@ def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionCreateResponse | Stream[ChatCompletionChunk]:
+    ) -> CompletionCreateResponse | Stream[AgentChatCompletionChunk]:
         return self._post(
             "/chat/completions"
             if self._client._base_url_overridden
@@ -446,7 +446,7 @@ def create(
             ),
             cast_to=CompletionCreateResponse,
             stream=stream or False,
-            stream_cls=Stream[ChatCompletionChunk],
+            stream_cls=Stream[AgentChatCompletionChunk],
         )

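For context on what the sync-side change means for callers, here is a minimal usage sketch. It assumes the resource is reached as `client.agents.chat.completions` (inferred from the module path above); the package name, client class, model id, and message payload are placeholders not specified by this diff. With `stream=True`, the streaming overload of `create()` is now typed to yield `AgentChatCompletionChunk` items instead of `ChatCompletionChunk`.

```python
# Hypothetical caller-side sketch; package name, client class, model id, and
# message payload are placeholders, not taken from this diff.
from example_sdk import Client  # placeholder package and client names

client = Client()

# With stream=True, create() is typed to return Stream[AgentChatCompletionChunk].
stream = client.agents.chat.completions.create(
    model="example-model",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)

for chunk in stream:  # each chunk is an AgentChatCompletionChunk
    print(chunk)
```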
@@ -610,7 +610,7 @@ async def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> AsyncStream[ChatCompletionChunk]:
+    ) -> AsyncStream[AgentChatCompletionChunk]:
         """
         Creates a model response for the given chat conversation.

@@ -723,7 +723,7 @@ async def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionCreateResponse | AsyncStream[ChatCompletionChunk]:
+    ) -> CompletionCreateResponse | AsyncStream[AgentChatCompletionChunk]:
         """
         Creates a model response for the given chat conversation.

@@ -836,7 +836,7 @@ async def create(
         extra_query: Query | None = None,
         extra_body: Body | None = None,
         timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> CompletionCreateResponse | AsyncStream[ChatCompletionChunk]:
+    ) -> CompletionCreateResponse | AsyncStream[AgentChatCompletionChunk]:
         return await self._post(
             "/chat/completions"
             if self._client._base_url_overridden
@@ -870,7 +870,7 @@ async def create(
             ),
             cast_to=CompletionCreateResponse,
             stream=stream or False,
-            stream_cls=AsyncStream[ChatCompletionChunk],
+            stream_cls=AsyncStream[AgentChatCompletionChunk],
         )

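And a matching sketch for the async side, under the same placeholder naming assumptions: the streaming overload now returns `AsyncStream[AgentChatCompletionChunk]`, consumed with `async for`.

```python
# Hypothetical async caller-side sketch; names are placeholders as above.
import asyncio

from example_sdk import AsyncClient  # placeholder package and client names


async def main() -> None:
    client = AsyncClient()

    # With stream=True, create() is typed to return AsyncStream[AgentChatCompletionChunk].
    stream = await client.agents.chat.completions.create(
        model="example-model",
        messages=[{"role": "user", "content": "Hello"}],
        stream=True,
    )

    async for chunk in stream:  # each chunk is an AgentChatCompletionChunk
        print(chunk)


asyncio.run(main())
```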