From 2282e4aa2f4b83e9ca1f727e90a9ca12b1a7f383 Mon Sep 17 00:00:00 2001 From: alex-ac Date: Wed, 4 Dec 2024 14:10:32 +0100 Subject: [PATCH] regent main sdk - new speakeasy version --- .speakeasy/gen.lock | 12 +- .speakeasy/workflow.lock | 5 +- README.md | 352 +++++++++--------- USAGE.md | 172 ++++----- docs/sdks/agents/README.md | 55 ++- docs/sdks/batch/README.md | 6 +- docs/sdks/chat/README.md | 55 ++- docs/sdks/classifiers/README.md | 52 ++- docs/sdks/embeddings/README.md | 21 +- docs/sdks/files/README.md | 84 ++--- docs/sdks/fim/README.md | 31 +- docs/sdks/finetuning/README.md | 6 +- docs/sdks/jobs/README.md | 65 ++-- docs/sdks/mistral/README.md | 4 +- docs/sdks/mistraljobs/README.md | 56 ++- docs/sdks/models/README.md | 78 ++-- src/mistralai/agents.py | 10 +- src/mistralai/chat.py | 10 +- src/mistralai/fim.py | 10 +- src/mistralai/httpclient.py | 6 + .../models/agentscompletionrequest.py | 34 +- .../models/agentscompletionstreamrequest.py | 36 +- src/mistralai/models/assistantmessage.py | 10 +- .../models/chatclassificationrequest.py | 43 ++- src/mistralai/models/chatcompletionrequest.py | 30 +- .../models/chatcompletionstreamrequest.py | 36 +- src/mistralai/models/classificationrequest.py | 10 +- src/mistralai/models/contentchunk.py | 9 +- src/mistralai/models/deltamessage.py | 8 +- src/mistralai/models/embeddingrequest.py | 6 +- src/mistralai/models/fimcompletionrequest.py | 10 +- .../models/fimcompletionstreamrequest.py | 10 +- src/mistralai/models/functioncall.py | 6 +- src/mistralai/models/imageurlchunk.py | 8 +- ...es_fine_tuning_create_fine_tuning_jobop.py | 13 +- src/mistralai/models/modellist.py | 6 +- ...retrieve_model_v1_models_model_id_getop.py | 9 +- src/mistralai/models/systemmessage.py | 10 +- src/mistralai/models/toolmessage.py | 8 +- src/mistralai/models/usermessage.py | 8 +- src/mistralai/models/validationerror.py | 6 +- src/mistralai/sdk.py | 14 + src/mistralai/sdkconfiguration.py | 4 +- src/mistralai/utils/annotations.py | 59 ++- src/mistralai/utils/eventstreaming.py | 62 ++- 45 files changed, 861 insertions(+), 684 deletions(-) diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock index 7ab50a7..e242222 100644 --- a/.speakeasy/gen.lock +++ b/.speakeasy/gen.lock @@ -3,8 +3,8 @@ id: 2d045ec7-2ebb-4f4d-ad25-40953b132161 management: docChecksum: 36ad3563d9d2b3af47015100d060570b docVersion: 0.0.2 - speakeasyVersion: 1.440.1 - generationVersion: 2.460.1 + speakeasyVersion: 1.451.1 + generationVersion: 2.470.1 releaseVersion: 1.2.4 configChecksum: 4fe789bac842073beb4e2d9c6c3f833d repoURL: https://github.com/mistralai/client-python.git @@ -14,7 +14,7 @@ features: python: additionalDependencies: 1.0.0 constsAndDefaults: 1.0.5 - core: 5.6.5 + core: 5.6.8 defaultEnabledRetries: 0.2.0 downloadStreams: 1.0.1 enumUnions: 0.1.0 @@ -34,11 +34,10 @@ features: responseFormat: 1.0.1 retries: 3.0.2 sdkHooks: 1.0.0 - serverEvents: 1.0.4 + serverEvents: 1.0.7 serverEventsSentinels: 0.1.0 serverIDs: 3.0.0 - tests: 1.6.0 - unions: 3.0.3 + unions: 3.0.4 uploadStreams: 1.0.0 generatedFiles: - .gitattributes @@ -583,3 +582,4 @@ examples: responses: "200": application/json: {"url": "https://scornful-daughter.com/"} +generatedTests: {} diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock index 07f3f6b..770090a 100644 --- a/.speakeasy/workflow.lock +++ b/.speakeasy/workflow.lock @@ -1,4 +1,4 @@ -speakeasyVersion: 1.440.1 +speakeasyVersion: 1.451.1 sources: mistral-azure-source: sourceNamespace: mistral-azure-source @@ -18,7 +18,6 @@ sources: sourceBlobDigest: 
sha256:5de08a038994ec94c0889341d434b598f541459d114f9935deb9ef3b3af90c5f tags: - latest - - speakeasy-sdk-regen-1733149559 targets: mistralai-azure-sdk: source: mistral-azure-source @@ -40,7 +39,7 @@ targets: sourceRevisionDigest: sha256:f74c08bdc7ae39f5fe2394df8f31ae623ece30a7f65019ab6b7bcea352953f05 sourceBlobDigest: sha256:5de08a038994ec94c0889341d434b598f541459d114f9935deb9ef3b3af90c5f codeSamplesNamespace: mistral-openapi-code-samples - codeSamplesRevisionDigest: sha256:800804bcf76f579fd76510126828c03be89bb4964a01da87f376148f86cc88dc + codeSamplesRevisionDigest: sha256:09212fda8fc13e0f486f157495d028138bc9babedfba6dd85f7024575f30fd0e workflow: workflowVersion: 1.0.0 speakeasyVersion: latest diff --git a/README.md b/README.md index 4a1fc83..7a88608 100644 --- a/README.md +++ b/README.md @@ -27,19 +27,26 @@ Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create yo ## Table of Contents + +* [Mistral Python Client](#mistral-python-client) + * [Migration warning](#migration-warning) + * [API Key Setup](#api-key-setup) + * [SDK Installation](#sdk-installation) + * [SDK Example Usage](#sdk-example-usage) + * [Providers' SDKs Example Usage](#providers-sdks-example-usage) + * [Available Resources and Operations](#available-resources-and-operations) + * [Server-sent event streaming](#server-sent-event-streaming) + * [File uploads](#file-uploads) + * [Retries](#retries) + * [Error Handling](#error-handling) + * [Server Selection](#server-selection) + * [Custom HTTP Client](#custom-http-client) + * [Authentication](#authentication) + * [Debugging](#debugging) + * [IDE Support](#ide-support) +* [Development](#development) + * [Contributions](#contributions) -* [SDK Installation](#sdk-installation) -* [IDE Support](#ide-support) -* [SDK Example Usage](#sdk-example-usage) -* [Available Resources and Operations](#available-resources-and-operations) -* [Server-sent event streaming](#server-sent-event-streaming) -* [File uploads](#file-uploads) -* [Retries](#retries) -* [Error Handling](#error-handling) -* [Server Selection](#server-selection) -* [Custom HTTP Client](#custom-http-client) -* [Authentication](#authentication) -* [Debugging](#debugging) @@ -76,20 +83,19 @@ This example shows how to create chat completions. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.chat.complete(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -]) +) as s: + res = s.chat.complete(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
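
The examples above only check that `res` is not `None`. For reference, a minimal sketch of pulling the generated text out of the response; it assumes the documented `ChatCompletionResponse` shape, where each entry in `res.choices` carries an assistant `message` with a `content` field:

```python
from mistralai import Mistral
import os

with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as s:
    res = s.chat.complete(model="mistral-small-latest", messages=[
        {
            "content": "Who is the best French painter? Answer in one short sentence.",
            "role": "user",
        },
    ])

    if res is not None and res.choices:
        # The first choice holds the assistant reply; content may be a plain
        # string or a list of content chunks depending on the request.
        print(res.choices[0].message.content)
```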
@@ -102,18 +108,19 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.chat.complete_async(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, - ]) - if res is not None: - # handle response - pass + ) as s: + res = await s.chat.complete_async(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -127,18 +134,17 @@ This example shows how to upload a file. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.upload(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), -}) +) as s: + res = s.files.upload(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
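
The snippet above passes `open("example.file", "rb")` directly, which leaves closing the handle to garbage collection. A minimal variant of the same call that closes the file deterministically by wrapping it in a `with open(...)` block:

```python
from mistralai import Mistral
import os

with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as s:
    # Closing the handle explicitly avoids leaking the file descriptor.
    with open("example.file", "rb") as fh:
        res = s.files.upload(file={
            "file_name": "example.file",
            "content": fh,
        })

    if res is not None:
        # handle response
        pass
```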
@@ -151,16 +157,17 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.files.upload_async(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), - }) - if res is not None: - # handle response - pass + ) as s: + res = await s.files.upload_async(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -174,20 +181,19 @@ This example shows how to create agents completions. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.agents.complete(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -], agent_id="") +) as s: + res = s.agents.complete(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
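
An agent reply is not always plain text: the assistant message can instead carry tool calls. A sketch of handling both cases, based on the `AssistantMessage` and `FunctionCall` models touched later in this patch (`tool_calls`, `function.name`, `function.arguments`):

```python
from mistralai import Mistral
import os

with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as s:
    res = s.agents.complete(messages=[
        {
            "content": "Who is the best French painter? Answer in one short sentence.",
            "role": "user",
        },
    ], agent_id="")  # placeholder agent id, as in the example above

    if res is not None and res.choices:
        msg = res.choices[0].message
        if msg.tool_calls:
            # The agent requested tool invocations instead of answering directly.
            for call in msg.tool_calls:
                print(call.function.name, call.function.arguments)
        else:
            print(msg.content)
```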
@@ -200,18 +206,19 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.agents.complete_async(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, - ], agent_id="") - if res is not None: - # handle response - pass + ) as s: + res = await s.agents.complete_async(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -225,18 +232,17 @@ This example shows how to create embedding request. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.embeddings.create(inputs=[ - "Embed this sentence.", - "As well as this one.", -], model="Wrangler") +) as s: + res = s.embeddings.create(inputs=[ + "Embed this sentence.", + "As well as this one.", + ], model="Wrangler") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
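
Embeddings are typically consumed by comparing vectors. A small sketch that scores the two inputs above against each other; it assumes the documented response shape (`res.data[i].embedding` is a list of floats) and uses the production `mistral-embed` model instead of the placeholder model name:

```python
import math
import os

from mistralai import Mistral

with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as s:
    res = s.embeddings.create(inputs=[
        "Embed this sentence.",
        "As well as this one.",
    ], model="mistral-embed")

    if res is not None:
        a, b = (item.embedding for item in res.data)
        # Cosine similarity: dot product over the product of vector norms.
        dot = sum(x * y for x, y in zip(a, b))
        norm = math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(y * y for y in b))
        print(f"cosine similarity: {dot / norm:.3f}")
```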
@@ -249,16 +255,17 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.embeddings.create_async(inputs=[ - "Embed this sentence.", - "As well as this one.", - ], model="Wrangler") - if res is not None: - # handle response - pass + ) as s: + res = await s.embeddings.create_async(inputs=[ + "Embed this sentence.", + "As well as this one.", + ], model="Wrangler") + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -439,32 +446,36 @@ The documentation for the GCP SDK is available [here](packages/mistralai_gcp/REA operations. These operations will expose the stream as [Generator][generator] that can be consumed using a simple `for` loop. The loop will terminate when the server no longer has any events to send and closes the -underlying connection. +underlying connection. + +The stream is also a [Context Manager][context-manager], so it can be used with the `with` statement, which closes the +underlying connection when the context is exited. ```python from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.chat.stream(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -]) +) as s: + res = s.chat.stream(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) -if res is not None: - for event in res: - # handle event - print(event, flush=True) + if res is not None: + with res as event_stream: + for event in event_stream: + # handle event + print(event, flush=True) ``` [mdn-sse]: https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events -[generator]: https://wiki.python.org/moin/Generators +[generator]: https://book.pythontips.com/en/latest/generators.html +[context-manager]: https://book.pythontips.com/en/latest/context_managers.html @@ -481,18 +492,17 @@ Certain SDK methods accept file objects as part of a request body or multi-part from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.upload(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), -}) +) as s: + res = s.files.upload(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -508,16 +518,15 @@ from mistral.utils import BackoffStrategy, RetryConfig from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.models.list( + retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) -res = s.models.list(, - RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -527,16 +536,15 @@ from mistral.utils import BackoffStrategy, RetryConfig from mistralai import Mistral import os -s = Mistral( +with Mistral( retry_config=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False), api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.list() +) as s: + res = s.models.list() -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -568,24 +576,23 @@
When custom error responses are specified for an operation, the SDK may also rai from mistralai import Mistral, models import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = None -try: - res = s.models.list() - - if res is not None: - # handle response - pass - -except models.HTTPValidationError as e: - # handle e.data: models.HTTPValidationErrorData - raise(e) -except models.SDKError as e: - # handle exception - raise(e) +) as s: + res = None + try: + res = s.models.list() + + if res is not None: + # handle response + pass + + except models.HTTPValidationError as e: + # handle e.data: models.HTTPValidationErrorData + raise(e) + except models.SDKError as e: + # handle exception + raise(e) ``` @@ -606,16 +613,15 @@ You can override the default server globally by passing a server name to the `se from mistralai import Mistral import os -s = Mistral( +with Mistral( server="eu", api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.list() +) as s: + res = s.models.list() -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -626,16 +632,15 @@ The default server can also be overridden globally by passing a URL to the `serv from mistralai import Mistral import os -s = Mistral( +with Mistral( server_url="https://api.mistral.ai", api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.list() +) as s: + res = s.models.list() -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -737,15 +742,14 @@ To authenticate with the API the `api_key` parameter must be set when initializi from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.list() +) as s: + res = s.models.list() -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` diff --git a/USAGE.md b/USAGE.md index 7d9d2ce..e523aa9 100644 --- a/USAGE.md +++ b/USAGE.md @@ -8,20 +8,19 @@ This example shows how to create chat completions. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.chat.complete(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -]) - -if res is not None: - # handle response - pass +) as s: + res = s.chat.complete(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) + + if res is not None: + # handle response + pass ```
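
Relatedly, the `SDKError` caught in the Error Handling section above is more useful than a bare exception: in Speakeasy-generated SDKs it carries the raw HTTP details. A sketch of inspecting it, assuming the usual `message`, `status_code`, and `body` attributes:

```python
from mistralai import Mistral, models
import os

with Mistral(
    api_key=os.getenv("MISTRAL_API_KEY", ""),
) as s:
    try:
        res = s.models.list()
        if res is not None:
            # handle response
            pass
    except models.SDKError as e:
        # The status code and raw body help distinguish auth errors,
        # rate limits, and server-side failures.
        print(e.status_code, e.message)
        print(e.body)
        raise
```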
@@ -34,18 +33,19 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.chat.complete_async(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, - ]) - if res is not None: - # handle response - pass + ) as s: + res = await s.chat.complete_async(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -59,18 +59,17 @@ This example shows how to upload a file. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.upload(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), -}) +) as s: + res = s.files.upload(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
@@ -83,16 +82,17 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.files.upload_async(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), - }) - if res is not None: - # handle response - pass + ) as s: + res = await s.files.upload_async(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -106,20 +106,19 @@ This example shows how to create agents completions. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.agents.complete(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -], agent_id="") - -if res is not None: - # handle response - pass +) as s: + res = s.agents.complete(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") + + if res is not None: + # handle response + pass ```
@@ -132,18 +131,19 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.agents.complete_async(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, - ], agent_id="") - if res is not None: - # handle response - pass + ) as s: + res = await s.agents.complete_async(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") + + if res is not None: + # handle response + pass asyncio.run(main()) ``` @@ -157,18 +157,17 @@ This example shows how to create embedding request. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.embeddings.create(inputs=[ - "Embed this sentence.", - "As well as this one.", -], model="Wrangler") +) as s: + res = s.embeddings.create(inputs=[ + "Embed this sentence.", + "As well as this one.", + ], model="Wrangler") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ```
@@ -181,16 +180,17 @@ from mistralai import Mistral import os async def main(): - s = Mistral( + async with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), - ) - res = await s.embeddings.create_async(inputs=[ - "Embed this sentence.", - "As well as this one.", - ], model="Wrangler") - if res is not None: - # handle response - pass + ) as s: + res = await s.embeddings.create_async(inputs=[ + "Embed this sentence.", + "As well as this one.", + ], model="Wrangler") + + if res is not None: + # handle response + pass asyncio.run(main()) ``` diff --git a/docs/sdks/agents/README.md b/docs/sdks/agents/README.md index 3eb946a..792b796 100644 --- a/docs/sdks/agents/README.md +++ b/docs/sdks/agents/README.md @@ -20,20 +20,19 @@ Agents Completion from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.agents.complete(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -], agent_id="") - -if res is not None: - # handle response - pass +) as s: + res = s.agents.complete(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") + + if res is not None: + # handle response + pass ``` @@ -76,21 +75,21 @@ Mistral AI provides the ability to stream responses back to a client in order to from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.agents.stream(messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -], agent_id="") - -if res is not None: - for event in res: - # handle event - print(event, flush=True) +) as s: + res = s.agents.stream(messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ], agent_id="") + + if res is not None: + with res as event_stream: + for event in event_stream: + # handle event + print(event, flush=True) ``` @@ -114,7 +113,7 @@ if res is not None: ### Response -**[Union[Generator[models.CompletionEvent, None, None], AsyncGenerator[models.CompletionEvent, None]]](../../models/.md)** +**[Union[eventstreaming.EventStream[models.CompletionEvent], eventstreaming.EventStreamAsync[models.CompletionEvent]]](../../models/.md)** ### Errors diff --git a/docs/sdks/batch/README.md b/docs/sdks/batch/README.md index 55a9c13..ec7d834 100644 --- a/docs/sdks/batch/README.md +++ b/docs/sdks/batch/README.md @@ -1,2 +1,6 @@ # Batch -(*batch*) \ No newline at end of file +(*batch*) + +## Overview + +### Available Operations diff --git a/docs/sdks/chat/README.md b/docs/sdks/chat/README.md index d6f4a76..6e00d3d 100644 --- a/docs/sdks/chat/README.md +++ b/docs/sdks/chat/README.md @@ -20,20 +20,19 @@ Chat Completion from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.chat.complete(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -]) - -if res is not None: - # handle response - pass +) as s: + res = s.chat.complete(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? 
Answer in one short sentence.", + "role": "user", + }, + ]) + + if res is not None: + # handle response + pass ``` @@ -79,21 +78,21 @@ Mistral AI provides the ability to stream responses back to a client in order to from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.chat.stream(model="mistral-small-latest", messages=[ - { - "content": "Who is the best French painter? Answer in one short sentence.", - "role": "user", - }, -]) - -if res is not None: - for event in res: - # handle event - print(event, flush=True) +) as s: + res = s.chat.stream(model="mistral-small-latest", messages=[ + { + "content": "Who is the best French painter? Answer in one short sentence.", + "role": "user", + }, + ]) + + if res is not None: + with res as event_stream: + for event in event_stream: + # handle event + print(event, flush=True) ``` @@ -120,7 +119,7 @@ if res is not None: ### Response -**[Union[Generator[models.CompletionEvent, None, None], AsyncGenerator[models.CompletionEvent, None]]](../../models/.md)** +**[Union[eventstreaming.EventStream[models.CompletionEvent], eventstreaming.EventStreamAsync[models.CompletionEvent]]](../../models/.md)** ### Errors diff --git a/docs/sdks/classifiers/README.md b/docs/sdks/classifiers/README.md index 05b8b7c..da90019 100644 --- a/docs/sdks/classifiers/README.md +++ b/docs/sdks/classifiers/README.md @@ -20,17 +20,16 @@ Moderations from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.classifiers.moderate(inputs=[ + "", + ]) -res = s.classifiers.moderate(inputs=[ - "", -]) - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -63,25 +62,24 @@ Moderations Chat from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.classifiers.moderate_chat(inputs=[ - [ - { - "content": [ - { - "text": "", - }, - ], - }, - ], -], model="V90") - -if res is not None: - # handle response - pass +) as s: + res = s.classifiers.moderate_chat(inputs=[ + [ + { + "content": [ + { + "text": "", + }, + ], + }, + ], + ], model="V90") + + if res is not None: + # handle response + pass ``` diff --git a/docs/sdks/embeddings/README.md b/docs/sdks/embeddings/README.md index 9f47e70..1f9f195 100644 --- a/docs/sdks/embeddings/README.md +++ b/docs/sdks/embeddings/README.md @@ -19,18 +19,17 @@ Embeddings from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.embeddings.create(inputs=[ - "Embed this sentence.", - "As well as this one.", -], model="Wrangler") - -if res is not None: - # handle response - pass +) as s: + res = s.embeddings.create(inputs=[ + "Embed this sentence.", + "As well as this one.", + ], model="Wrangler") + + if res is not None: + # handle response + pass ``` diff --git a/docs/sdks/files/README.md b/docs/sdks/files/README.md index 886d57e..ad2e0f0 100644 --- a/docs/sdks/files/README.md +++ b/docs/sdks/files/README.md @@ -28,18 +28,17 @@ Please contact us if you need to increase these storage limits. 
from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.files.upload(file={ + "file_name": "example.file", + "content": open("example.file", "rb"), + }) -res = s.files.upload(file={ - "file_name": "example.file", - "content": open("example.file", "rb"), -}) - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -71,15 +70,14 @@ Returns a list of files that belong to the user's organization. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.list() +) as s: + res = s.files.list() -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -115,15 +113,14 @@ Returns information about a specific file. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.retrieve(file_id="") +) as s: + res = s.files.retrieve(file_id="") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -154,15 +151,14 @@ Delete a file. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.files.delete(file_id="") -res = s.files.delete(file_id="") - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -193,15 +189,14 @@ Download a file from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.download(file_id="") +) as s: + res = s.files.download(file_id="") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -232,15 +227,14 @@ Get Signed Url from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.files.get_signed_url(file_id="") +) as s: + res = s.files.get_signed_url(file_id="") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` diff --git a/docs/sdks/fim/README.md b/docs/sdks/fim/README.md index d981152..eed1893 100644 --- a/docs/sdks/fim/README.md +++ b/docs/sdks/fim/README.md @@ -20,15 +20,14 @@ FIM completion. 
from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.fim.complete(model="codestral-2405", prompt="def", suffix="return a+b") -res = s.fim.complete(model="codestral-2405", prompt="def", suffix="return a+b") - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -69,16 +68,16 @@ Mistral AI provides the ability to stream responses back to a client in order to from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.fim.stream(model="codestral-2405", prompt="def", suffix="return a+b") +) as s: + res = s.fim.stream(model="codestral-2405", prompt="def", suffix="return a+b") -if res is not None: - for event in res: - # handle event - print(event, flush=True) + if res is not None: + with res as event_stream: + for event in event_stream: + # handle event + print(event, flush=True) ``` @@ -100,7 +99,7 @@ if res is not None: ### Response -**[Union[Generator[models.CompletionEvent, None, None], AsyncGenerator[models.CompletionEvent, None]]](../../models/.md)** +**[Union[eventstreaming.EventStream[models.CompletionEvent], eventstreaming.EventStreamAsync[models.CompletionEvent]]](../../models/.md)** ### Errors diff --git a/docs/sdks/finetuning/README.md b/docs/sdks/finetuning/README.md index fdcbd62..3e0f12c 100644 --- a/docs/sdks/finetuning/README.md +++ b/docs/sdks/finetuning/README.md @@ -1,2 +1,6 @@ # FineTuning -(*fine_tuning*) \ No newline at end of file +(*fine_tuning*) + +## Overview + +### Available Operations diff --git a/docs/sdks/jobs/README.md b/docs/sdks/jobs/README.md index 05b8b42..b477958 100644 --- a/docs/sdks/jobs/README.md +++ b/docs/sdks/jobs/README.md @@ -21,15 +21,14 @@ Get a list of fine-tuning jobs for your organization and user. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.fine_tuning.jobs.list() -res = s.fine_tuning.jobs.list() - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -68,15 +67,14 @@ Create a new fine-tuning job, it will be queued for processing. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.fine_tuning.jobs.create(model="codestral-latest", hyperparameters={}) +) as s: + res = s.fine_tuning.jobs.create(model="codestral-latest", hyperparameters={}) -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -114,15 +112,14 @@ Get a fine-tuned job details by its UUID. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.fine_tuning.jobs.get(job_id="b18d8d81-fd7b-4764-a31e-475cb1f36591") +) as s: + res = s.fine_tuning.jobs.get(job_id="b18d8d81-fd7b-4764-a31e-475cb1f36591") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -153,15 +150,14 @@ Request the cancellation of a fine tuning job. 
from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.fine_tuning.jobs.cancel(job_id="03fa7112-315a-4072-a9f2-43f3f1ec962e") -res = s.fine_tuning.jobs.cancel(job_id="03fa7112-315a-4072-a9f2-43f3f1ec962e") - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -192,15 +188,14 @@ Request the start of a validated fine tuning job. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.fine_tuning.jobs.start(job_id="0eb0f807-fb9f-4e46-9c13-4e257df6e1ba") +) as s: + res = s.fine_tuning.jobs.start(job_id="0eb0f807-fb9f-4e46-9c13-4e257df6e1ba") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` diff --git a/docs/sdks/mistral/README.md b/docs/sdks/mistral/README.md index 0189a6c..4b9573d 100644 --- a/docs/sdks/mistral/README.md +++ b/docs/sdks/mistral/README.md @@ -2,4 +2,6 @@ ## Overview -Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it. \ No newline at end of file +Mistral AI API: Our Chat Completion and Embeddings APIs specification. Create your account on [La Plateforme](https://console.mistral.ai) to get access and read the [docs](https://docs.mistral.ai) to learn how to use it. + +### Available Operations diff --git a/docs/sdks/mistraljobs/README.md b/docs/sdks/mistraljobs/README.md index 5852c2c..1880c83 100644 --- a/docs/sdks/mistraljobs/README.md +++ b/docs/sdks/mistraljobs/README.md @@ -20,15 +20,14 @@ Get a list of batch jobs for your organization and user. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.batch.jobs.list() -res = s.batch.jobs.list() - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -65,17 +64,16 @@ Create a new batch job, it will be queued for processing. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.batch.jobs.create(input_files=[ - "a621cf02-1cd9-4cf5-8403-315211a509a3", -], endpoint="/v1/fim/completions", model="2") +) as s: + res = s.batch.jobs.create(input_files=[ + "a621cf02-1cd9-4cf5-8403-315211a509a3", + ], endpoint="/v1/fim/completions", model="2") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -110,15 +108,14 @@ Get a batch job details by its UUID. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.batch.jobs.get(job_id="b888f774-3e7c-4135-a18c-6b985523c4bc") -res = s.batch.jobs.get(job_id="b888f774-3e7c-4135-a18c-6b985523c4bc") - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -149,15 +146,14 @@ Request the cancellation of a batch job. 
from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.batch.jobs.cancel(job_id="0f713502-9233-41c6-9ebd-c570b7edb496") +) as s: + res = s.batch.jobs.cancel(job_id="0f713502-9233-41c6-9ebd-c570b7edb496") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` diff --git a/docs/sdks/models/README.md b/docs/sdks/models/README.md index 2ad489e..7888494 100644 --- a/docs/sdks/models/README.md +++ b/docs/sdks/models/README.md @@ -24,15 +24,14 @@ List all models available to the user. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.models.list() -res = s.models.list() - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -63,15 +62,14 @@ Retrieve a model information. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.retrieve(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") +) as s: + res = s.models.retrieve(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -103,15 +101,14 @@ Delete a fine-tuned model. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.delete(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") +) as s: + res = s.models.delete(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -143,15 +140,14 @@ Update a model name or description. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) +) as s: + res = s.models.update(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") -res = s.models.update(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") - -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -184,15 +180,14 @@ Archive a fine-tuned model. from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.archive(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") +) as s: + res = s.models.archive(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` @@ -223,15 +218,14 @@ Un-archive a fine-tuned model. 
from mistralai import Mistral import os -s = Mistral( +with Mistral( api_key=os.getenv("MISTRAL_API_KEY", ""), -) - -res = s.models.unarchive(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") +) as s: + res = s.models.unarchive(model_id="ft:open-mistral-7b:587a6b29:20240514:7e773925") -if res is not None: - # handle response - pass + if res is not None: + # handle response + pass ``` diff --git a/src/mistralai/agents.py b/src/mistralai/agents.py index a45bcec..246cab4 100644 --- a/src/mistralai/agents.py +++ b/src/mistralai/agents.py @@ -5,7 +5,7 @@ from mistralai._hooks import HookContext from mistralai.types import OptionalNullable, UNSET from mistralai.utils import eventstreaming, get_security_from_env -from typing import Any, AsyncGenerator, Generator, List, Optional, Union +from typing import Any, List, Optional, Union class Agents(BaseSDK): @@ -336,7 +336,7 @@ def stream( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[Generator[models.CompletionEvent, None, None]]: + ) -> Optional[eventstreaming.EventStream[models.CompletionEvent]]: r"""Stream Agents completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. @@ -428,7 +428,7 @@ def stream( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events( + return eventstreaming.EventStream( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", @@ -487,7 +487,7 @@ async def stream_async( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[AsyncGenerator[models.CompletionEvent, None]]: + ) -> Optional[eventstreaming.EventStreamAsync[models.CompletionEvent]]: r"""Stream Agents completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. 
@@ -579,7 +579,7 @@ async def stream_async( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events_async( + return eventstreaming.EventStreamAsync( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", diff --git a/src/mistralai/chat.py b/src/mistralai/chat.py index 53313ca..4b7aad3 100644 --- a/src/mistralai/chat.py +++ b/src/mistralai/chat.py @@ -5,7 +5,7 @@ from mistralai._hooks import HookContext from mistralai.types import Nullable, OptionalNullable, UNSET from mistralai.utils import eventstreaming, get_security_from_env -from typing import Any, AsyncGenerator, Generator, List, Optional, Union +from typing import Any, List, Optional, Union class Chat(BaseSDK): @@ -337,7 +337,7 @@ def stream( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[Generator[models.CompletionEvent, None, None]]: + ) -> Optional[eventstreaming.EventStream[models.CompletionEvent]]: r"""Stream chat completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. @@ -435,7 +435,7 @@ def stream( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events( + return eventstreaming.EventStream( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", @@ -497,7 +497,7 @@ async def stream_async( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[AsyncGenerator[models.CompletionEvent, None]]: + ) -> Optional[eventstreaming.EventStreamAsync[models.CompletionEvent]]: r"""Stream chat completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. 
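# Since stream_async() now returns an EventStreamAsync rather than a bare
# async generator, the async consumption pattern mirrors the sync README
# example. A minimal sketch, assuming EventStreamAsync is both an async
# context manager and an async iterator (as the hunks above suggest):
import asyncio
import os

from mistralai import Mistral


async def main():
    async with Mistral(
        api_key=os.getenv("MISTRAL_API_KEY", ""),
    ) as s:
        res = await s.chat.stream_async(model="mistral-small-latest", messages=[
            {
                "content": "Who is the best French painter? Answer in one short sentence.",
                "role": "user",
            },
        ])
        if res is not None:
            async with res as event_stream:
                async for event in event_stream:
                    # handle event
                    print(event, flush=True)


asyncio.run(main())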
@@ -595,7 +595,7 @@ async def stream_async( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events_async( + return eventstreaming.EventStreamAsync( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", diff --git a/src/mistralai/fim.py b/src/mistralai/fim.py index 8f8c852..6f03631 100644 --- a/src/mistralai/fim.py +++ b/src/mistralai/fim.py @@ -5,7 +5,7 @@ from mistralai._hooks import HookContext from mistralai.types import Nullable, OptionalNullable, UNSET from mistralai.utils import eventstreaming, get_security_from_env -from typing import Any, AsyncGenerator, Generator, Optional, Union +from typing import Any, Optional, Union class Fim(BaseSDK): @@ -278,7 +278,7 @@ def stream( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[Generator[models.CompletionEvent, None, None]]: + ) -> Optional[eventstreaming.EventStream[models.CompletionEvent]]: r"""Stream fim completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. @@ -360,7 +360,7 @@ def stream( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events( + return eventstreaming.EventStream( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", @@ -405,7 +405,7 @@ async def stream_async( retries: OptionalNullable[utils.RetryConfig] = UNSET, server_url: Optional[str] = None, timeout_ms: Optional[int] = None, - ) -> Optional[AsyncGenerator[models.CompletionEvent, None]]: + ) -> Optional[eventstreaming.EventStreamAsync[models.CompletionEvent]]: r"""Stream fim completion Mistral AI provides the ability to stream responses back to a client in order to allow partial results for certain requests. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. 
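# The httpclient.py hunks below add close()/aclose() to the HttpClient and
# AsyncHttpClient protocols, which is what lets the SDK release its transport
# when used as a context manager. httpx clients already satisfy the updated
# protocols; a sketch of supplying one, assuming the generated constructor's
# optional `client` argument:
import httpx

from mistralai import Mistral

# httpx.Client structurally matches HttpClient: send(), build_request(),
# and the newly required close().
http_client = httpx.Client(timeout=30.0)

s = Mistral(api_key="...", client=http_client)
# ... make calls with s ...
http_client.close()  # the protocol method the SDK can now rely on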
@@ -487,7 +487,7 @@ async def stream_async( data: Any = None if utils.match_response(http_res, "200", "text/event-stream"): - return eventstreaming.stream_events_async( + return eventstreaming.EventStreamAsync( http_res, lambda raw: utils.unmarshal_json(raw, models.CompletionEvent), sentinel="[DONE]", diff --git a/src/mistralai/httpclient.py b/src/mistralai/httpclient.py index 36b642a..167cea4 100644 --- a/src/mistralai/httpclient.py +++ b/src/mistralai/httpclient.py @@ -41,6 +41,9 @@ def build_request( ) -> httpx.Request: pass + def close(self) -> None: + pass + @runtime_checkable class AsyncHttpClient(Protocol): @@ -76,3 +79,6 @@ def build_request( extensions: Optional[httpx._types.RequestExtensions] = None, ) -> httpx.Request: pass + + async def aclose(self) -> None: + pass diff --git a/src/mistralai/models/agentscompletionrequest.py b/src/mistralai/models/agentscompletionrequest.py index bce326a..5f53ddd 100644 --- a/src/mistralai/models/agentscompletionrequest.py +++ b/src/mistralai/models/agentscompletionrequest.py @@ -13,23 +13,30 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag, model_serializer from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -AgentsCompletionRequestStopTypedDict = Union[str, List[str]] +AgentsCompletionRequestStopTypedDict = TypeAliasType( + "AgentsCompletionRequestStopTypedDict", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -AgentsCompletionRequestStop = Union[str, List[str]] +AgentsCompletionRequestStop = TypeAliasType( + "AgentsCompletionRequestStop", Union[str, List[str]] +) r"""Stop generation if this token is detected. 
Or if one of these tokens is detected when providing an array""" -AgentsCompletionRequestMessagesTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +AgentsCompletionRequestMessagesTypedDict = TypeAliasType( + "AgentsCompletionRequestMessagesTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) AgentsCompletionRequestMessages = Annotated[ @@ -43,10 +50,15 @@ ] -AgentsCompletionRequestToolChoiceTypedDict = Union[ToolChoiceTypedDict, ToolChoiceEnum] +AgentsCompletionRequestToolChoiceTypedDict = TypeAliasType( + "AgentsCompletionRequestToolChoiceTypedDict", + Union[ToolChoiceTypedDict, ToolChoiceEnum], +) -AgentsCompletionRequestToolChoice = Union[ToolChoice, ToolChoiceEnum] +AgentsCompletionRequestToolChoice = TypeAliasType( + "AgentsCompletionRequestToolChoice", Union[ToolChoice, ToolChoiceEnum] +) class AgentsCompletionRequestTypedDict(TypedDict): diff --git a/src/mistralai/models/agentscompletionstreamrequest.py b/src/mistralai/models/agentscompletionstreamrequest.py index 94cc983..fdc1532 100644 --- a/src/mistralai/models/agentscompletionstreamrequest.py +++ b/src/mistralai/models/agentscompletionstreamrequest.py @@ -13,23 +13,30 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag, model_serializer from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -AgentsCompletionStreamRequestStopTypedDict = Union[str, List[str]] +AgentsCompletionStreamRequestStopTypedDict = TypeAliasType( + "AgentsCompletionStreamRequestStopTypedDict", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -AgentsCompletionStreamRequestStop = Union[str, List[str]] +AgentsCompletionStreamRequestStop = TypeAliasType( + "AgentsCompletionStreamRequestStop", Union[str, List[str]] +) r"""Stop generation if this token is detected. 
Or if one of these tokens is detected when providing an array""" -AgentsCompletionStreamRequestMessagesTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +AgentsCompletionStreamRequestMessagesTypedDict = TypeAliasType( + "AgentsCompletionStreamRequestMessagesTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) AgentsCompletionStreamRequestMessages = Annotated[ @@ -43,12 +50,15 @@ ] -AgentsCompletionStreamRequestToolChoiceTypedDict = Union[ - ToolChoiceTypedDict, ToolChoiceEnum -] +AgentsCompletionStreamRequestToolChoiceTypedDict = TypeAliasType( + "AgentsCompletionStreamRequestToolChoiceTypedDict", + Union[ToolChoiceTypedDict, ToolChoiceEnum], +) -AgentsCompletionStreamRequestToolChoice = Union[ToolChoice, ToolChoiceEnum] +AgentsCompletionStreamRequestToolChoice = TypeAliasType( + "AgentsCompletionStreamRequestToolChoice", Union[ToolChoice, ToolChoiceEnum] +) class AgentsCompletionStreamRequestTypedDict(TypedDict): diff --git a/src/mistralai/models/assistantmessage.py b/src/mistralai/models/assistantmessage.py index d7b929b..c9a2894 100644 --- a/src/mistralai/models/assistantmessage.py +++ b/src/mistralai/models/assistantmessage.py @@ -6,13 +6,17 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -AssistantMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]] +AssistantMessageContentTypedDict = TypeAliasType( + "AssistantMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]] +) -AssistantMessageContent = Union[str, List[ContentChunk]] +AssistantMessageContent = TypeAliasType( + "AssistantMessageContent", Union[str, List[ContentChunk]] +) AssistantMessageRole = Literal["assistant"] diff --git a/src/mistralai/models/chatclassificationrequest.py b/src/mistralai/models/chatclassificationrequest.py index 6b4cc13..6f3967d 100644 --- a/src/mistralai/models/chatclassificationrequest.py +++ b/src/mistralai/models/chatclassificationrequest.py @@ -10,15 +10,18 @@ import pydantic from pydantic import Discriminator, Tag, model_serializer from typing import List, Union -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, TypeAliasType, TypedDict -TwoTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +TwoTypedDict = TypeAliasType( + "TwoTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) Two = Annotated[ @@ -32,12 +35,15 @@ ] -OneTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +OneTypedDict = TypeAliasType( + "OneTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) One = Annotated[ @@ -51,13 +57,16 @@ ] -ChatClassificationRequestInputsTypedDict = Union[ - List[OneTypedDict], List[List[TwoTypedDict]] -] +ChatClassificationRequestInputsTypedDict = TypeAliasType( + "ChatClassificationRequestInputsTypedDict", + Union[List[OneTypedDict], List[List[TwoTypedDict]]], +) r"""Chat to classify""" -ChatClassificationRequestInputs = Union[List[One], 
List[List[Two]]] +ChatClassificationRequestInputs = TypeAliasType( + "ChatClassificationRequestInputs", Union[List[One], List[List[Two]]] +) r"""Chat to classify""" diff --git a/src/mistralai/models/chatcompletionrequest.py b/src/mistralai/models/chatcompletionrequest.py index b3435d5..195ea59 100644 --- a/src/mistralai/models/chatcompletionrequest.py +++ b/src/mistralai/models/chatcompletionrequest.py @@ -13,23 +13,26 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag, model_serializer from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -StopTypedDict = Union[str, List[str]] +StopTypedDict = TypeAliasType("StopTypedDict", Union[str, List[str]]) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -Stop = Union[str, List[str]] +Stop = TypeAliasType("Stop", Union[str, List[str]]) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -MessagesTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +MessagesTypedDict = TypeAliasType( + "MessagesTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) Messages = Annotated[ @@ -43,10 +46,15 @@ ] -ChatCompletionRequestToolChoiceTypedDict = Union[ToolChoiceTypedDict, ToolChoiceEnum] +ChatCompletionRequestToolChoiceTypedDict = TypeAliasType( + "ChatCompletionRequestToolChoiceTypedDict", + Union[ToolChoiceTypedDict, ToolChoiceEnum], +) -ChatCompletionRequestToolChoice = Union[ToolChoice, ToolChoiceEnum] +ChatCompletionRequestToolChoice = TypeAliasType( + "ChatCompletionRequestToolChoice", Union[ToolChoice, ToolChoiceEnum] +) class ChatCompletionRequestTypedDict(TypedDict): diff --git a/src/mistralai/models/chatcompletionstreamrequest.py b/src/mistralai/models/chatcompletionstreamrequest.py index a98eb33..fee6509 100644 --- a/src/mistralai/models/chatcompletionstreamrequest.py +++ b/src/mistralai/models/chatcompletionstreamrequest.py @@ -13,23 +13,30 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag, model_serializer from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -ChatCompletionStreamRequestStopTypedDict = Union[str, List[str]] +ChatCompletionStreamRequestStopTypedDict = TypeAliasType( + "ChatCompletionStreamRequestStopTypedDict", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -ChatCompletionStreamRequestStop = Union[str, List[str]] +ChatCompletionStreamRequestStop = TypeAliasType( + "ChatCompletionStreamRequestStop", Union[str, List[str]] +) r"""Stop generation if this token is detected. 
Or if one of these tokens is detected when providing an array""" -ChatCompletionStreamRequestMessagesTypedDict = Union[ - SystemMessageTypedDict, - UserMessageTypedDict, - AssistantMessageTypedDict, - ToolMessageTypedDict, -] +ChatCompletionStreamRequestMessagesTypedDict = TypeAliasType( + "ChatCompletionStreamRequestMessagesTypedDict", + Union[ + SystemMessageTypedDict, + UserMessageTypedDict, + AssistantMessageTypedDict, + ToolMessageTypedDict, + ], +) ChatCompletionStreamRequestMessages = Annotated[ @@ -43,12 +50,15 @@ ] -ChatCompletionStreamRequestToolChoiceTypedDict = Union[ - ToolChoiceTypedDict, ToolChoiceEnum -] +ChatCompletionStreamRequestToolChoiceTypedDict = TypeAliasType( + "ChatCompletionStreamRequestToolChoiceTypedDict", + Union[ToolChoiceTypedDict, ToolChoiceEnum], +) -ChatCompletionStreamRequestToolChoice = Union[ToolChoice, ToolChoiceEnum] +ChatCompletionStreamRequestToolChoice = TypeAliasType( + "ChatCompletionStreamRequestToolChoice", Union[ToolChoice, ToolChoiceEnum] +) class ChatCompletionStreamRequestTypedDict(TypedDict): diff --git a/src/mistralai/models/classificationrequest.py b/src/mistralai/models/classificationrequest.py index d2426c4..d18ffa6 100644 --- a/src/mistralai/models/classificationrequest.py +++ b/src/mistralai/models/classificationrequest.py @@ -5,14 +5,18 @@ import pydantic from pydantic import model_serializer from typing import List, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -ClassificationRequestInputsTypedDict = Union[str, List[str]] +ClassificationRequestInputsTypedDict = TypeAliasType( + "ClassificationRequestInputsTypedDict", Union[str, List[str]] +) r"""Text to classify.""" -ClassificationRequestInputs = Union[str, List[str]] +ClassificationRequestInputs = TypeAliasType( + "ClassificationRequestInputs", Union[str, List[str]] +) r"""Text to classify.""" diff --git a/src/mistralai/models/contentchunk.py b/src/mistralai/models/contentchunk.py index 717ba82..feeda7c 100644 --- a/src/mistralai/models/contentchunk.py +++ b/src/mistralai/models/contentchunk.py @@ -7,12 +7,13 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag from typing import Union -from typing_extensions import Annotated +from typing_extensions import Annotated, TypeAliasType -ContentChunkTypedDict = Union[ - TextChunkTypedDict, ImageURLChunkTypedDict, ReferenceChunkTypedDict -] +ContentChunkTypedDict = TypeAliasType( + "ContentChunkTypedDict", + Union[TextChunkTypedDict, ImageURLChunkTypedDict, ReferenceChunkTypedDict], +) ContentChunk = Annotated[ diff --git a/src/mistralai/models/deltamessage.py b/src/mistralai/models/deltamessage.py index 7a966e0..b46cf64 100644 --- a/src/mistralai/models/deltamessage.py +++ b/src/mistralai/models/deltamessage.py @@ -6,13 +6,15 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -ContentTypedDict = Union[str, List[ContentChunkTypedDict]] +ContentTypedDict = TypeAliasType( + "ContentTypedDict", Union[str, List[ContentChunkTypedDict]] +) -Content = Union[str, List[ContentChunk]] +Content = TypeAliasType("Content", Union[str, List[ContentChunk]]) class DeltaMessageTypedDict(TypedDict): diff --git a/src/mistralai/models/embeddingrequest.py 
b/src/mistralai/models/embeddingrequest.py index 61e181c..4de8c31 100644 --- a/src/mistralai/models/embeddingrequest.py +++ b/src/mistralai/models/embeddingrequest.py @@ -5,14 +5,14 @@ import pydantic from pydantic import model_serializer from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -InputsTypedDict = Union[str, List[str]] +InputsTypedDict = TypeAliasType("InputsTypedDict", Union[str, List[str]]) r"""Text to embed.""" -Inputs = Union[str, List[str]] +Inputs = TypeAliasType("Inputs", Union[str, List[str]]) r"""Text to embed.""" diff --git a/src/mistralai/models/fimcompletionrequest.py b/src/mistralai/models/fimcompletionrequest.py index 409aa25..fb72ba4 100644 --- a/src/mistralai/models/fimcompletionrequest.py +++ b/src/mistralai/models/fimcompletionrequest.py @@ -4,14 +4,18 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -FIMCompletionRequestStopTypedDict = Union[str, List[str]] +FIMCompletionRequestStopTypedDict = TypeAliasType( + "FIMCompletionRequestStopTypedDict", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -FIMCompletionRequestStop = Union[str, List[str]] +FIMCompletionRequestStop = TypeAliasType( + "FIMCompletionRequestStop", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" diff --git a/src/mistralai/models/fimcompletionstreamrequest.py b/src/mistralai/models/fimcompletionstreamrequest.py index 8f9c1da..5e16a17 100644 --- a/src/mistralai/models/fimcompletionstreamrequest.py +++ b/src/mistralai/models/fimcompletionstreamrequest.py @@ -4,14 +4,18 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -FIMCompletionStreamRequestStopTypedDict = Union[str, List[str]] +FIMCompletionStreamRequestStopTypedDict = TypeAliasType( + "FIMCompletionStreamRequestStopTypedDict", Union[str, List[str]] +) r"""Stop generation if this token is detected. Or if one of these tokens is detected when providing an array""" -FIMCompletionStreamRequestStop = Union[str, List[str]] +FIMCompletionStreamRequestStop = TypeAliasType( + "FIMCompletionStreamRequestStop", Union[str, List[str]] +) r"""Stop generation if this token is detected. 
Or if one of these tokens is detected when providing an array""" diff --git a/src/mistralai/models/functioncall.py b/src/mistralai/models/functioncall.py index a57d235..0cce622 100644 --- a/src/mistralai/models/functioncall.py +++ b/src/mistralai/models/functioncall.py @@ -3,13 +3,13 @@ from __future__ import annotations from mistralai.types import BaseModel from typing import Any, Dict, Union -from typing_extensions import TypedDict +from typing_extensions import TypeAliasType, TypedDict -ArgumentsTypedDict = Union[Dict[str, Any], str] +ArgumentsTypedDict = TypeAliasType("ArgumentsTypedDict", Union[Dict[str, Any], str]) -Arguments = Union[Dict[str, Any], str] +Arguments = TypeAliasType("Arguments", Union[Dict[str, Any], str]) class FunctionCallTypedDict(TypedDict): diff --git a/src/mistralai/models/imageurlchunk.py b/src/mistralai/models/imageurlchunk.py index f18c63a..498690f 100644 --- a/src/mistralai/models/imageurlchunk.py +++ b/src/mistralai/models/imageurlchunk.py @@ -4,13 +4,15 @@ from .imageurl import ImageURL, ImageURLTypedDict from mistralai.types import BaseModel from typing import Literal, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -ImageURLChunkImageURLTypedDict = Union[ImageURLTypedDict, str] +ImageURLChunkImageURLTypedDict = TypeAliasType( + "ImageURLChunkImageURLTypedDict", Union[ImageURLTypedDict, str] +) -ImageURLChunkImageURL = Union[ImageURL, str] +ImageURLChunkImageURL = TypeAliasType("ImageURLChunkImageURL", Union[ImageURL, str]) ImageURLChunkType = Literal["image_url"] diff --git a/src/mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py b/src/mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py index 1925a1a..d7a5d10 100644 --- a/src/mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py +++ b/src/mistralai/models/jobs_api_routes_fine_tuning_create_fine_tuning_jobop.py @@ -4,13 +4,18 @@ from .jobout import JobOut, JobOutTypedDict from .legacyjobmetadataout import LegacyJobMetadataOut, LegacyJobMetadataOutTypedDict from typing import Union +from typing_extensions import TypeAliasType -JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict = Union[ - LegacyJobMetadataOutTypedDict, JobOutTypedDict -] +JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict = TypeAliasType( + "JobsAPIRoutesFineTuningCreateFineTuningJobResponseTypedDict", + Union[LegacyJobMetadataOutTypedDict, JobOutTypedDict], +) r"""OK""" -JobsAPIRoutesFineTuningCreateFineTuningJobResponse = Union[LegacyJobMetadataOut, JobOut] +JobsAPIRoutesFineTuningCreateFineTuningJobResponse = TypeAliasType( + "JobsAPIRoutesFineTuningCreateFineTuningJobResponse", + Union[LegacyJobMetadataOut, JobOut], +) r"""OK""" diff --git a/src/mistralai/models/modellist.py b/src/mistralai/models/modellist.py index 97ae4c3..394cb3f 100644 --- a/src/mistralai/models/modellist.py +++ b/src/mistralai/models/modellist.py @@ -7,10 +7,12 @@ from mistralai.utils import get_discriminator from pydantic import Discriminator, Tag from typing import List, Optional, Union -from typing_extensions import Annotated, NotRequired, TypedDict +from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict -DataTypedDict = Union[BaseModelCardTypedDict, FTModelCardTypedDict] +DataTypedDict = TypeAliasType( + "DataTypedDict", Union[BaseModelCardTypedDict, FTModelCardTypedDict] +) Data = Annotated[ diff --git a/src/mistralai/models/retrieve_model_v1_models_model_id_getop.py 
b/src/mistralai/models/retrieve_model_v1_models_model_id_getop.py index dd4bccc..bfe6247 100644 --- a/src/mistralai/models/retrieve_model_v1_models_model_id_getop.py +++ b/src/mistralai/models/retrieve_model_v1_models_model_id_getop.py @@ -7,7 +7,7 @@ from mistralai.utils import FieldMetadata, PathParamMetadata, get_discriminator from pydantic import Discriminator, Tag from typing import Union -from typing_extensions import Annotated, TypedDict +from typing_extensions import Annotated, TypeAliasType, TypedDict class RetrieveModelV1ModelsModelIDGetRequestTypedDict(TypedDict): @@ -22,9 +22,10 @@ class RetrieveModelV1ModelsModelIDGetRequest(BaseModel): r"""The ID of the model to retrieve.""" -RetrieveModelV1ModelsModelIDGetResponseRetrieveModelV1ModelsModelIDGetTypedDict = Union[ - BaseModelCardTypedDict, FTModelCardTypedDict -] +RetrieveModelV1ModelsModelIDGetResponseRetrieveModelV1ModelsModelIDGetTypedDict = TypeAliasType( + "RetrieveModelV1ModelsModelIDGetResponseRetrieveModelV1ModelsModelIDGetTypedDict", + Union[BaseModelCardTypedDict, FTModelCardTypedDict], +) r"""Successful Response""" diff --git a/src/mistralai/models/systemmessage.py b/src/mistralai/models/systemmessage.py index f6f3074..7827ac4 100644 --- a/src/mistralai/models/systemmessage.py +++ b/src/mistralai/models/systemmessage.py @@ -4,13 +4,17 @@ from .textchunk import TextChunk, TextChunkTypedDict from mistralai.types import BaseModel from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -SystemMessageContentTypedDict = Union[str, List[TextChunkTypedDict]] +SystemMessageContentTypedDict = TypeAliasType( + "SystemMessageContentTypedDict", Union[str, List[TextChunkTypedDict]] +) -SystemMessageContent = Union[str, List[TextChunk]] +SystemMessageContent = TypeAliasType( + "SystemMessageContent", Union[str, List[TextChunk]] +) Role = Literal["system"] diff --git a/src/mistralai/models/toolmessage.py b/src/mistralai/models/toolmessage.py index c42f34e..bee9c70 100644 --- a/src/mistralai/models/toolmessage.py +++ b/src/mistralai/models/toolmessage.py @@ -5,13 +5,15 @@ from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -ToolMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]] +ToolMessageContentTypedDict = TypeAliasType( + "ToolMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]] +) -ToolMessageContent = Union[str, List[ContentChunk]] +ToolMessageContent = TypeAliasType("ToolMessageContent", Union[str, List[ContentChunk]]) ToolMessageRole = Literal["tool"] diff --git a/src/mistralai/models/usermessage.py b/src/mistralai/models/usermessage.py index af69895..dac2618 100644 --- a/src/mistralai/models/usermessage.py +++ b/src/mistralai/models/usermessage.py @@ -5,13 +5,15 @@ from mistralai.types import BaseModel, Nullable, UNSET_SENTINEL from pydantic import model_serializer from typing import List, Literal, Optional, Union -from typing_extensions import NotRequired, TypedDict +from typing_extensions import NotRequired, TypeAliasType, TypedDict -UserMessageContentTypedDict = Union[str, List[ContentChunkTypedDict]] +UserMessageContentTypedDict = TypeAliasType( + "UserMessageContentTypedDict", Union[str, List[ContentChunkTypedDict]] +) 
-UserMessageContent = Union[str, List[ContentChunk]] +UserMessageContent = TypeAliasType("UserMessageContent", Union[str, List[ContentChunk]]) UserMessageRole = Literal["user"] diff --git a/src/mistralai/models/validationerror.py b/src/mistralai/models/validationerror.py index 03ce984..e971e01 100644 --- a/src/mistralai/models/validationerror.py +++ b/src/mistralai/models/validationerror.py @@ -3,13 +3,13 @@ from __future__ import annotations from mistralai.types import BaseModel from typing import List, Union -from typing_extensions import TypedDict +from typing_extensions import TypeAliasType, TypedDict -LocTypedDict = Union[str, int] +LocTypedDict = TypeAliasType("LocTypedDict", Union[str, int]) -Loc = Union[str, int] +Loc = TypeAliasType("Loc", Union[str, int]) class ValidationErrorTypedDict(TypedDict): diff --git a/src/mistralai/sdk.py b/src/mistralai/sdk.py index 71c60fc..7778560 100644 --- a/src/mistralai/sdk.py +++ b/src/mistralai/sdk.py @@ -129,3 +129,17 @@ def _init_sdks(self): self.agents = Agents(self.sdk_configuration) self.embeddings = Embeddings(self.sdk_configuration) self.classifiers = Classifiers(self.sdk_configuration) + + def __enter__(self): + return self + + async def __aenter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.sdk_configuration.client is not None: + self.sdk_configuration.client.close() + + async def __aexit__(self, exc_type, exc_val, exc_tb): + if self.sdk_configuration.async_client is not None: + await self.sdk_configuration.async_client.aclose() diff --git a/src/mistralai/sdkconfiguration.py b/src/mistralai/sdkconfiguration.py index 9030b1b..1f8261a 100644 --- a/src/mistralai/sdkconfiguration.py +++ b/src/mistralai/sdkconfiguration.py @@ -29,8 +29,8 @@ class SDKConfiguration: language: str = "python" openapi_doc_version: str = "0.0.2" sdk_version: str = "1.2.4" - gen_version: str = "2.460.1" - user_agent: str = "speakeasy-sdk/python 1.2.4 2.460.1 0.0.2 mistralai" + gen_version: str = "2.470.1" + user_agent: str = "speakeasy-sdk/python 1.2.4 2.470.1 0.0.2 mistralai" retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET) timeout_ms: Optional[int] = None diff --git a/src/mistralai/utils/annotations.py b/src/mistralai/utils/annotations.py index 5b3bbb0..387874e 100644 --- a/src/mistralai/utils/annotations.py +++ b/src/mistralai/utils/annotations.py @@ -1,30 +1,55 @@ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from enum import Enum -from typing import Any +from typing import Any, Optional def get_discriminator(model: Any, fieldname: str, key: str) -> str: - if isinstance(model, dict): - try: - return f'{model.get(key)}' - except AttributeError as e: - raise ValueError(f'Could not find discriminator key {key} in {model}') from e + """ + Recursively search for the discriminator attribute in a model. - if hasattr(model, fieldname): - attr = getattr(model, fieldname) + Args: + model (Any): The model to search within. + fieldname (str): The name of the field to search for. + key (str): The key to search for in dictionaries. - if isinstance(attr, Enum): - return f'{attr.value}' + Returns: + str: The name of the discriminator attribute. - return f'{attr}' + Raises: + ValueError: If the discriminator attribute is not found. 
+ """ + upper_fieldname = fieldname.upper() - fieldname = fieldname.upper() - if hasattr(model, fieldname): - attr = getattr(model, fieldname) + def get_field_discriminator(field: Any) -> Optional[str]: + """Search for the discriminator attribute in a given field.""" - if isinstance(attr, Enum): - return f'{attr.value}' + if isinstance(field, dict): + if key in field: + return f'{field[key]}' - return f'{attr}' + if hasattr(field, fieldname): + attr = getattr(field, fieldname) + if isinstance(attr, Enum): + return f'{attr.value}' + return f'{attr}' + + if hasattr(field, upper_fieldname): + attr = getattr(field, upper_fieldname) + if isinstance(attr, Enum): + return f'{attr.value}' + return f'{attr}' + + return None + + + if isinstance(model, list): + for field in model: + discriminator = get_field_discriminator(field) + if discriminator is not None: + return discriminator + + discriminator = get_field_discriminator(model) + if discriminator is not None: + return discriminator raise ValueError(f'Could not find discriminator field {fieldname} in {model}') diff --git a/src/mistralai/utils/eventstreaming.py b/src/mistralai/utils/eventstreaming.py index 553b386..74a63f7 100644 --- a/src/mistralai/utils/eventstreaming.py +++ b/src/mistralai/utils/eventstreaming.py @@ -2,12 +2,72 @@ import re import json -from typing import Callable, TypeVar, Optional, Generator, AsyncGenerator, Tuple +from typing import ( + Callable, + Generic, + TypeVar, + Optional, + Generator, + AsyncGenerator, + Tuple, +) import httpx T = TypeVar("T") +class EventStream(Generic[T]): + response: httpx.Response + generator: Generator[T, None, None] + + def __init__( + self, + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + ): + self.response = response + self.generator = stream_events(response, decoder, sentinel) + + def __iter__(self): + return self + + def __next__(self): + return next(self.generator) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.response.close() + + +class EventStreamAsync(Generic[T]): + response: httpx.Response + generator: AsyncGenerator[T, None] + + def __init__( + self, + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, + ): + self.response = response + self.generator = stream_events_async(response, decoder, sentinel) + + def __aiter__(self): + return self + + async def __anext__(self): + return await self.generator.__anext__() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.response.aclose() + + class ServerEvent: id: Optional[str] = None event: Optional[str] = None
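
A note on the pattern repeated throughout the model files above: every bare `Union[...]` alias is wrapped in `typing_extensions.TypeAliasType`. The difference is that the alias becomes a real named type object instead of an anonymous union, so the name survives introspection. A minimal sketch of the distinction, using an illustrative alias name:

```python
from typing import List, Union

from typing_extensions import TypeAliasType

# Bare alias: "StopPlain" is just a variable bound to an anonymous union.
StopPlain = Union[str, List[str]]

# Named alias: a distinct TypeAliasType object that carries its own name,
# so tools such as Pydantic can refer to "Stop" rather than the raw union.
Stop = TypeAliasType("Stop", Union[str, List[str]])

print(Stop.__name__)   # Stop
print(Stop.__value__)  # typing.Union[str, typing.List[str]]
```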
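The new `__enter__`/`__exit__` and `__aenter__`/`__aexit__` methods on the SDK class in `src/mistralai/sdk.py` close the underlying httpx client deterministically instead of leaving it to garbage collection. A sketch of the async variant this enables (the `complete_async` method name is assumed from the generated SDK surface and is not shown in this patch):

```python
import asyncio
import os

from mistralai import Mistral


async def main():
    # __aenter__ returns the SDK; __aexit__ awaits aclose() on the
    # async httpx client once the block is left.
    async with Mistral(api_key=os.getenv("MISTRAL_API_KEY", "")) as s:
        res = await s.chat.complete_async(
            model="mistral-small-latest",
            messages=[{"role": "user", "content": "Say hello."}],
        )
        if res is not None:
            print(res.choices[0].message.content)


asyncio.run(main())
```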
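The rewritten `get_discriminator` in `src/mistralai/utils/annotations.py` factors the dict/attribute lookup into a local `get_field_discriminator` helper and adds one new capability: when the model is a list, each element is probed in turn and the first discriminator found is returned. In terms of the code above:

```python
from mistralai.utils import get_discriminator

# Dict payload: the discriminator is read via the key argument.
get_discriminator({"type": "text", "text": "hi"}, "type", "type")
# -> "text"

# New: list payloads are scanned element by element until one of them
# yields a discriminator value.
get_discriminator([{"type": "text", "text": "hi"}], "type", "type")
# -> "text"
```

The list case presumably exists for union members that nest their variants inside lists, such as `ChatClassificationRequestInputs` earlier in this patch.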
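Finally, `EventStream` and `EventStreamAsync` bundle the decoding generator with its `httpx.Response`, so a server-sent-event stream is both an iterator and a context manager, and the HTTP connection is released even if the consumer breaks out early. A usage sketch, assuming the streaming endpoints now return these wrappers (the event shape follows the SDK's existing completion-event models):

```python
import os

from mistralai import Mistral

with Mistral(api_key=os.getenv("MISTRAL_API_KEY", "")) as s:
    # __enter__ yields the stream; __exit__ closes the underlying
    # httpx.Response when the block is left, finished or not.
    with s.chat.stream(
        model="mistral-small-latest",
        messages=[{"role": "user", "content": "Count to three."}],
    ) as event_stream:
        for event in event_stream:
            print(event.data.choices[0].delta.content or "", end="")
```

The optional `sentinel` constructor argument lines up with the `serverEventsSentinels` feature bumped in `gen.lock`: when the stream emits that exact payload (typically `[DONE]`), iteration stops.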