 from pydantic_ai.exceptions import ModelHTTPError, ModelRetry
 from pydantic_ai.messages import (
     BinaryContent,
+    DocumentUrl,
     ImageUrl,
     ModelRequest,
     ModelResponse,
@@ -123,7 +124,7 @@ def completion_message(
     return MistralChatCompletionResponse(
         id='123',
         choices=[MistralChatCompletionChoice(finish_reason='stop', index=0, message=message)],
-        created=1704067200 if with_created else None,  # 2024-01-01
+        created=1704067200 if with_created else 0,  # 2024-01-01
         model='mistral-large-123',
         object='chat.completion',
         usage=usage or MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
@@ -142,7 +143,7 @@ def chunk(
                 MistralCompletionResponseStreamChoice(index=index, delta=delta, finish_reason=finish_reason)
                 for index, delta in enumerate(delta)
             ],
-            created=1704067200 if with_created else None,  # 2024-01-01
+            created=1704067200 if with_created else 0,  # 2024-01-01
             model='gpt-4',
             object='chat.completion.chunk',
             usage=MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
@@ -188,11 +189,13 @@ def test_init():
 
 async def test_multiple_completions(allow_model_requests: None):
     completions = [
+        # First completion: created is "now" (simulate IsNow)
         completion_message(
             MistralAssistantMessage(content='world'),
             usage=MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
             with_created=False,
         ),
+        # Second completion: created is fixed 2024-01-01 00:00:00 UTC
         completion_message(MistralAssistantMessage(content='hello again')),
     ]
     mock_client = MockMistralAI.create_mock(completions)
@@ -1909,6 +1912,87 @@ async def test_image_as_binary_content_input(allow_model_requests: None):
     )
 
 
+async def test_pdf_url_input(allow_model_requests: None):
+    c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
+    mock_client = MockMistralAI.create_mock(c)
+    m = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
+    agent = Agent(m)
+
+    result = await agent.run(
+        [
+            'hello',
+            DocumentUrl(url='https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'),
+        ]
+    )
+    assert result.all_messages() == snapshot(
+        [
+            ModelRequest(
+                parts=[
+                    UserPromptPart(
+                        content=[
+                            'hello',
+                            DocumentUrl(url='https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'),
+                        ],
+                        timestamp=IsDatetime(),
+                    )
+                ]
+            ),
+            ModelResponse(
+                parts=[TextPart(content='world')],
+                usage=Usage(requests=1, request_tokens=1, response_tokens=1, total_tokens=1),
+                model_name='mistral-large-123',
+                timestamp=IsDatetime(),
+                vendor_id='123',
+            ),
+        ]
+    )
+
+
+async def test_pdf_as_binary_content_input(allow_model_requests: None):
+    c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
+    mock_client = MockMistralAI.create_mock(c)
+    m = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
+    agent = Agent(m)
+
+    base64_content = b'%PDF-1.\r trailer<</Root<</Pages<</Kids[<</MediaBox[0 0 3 3]>>>>>>>>>'
+
+    result = await agent.run(['hello', BinaryContent(data=base64_content, media_type='application/pdf')])
+    assert result.all_messages() == snapshot(
+        [
+            ModelRequest(
+                parts=[
+                    UserPromptPart(
+                        content=['hello', BinaryContent(data=base64_content, media_type='application/pdf')],
+                        timestamp=IsDatetime(),
+                    )
+                ]
+            ),
+            ModelResponse(
+                parts=[TextPart(content='world')],
+                usage=Usage(requests=1, request_tokens=1, response_tokens=1, total_tokens=1),
+                model_name='mistral-large-123',
+                timestamp=IsDatetime(),
+                vendor_id='123',
+            ),
+        ]
+    )
+
+
+async def test_txt_url_input(allow_model_requests: None):
+    c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
+    mock_client = MockMistralAI.create_mock(c)
+    m = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
+    agent = Agent(m)
+
+    with pytest.raises(RuntimeError, match='DocumentUrl other than PDF is not supported in Mistral.'):
+        await agent.run(
+            [
+                'hello',
+                DocumentUrl(url='https://examplefiles.org/files/documents/plaintext-example-file-download.txt'),
+            ]
+        )
+
+
 async def test_audio_as_binary_content_input(allow_model_requests: None):
     c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
     mock_client = MockMistralAI.create_mock(c)
@@ -1917,7 +2001,7 @@ async def test_audio_as_binary_content_input(allow_model_requests: None):
 
     base64_content = b'//uQZ'
 
-    with pytest.raises(RuntimeError, match='Only image binary content is supported for Mistral.'):
+    with pytest.raises(RuntimeError, match='BinaryContent other than image or PDF is not supported in Mistral.'):
         await agent.run(['hello', BinaryContent(data=base64_content, media_type='audio/wav')])
 
 