From f20b6fd48060897cc1f5ca65fefff6faa48f249b Mon Sep 17 00:00:00 2001
From: jiazeng
Date: Fri, 11 Aug 2023 15:45:49 +0800
Subject: [PATCH 1/3] update

---
 src/promptflow-tools/promptflow/tools/aoai.py |  3 ++-
 .../promptflow/tools/common.py                |  3 ++-
 .../promptflow/tools/openai.py                |  3 ++-
 src/promptflow-tools/tests/test_aoai.py       | 26 +++++++++++++++++++
 4 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/src/promptflow-tools/promptflow/tools/aoai.py b/src/promptflow-tools/promptflow/tools/aoai.py
index 1dc4edd2c51..5c6d8afb527 100644
--- a/src/promptflow-tools/promptflow/tools/aoai.py
+++ b/src/promptflow-tools/promptflow/tools/aoai.py
@@ -85,7 +85,8 @@ def completion(
         if stream:
             def generator():
                 for chunk in response:
-                    yield chunk.choices[0].text
+                    if chunk.choices:
+                        yield getattr(chunk.choices[0], "text", "")
 
             # We must return the generator object, not using yield directly here.
             # Otherwise, the function itself will become a generator, despite whether stream is True or False.
diff --git a/src/promptflow-tools/promptflow/tools/common.py b/src/promptflow-tools/promptflow/tools/common.py
index c51239506d5..a6226122d87 100644
--- a/src/promptflow-tools/promptflow/tools/common.py
+++ b/src/promptflow-tools/promptflow/tools/common.py
@@ -260,7 +260,8 @@ def post_process_chat_api_response(completion, stream, functions):
 
         def generator():
             for chunk in completion:
-                yield getattr(chunk.choices[0]["delta"], "content", "")
+                if chunk.choices:
+                    yield getattr(chunk.choices[0]["delta"], "content", "")
 
         # We must return the generator object, not using yield directly here.
        # Otherwise, the function itself will become a generator, despite whether stream is True or False.
diff --git a/src/promptflow-tools/promptflow/tools/openai.py b/src/promptflow-tools/promptflow/tools/openai.py
index 8809827fe35..e97a00ee4c9 100644
--- a/src/promptflow-tools/promptflow/tools/openai.py
+++ b/src/promptflow-tools/promptflow/tools/openai.py
@@ -80,7 +80,8 @@ def completion(
         if stream:
             def generator():
                 for chunk in response:
-                    yield chunk.choices[0].text
+                    if chunk.choices:
+                        yield getattr(chunk.choices[0], "text", "")
 
             # We must return the generator object, not using yield directly here.
             # Otherwise, the function itself will become a generator, despite whether stream is True or False.
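A note on the two ideas these hunks repeat. First, the new guard: streamed responses can deliver chunks whose `choices` list is empty (Azure's interleaved content-filter results are one known example), so indexing `choices[0]` unconditionally raises IndexError mid-stream. Second, the comment about returning the generator object: in Python, any function whose body contains `yield` is compiled as a generator function, so `completion()` would return a generator even when `stream=False`. A minimal runnable sketch of both points; `Chunk`, `Choice`, and `fake_stream` are illustrative stand-ins, not promptflow or OpenAI SDK code:

from dataclasses import dataclass, field
from typing import List


@dataclass
class Choice:
    text: str = ""


@dataclass
class Chunk:
    choices: List[Choice] = field(default_factory=list)


def fake_stream():
    # Simulates an SSE stream that may emit chunks with an empty `choices`
    # list in between content chunks.
    yield Chunk(choices=[Choice("hello ")])
    yield Chunk(choices=[])          # would raise IndexError without the guard
    yield Chunk(choices=[Choice("world")])


def completion(stream: bool):
    response = fake_stream()
    if stream:
        def generator():
            for chunk in response:
                if chunk.choices:  # the guard added by this patch
                    yield getattr(chunk.choices[0], "text", "")
        # Return the generator object instead of yielding here: a `yield`
        # in completion()'s own body would turn completion() itself into
        # a generator, regardless of the stream flag.
        return generator()
    return "".join(c.choices[0].text for c in response if c.choices)


assert completion(stream=False) == "hello world"
assert "".join(completion(stream=True)) == "hello world"

Nesting the `yield` inside the inner `generator()` keeps `completion()` an ordinary function that merely returns a generator when asked to stream.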
diff --git a/src/promptflow-tools/tests/test_aoai.py b/src/promptflow-tools/tests/test_aoai.py
index 7bc82e21399..f2dd894e3a2 100644
--- a/src/promptflow-tools/tests/test_aoai.py
+++ b/src/promptflow-tools/tests/test_aoai.py
@@ -16,6 +16,14 @@ def test_aoai_completion(self, aoai_provider):
         aoai_provider.completion(
             prompt=prompt_template, deployment_name="text-ada-001", stop=[], logit_bias={}
         )
+    
+    def test_aoai_stream_completion(self, aoai_provider):
+        prompt_template = "please complete this sentence: world war II "
+        # test whether tool can handle param "stop" with value empty list in stream mode
+        # as openai raises "[] is not valid under any of the given schemas - 'stop'"
+        aoai_provider.completion(
+            prompt=prompt_template, deployment_name="text-ada-001", stop=[], logit_bias={}, stream=True
+        )
 
     def test_aoai_chat(self, aoai_provider, example_prompt_template, chat_history):
         result = aoai_provider.chat(
@@ -69,6 +77,24 @@ def test_aoai_chat_message_with_no_content(self, aoai_provider):
         prompt = "user:\n"
         aoai_provider.chat(prompt=prompt, deployment_name="gpt-35-turbo")
 
+    def test_aoai_stream_chat(self, aoai_provider, example_prompt_template, chat_history):
+        result = aoai_provider.chat(
+            prompt=example_prompt_template,
+            deployment_name="gpt-35-turbo",
+            max_tokens="32",
+            temperature=0,
+            user_input="Fill in more details about trend 2.",
+            chat_history=chat_history,
+            stream=True,
+        )
+        answer = ""
+        while True:
+            try:
+                answer += next(result)
+            except StopIteration:
+                break
+        assert "details about trend 2" in answer.lower()
+
     @pytest.mark.parametrize(
         "params, expected",
         [

From 291ac90b49b18f0bc07b6ac8edf1067936f7a9e8 Mon Sep 17 00:00:00 2001
From: jiazeng
Date: Fri, 11 Aug 2023 15:49:29 +0800
Subject: [PATCH 2/3] update

---
 src/promptflow-tools/tests/test_aoai.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/promptflow-tools/tests/test_aoai.py b/src/promptflow-tools/tests/test_aoai.py
index f2dd894e3a2..932ec12c83e 100644
--- a/src/promptflow-tools/tests/test_aoai.py
+++ b/src/promptflow-tools/tests/test_aoai.py
@@ -16,7 +16,7 @@ def test_aoai_completion(self, aoai_provider):
         aoai_provider.completion(
             prompt=prompt_template, deployment_name="text-ada-001", stop=[], logit_bias={}
         )
-    
+
     def test_aoai_stream_completion(self, aoai_provider):
         prompt_template = "please complete this sentence: world war II "
         # test whether tool can handle param "stop" with value empty list in stream mode
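The comment in `test_aoai_stream_completion` explains the regression these tests pin down: OpenAI rejects `stop=[]` with "[] is not valid under any of the given schemas - 'stop'", so the tool is expected to drop empty-container parameters before calling the service. A sketch of that normalization idea, assuming empty list/dict values are simply omitted; `normalize_params` is a hypothetical helper, not the promptflow implementation:

def normalize_params(**params):
    """Drop params whose value is an empty list or dict, since the OpenAI
    API rejects e.g. stop=[] with a schema validation error."""
    return {k: v for k, v in params.items() if v not in ([], {})}


assert normalize_params(prompt="hi", stop=[], logit_bias={}, temperature=0) == {
    "prompt": "hi",
    "temperature": 0,
}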
From 3f0cd7c722b0666456e477e3f458165a1948b189 Mon Sep 17 00:00:00 2001
From: jiazeng
Date: Fri, 11 Aug 2023 16:58:15 +0800
Subject: [PATCH 3/3] update

---
 src/promptflow-tools/tests/test_openai.py | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/src/promptflow-tools/tests/test_openai.py b/src/promptflow-tools/tests/test_openai.py
index 5cff3b914fd..56e97474b01 100644
--- a/src/promptflow-tools/tests/test_openai.py
+++ b/src/promptflow-tools/tests/test_openai.py
@@ -16,6 +16,10 @@ def test_openai_completion(self, openai_provider):
         prompt_template = "please complete this sentence: world war II "
         openai_provider.completion(prompt=prompt_template)
 
+    def test_openai_stream_completion(self, openai_provider):
+        prompt_template = "please complete this sentence: world war II "
+        openai_provider.completion(prompt=prompt_template, stream=True)
+
     def test_openai_completion_api(self, open_ai_connection):
         prompt_template = "please complete this sentence: world war II "
         completion(open_ai_connection, prompt=prompt_template)
@@ -31,6 +35,24 @@ def test_openai_chat(self, openai_provider, example_prompt_template, chat_histor
         )
         assert "details about trend 2" in result.lower()
 
+    def test_openai_stream_chat(self, openai_provider, example_prompt_template, chat_history):
+        result = openai_provider.chat(
+            prompt=example_prompt_template,
+            model="gpt-3.5-turbo",
+            max_tokens=32,
+            temperature=0,
+            user_input="Fill in more details about trend 2.",
+            chat_history=chat_history,
+            stream=True,
+        )
+        answer = ""
+        while True:
+            try:
+                answer += next(result)
+            except StopIteration:
+                break
+        assert "details about trend 2" in answer.lower()
+
     def test_openai_chat_api(self, open_ai_connection, example_prompt_template, chat_history):
         result = chat(
             connection=open_ai_connection,
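Both stream tests drain the returned generator with an explicit `while`/`next` loop; that pattern is just the desugared form of ordinary iteration, as this small self-contained sketch shows (the `stream` helper is illustrative, not promptflow code):

def stream():
    yield from ("details ", "about ", "trend 2")


# Pattern as written in the tests: pull chunks until the generator
# signals exhaustion by raising StopIteration.
answer = ""
it = stream()
while True:
    try:
        answer += next(it)
    except StopIteration:
        break

# Equivalent, more idiomatic form: joining iterates the same way.
assert answer == "".join(stream()) == "details about trend 2"

Joining or for-looping over the generator is equivalent and shorter; the explicit loop only makes the StopIteration boundary visible, which is presumably why the tests spell it out.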