add configuration override to prompty example #2873

Merged · 6 commits · Apr 23, 2024
Changes from 2 commits
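In short, both updated notebooks load a `.prompty` file with `Prompty.load` and optionally pass a `model` override at load time. A minimal sketch of the dict-based pattern shown in the diff below (assuming the referenced environment variables are set):

from promptflow.core import Prompty

# override the model configuration and parameters at load time
override_model = {
    "configuration": {
        "type": "azure_openai",
        "azure_deployment": "gpt-35-turbo",
        "api_key": "${env:AZURE_OPENAI_API_KEY}",        # resolved from the environment
        "azure_endpoint": "${env:AZURE_OPENAI_ENDPOINT}",
    },
    "parameters": {"max_tokens": 512},
}

f = Prompty.load(source="basic.prompty", model=override_model)
result = f(first_name="John", last_name="Doe", question="What is the capital of France?")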
127 changes: 77 additions & 50 deletions examples/prompty/basic/prompty-quickstart.ipynb
@@ -79,78 +79,105 @@
"metadata": {},
"outputs": [],
"source": [
"from promptflow.core import Prompty, AzureOpenAIModelConfiguration, OpenAIModelConfiguration\n",
"from promptflow.core import Prompty\n",
"\n",
"# load prompty as a flow\n",
"f = Prompty.load(source=\"basic.prompty\")\n",
"\n",
"# execute the flow as function\n",
"result = f(\n",
" first_name=\"John\", last_name=\"Doe\", question=\"What is the capital of France?\"\n",
")\n",
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Or you can override model configuration of prompty with dict."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# override model configuration of prompty with dict\n",
"override_model = {\n",
" \"configuration\": {\n",
" \"type\": \"azure_openai\",\n",
" \"azure_deployment\": \"gpt-35-turbo\",\n",
" \"api_key\": \"${env:AZURE_OPENAI_API_KEY}\",\n",
" \"azure_endpoint\": \"${env:AZURE_OPENAI_ENDPOINT}\",\n",
" },\n",
" \"parameters\": {\n",
" \"max_token\": 512\n",
" }\n",
"}\n",
"\n",
"# override configuration with open ai\n",
"# override_model = {\n",
"# \"configuration\": {\n",
"# \"type\": \"azure_openai\",\n",
"# \"azure_deployment\": \"gpt-35-turbo\",\n",
"# \"api_key\": \"${env:AZURE_OPENAI_API_KEY}\",\n",
"# \"azure_endpoint\": \"${env:AZURE_OPENAI_ENDPOINT}\",\n",
"# \"type\": \"openai\",\n",
"# \"azure_deployment\": \"gpt-3.5-turbo\",\n",
"# \"api_key\": \"${env:OPENAI_API_KEY}\",\n",
"# \"base_url\": \"${env:OPENAI_BASE_URL}\",\n",
"# },\n",
"# \"parameters\": {\n",
"# \"max_token\": 512\n",
"# }\n",
"# }\n",
"# f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"\n",
"# override configuration with AzureOpenAIModelConfiguration\n",
"# configuration = AzureOpenAIModelConfiguration(\n",
"# azure_endpoint=\"${env:AZURE_OPENAI_ENDPOINT}\",\n",
"# api_key=\"${env:AZURE_OPENAI_API_KEY}\",\n",
"# azure_deployment=\"gpt-35-turbo\"\n",
"# )\n",
"# override_model = {\n",
"# \"configuration\": configuration,\n",
"# \"parameters\": {\n",
"# \"max_token\": 512\n",
"# }\n",
"# }\n",
"# f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"# load prompty as a flow\n",
"f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"\n",
"# override configuration with created connection in AzureOpenAIModelConfiguration\n",
"# configuration = AzureOpenAIModelConfiguration(\n",
"# connection=\"azure_openai_connection\",\n",
"# azure_deployment=\"gpt-35-turbo\"\n",
"# )\n",
"# override_model = {\n",
"# \"configuration\": configuration,\n",
"# \"parameters\": {\n",
"# \"max_token\": 512\n",
"# }\n",
"# }\n",
"# f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"# execute the flow as function\n",
"result = f(\n",
" first_name=\"John\", last_name=\"Doe\", question=\"What is the capital of France?\"\n",
")\n",
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Or you can override configuration with `AzureOpenAIModelConfiguration` and `OpenAIModelConfiguration`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from promptflow.core import AzureOpenAIModelConfiguration, OpenAIModelConfiguration\n",
"\n",
"# override configuration with AzureOpenAIModelConfiguration\n",
"configuration = AzureOpenAIModelConfiguration(\n",
" azure_endpoint=\"${env:AZURE_OPENAI_ENDPOINT}\",\n",
" api_key=\"${env:AZURE_OPENAI_API_KEY}\",\n",
" azure_deployment=\"gpt-35-turbo\"\n",
")\n",
"\n",
"# override configuration with OpenAIModelConfiguration\n",
"# configuration = OpenAIModelConfiguration(\n",
"# base_url=\"${env:OPENAI_BASE_URL}\",\n",
"# api_key=\"${env:OPENAI_API_KEY}\",\n",
"# model=\"gpt-3.5-turbo\"\n",
"# )\n",
"# override_model = {\n",
"# \"configuration\": configuration,\n",
"# \"parameters\": {\n",
"# \"max_token\": 512\n",
"# }\n",
"# }\n",
"# f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"\n",
"# override configuration with created connection in OpenAIModelConfiguration\n",
"# configuration = OpenAIModelConfiguration(\n",
"# connection=\"openai_connection\",\n",
"# model=\"gpt-3.5-turbo\"\n",
"# )\n",
"# override_model = {\n",
"# \"configuration\": configuration,\n",
"# \"parameters\": {\n",
"# \"max_token\": 512\n",
"# }\n",
"# }\n",
"# f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"override_model = {\n",
" \"configuration\": configuration,\n",
" \"parameters\": {\n",
" \"max_token\": 512\n",
" }\n",
"}\n",
"\n",
"# load prompty as a flow\n",
"f = Prompty.load(source=\"basic.prompty\", model=override_model)\n",
"\n",
"# execute the flow as function\n",
"result = f(\n",
79 changes: 79 additions & 0 deletions examples/prompty/chat-basic/chat-with-prompty.ipynb
@@ -124,6 +124,85 @@
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Or you can override connection with dict"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# override azure openai connection of prompty with dict\n",
"override_model = {\n",
" \"configuration\": {\n",
" \"type\": \"azure_openai\",\n",
" \"azure_deployment\": \"gpt-35-turbo\",\n",
" \"connection\": connection\n",
" },\n",
"}\n",
"\n",
"# override openai connection with dict\n",
"# override_model = {\n",
"# \"configuration\": {\n",
"# \"type\": \"openai\",\n",
"# \"moel\": \"gpt-3.5-turbo\",\n",
"# \"connection\": connection\n",
"# },\n",
"# }\n",
"\n",
"# load prompty as a flow\n",
"f = Prompty.load(\"chat.prompty\", model=override_model)\n",
"# execute the flow as function\n",
"question = \"What is the capital of France?\"\n",
"result = f(first_name=\"John\", last_name=\"Doe\", question=question)\n",
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Or you can override connection with `AzureOpenAIModelConfiguration` and `OpenAIModelConfiguration`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from promptflow.core import AzureOpenAIModelConfiguration, OpenAIModelConfiguration\n",
"\n",
"\n",
"# override configuration with created connection in AzureOpenAIModelConfiguration\n",
"configuration = AzureOpenAIModelConfiguration(\n",
" connection=connection,\n",
" azure_deployment=\"gpt-35-turbo\"\n",
")\n",
"\n",
"# override openai connection with OpenAIModelConfiguration\n",
"# configuration = OpenAIModelConfiguration(\n",
"# connection=connection,\n",
"# model=\"gpt-3.5-turbo\"\n",
"# )\n",
"\n",
"override_model = {\n",
" \"configuration\": configuration,\n",
"}\n",
"\n",
"# load prompty as a flow\n",
"f = Prompty.load(\"chat.prompty\", model=override_model)\n",
"# execute the flow as function\n",
"question = \"What is the capital of France?\"\n",
"result = f(first_name=\"John\", last_name=\"Doe\", question=question)\n",
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},