Skip to content

Commit 7fd6719

Browse files
stainless-app[bot]Stainless Bot
authored and
Stainless Bot
committed
feat(api): update via SDK Studio (#227)
1 parent fedb55e commit 7fd6719

16 files changed

+105
-112
lines changed

.stats.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
configured_endpoints: 22
2-
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-6c87a6d2f0a1447fab78657f8b44e2d1ea2c282d2c9f92458bcd25f543944c6e.yml
2+
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-9cff8ea13f14bd0899df69243fe78b4f88d4d0172263aa260af1ea66a7d0484e.yml

README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -339,7 +339,7 @@ response = client.completion.with_raw_response.create(
339339
print(response.headers.get('X-My-Header'))
340340

341341
completion = response.parse() # get the object that `completion.create()` would have returned
342-
print(completion.provider)
342+
print(completion.message)
343343
```
344344

345345
These methods return an [`APIResponse`](https://github.com/prompt-foundry/python-sdk/tree/main/src/prompt_foundry_python_sdk/_response.py) object.

api.md

+2-2
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ Types:
1616

1717
```python
1818
from prompt_foundry_python_sdk.types import (
19-
ModelParameters,
19+
Parameters,
2020
PromptConfiguration,
2121
PromptListResponse,
2222
PromptDeleteResponse,
@@ -30,7 +30,7 @@ Methods:
3030
- <code title="get /sdk/v1/prompts">client.prompts.<a href="./src/prompt_foundry_python_sdk/resources/prompts.py">list</a>() -> <a href="./src/prompt_foundry_python_sdk/types/prompt_list_response.py">PromptListResponse</a></code>
3131
- <code title="delete /sdk/v1/prompts/{id}">client.prompts.<a href="./src/prompt_foundry_python_sdk/resources/prompts.py">delete</a>(id) -> <a href="./src/prompt_foundry_python_sdk/types/prompt_delete_response.py">PromptDeleteResponse</a></code>
3232
- <code title="get /sdk/v1/prompts/{id}">client.prompts.<a href="./src/prompt_foundry_python_sdk/resources/prompts.py">get</a>(id) -> <a href="./src/prompt_foundry_python_sdk/types/prompt_configuration.py">PromptConfiguration</a></code>
33-
- <code title="post /sdk/v1/prompts/{id}">client.prompts.<a href="./src/prompt_foundry_python_sdk/resources/prompts.py">get_parameters</a>(id, \*\*<a href="src/prompt_foundry_python_sdk/types/prompt_get_parameters_params.py">params</a>) -> <a href="./src/prompt_foundry_python_sdk/types/model_parameters.py">ModelParameters</a></code>
33+
- <code title="post /sdk/v1/prompts/{id}">client.prompts.<a href="./src/prompt_foundry_python_sdk/resources/prompts.py">get_parameters</a>(id, \*\*<a href="src/prompt_foundry_python_sdk/types/prompt_get_parameters_params.py">params</a>) -> <a href="./src/prompt_foundry_python_sdk/types/parameters.py">Parameters</a></code>
3434

3535
# Tools
3636

src/prompt_foundry_python_sdk/resources/prompts.py

+13-15
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
async_to_streamed_response_wrapper,
2222
)
2323
from .._base_client import make_request_options
24-
from ..types.model_parameters import ModelParameters
24+
from ..types.parameters import Parameters
2525
from ..types.prompt_configuration import PromptConfiguration
2626
from ..types.prompt_list_response import PromptListResponse
2727
from ..types.prompt_delete_response import PromptDeleteResponse
@@ -228,12 +228,11 @@ def get_parameters(
228228
extra_query: Query | None = None,
229229
extra_body: Body | None = None,
230230
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
231-
) -> ModelParameters:
231+
) -> Parameters:
232232
"""
233-
Fetches the configured model parameters and messages rendered with the provided
234-
variables mapped to the set LLM provider. This endpoint abstracts the need to
235-
handle mapping between different providers, while still allowing direct calls to
236-
the providers.
233+
Fetches the model configuration parameters for a specified prompt, including
234+
penalty settings, response format, and the model messages rendered with the
235+
given variables mapped to the set LLM provider.
237236
238237
Args:
239238
append_messages: Appended to the end of the configured prompt messages before running the
@@ -257,7 +256,7 @@ def get_parameters(
257256
if not id:
258257
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
259258
return cast(
260-
ModelParameters,
259+
Parameters,
261260
self._post(
262261
f"/sdk/v1/prompts/{id}",
263262
body=maybe_transform(
@@ -272,7 +271,7 @@ def get_parameters(
272271
options=make_request_options(
273272
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
274273
),
275-
cast_to=cast(Any, ModelParameters), # Union types cannot be passed in as arguments in the type system
274+
cast_to=cast(Any, Parameters), # Union types cannot be passed in as arguments in the type system
276275
),
277276
)
278277

@@ -476,12 +475,11 @@ async def get_parameters(
476475
extra_query: Query | None = None,
477476
extra_body: Body | None = None,
478477
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
479-
) -> ModelParameters:
478+
) -> Parameters:
480479
"""
481-
Fetches the configured model parameters and messages rendered with the provided
482-
variables mapped to the set LLM provider. This endpoint abstracts the need to
483-
handle mapping between different providers, while still allowing direct calls to
484-
the providers.
480+
Fetches the model configuration parameters for a specified prompt, including
481+
penalty settings, response format, and the model messages rendered with the
482+
given variables mapped to the set LLM provider.
485483
486484
Args:
487485
append_messages: Appended to the end of the configured prompt messages before running the
@@ -505,7 +503,7 @@ async def get_parameters(
505503
if not id:
506504
raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
507505
return cast(
508-
ModelParameters,
506+
Parameters,
509507
await self._post(
510508
f"/sdk/v1/prompts/{id}",
511509
body=await async_maybe_transform(
@@ -520,7 +518,7 @@ async def get_parameters(
520518
options=make_request_options(
521519
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
522520
),
523-
cast_to=cast(Any, ModelParameters), # Union types cannot be passed in as arguments in the type system
521+
cast_to=cast(Any, Parameters), # Union types cannot be passed in as arguments in the type system
524522
),
525523
)
526524

src/prompt_foundry_python_sdk/types/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from .tool import Tool as Tool
66
from .evaluation import Evaluation as Evaluation
7-
from .model_parameters import ModelParameters as ModelParameters
7+
from .parameters import Parameters as Parameters
88
from .tool_create_params import ToolCreateParams as ToolCreateParams
99
from .tool_list_response import ToolListResponse as ToolListResponse
1010
from .tool_update_params import ToolUpdateParams as ToolUpdateParams

src/prompt_foundry_python_sdk/types/completion_create_params.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,15 @@
1111
"CompletionCreateParams",
1212
"AppendMessage",
1313
"AppendMessageContent",
14-
"AppendMessageContentTextContentBlock",
14+
"AppendMessageContentTextContentBlockSchema",
1515
"AppendMessageContentImageBase64ContentBlock",
1616
"AppendMessageContentToolCallContentBlock",
1717
"AppendMessageContentToolCallContentBlockToolCall",
1818
"AppendMessageContentToolCallContentBlockToolCallFunction",
1919
"AppendMessageContentToolResultContentBlock",
2020
"OverrideMessage",
2121
"OverrideMessageContent",
22-
"OverrideMessageContentTextContentBlock",
22+
"OverrideMessageContentTextContentBlockSchema",
2323
"OverrideMessageContentImageBase64ContentBlock",
2424
"OverrideMessageContentToolCallContentBlock",
2525
"OverrideMessageContentToolCallContentBlockToolCall",
@@ -48,7 +48,7 @@ class CompletionCreateParams(TypedDict, total=False):
4848
"""The template variables added to the prompt when executing the prompt."""
4949

5050

51-
class AppendMessageContentTextContentBlock(TypedDict, total=False):
51+
class AppendMessageContentTextContentBlockSchema(TypedDict, total=False):
5252
text: Required[str]
5353

5454
type: Required[Literal["TEXT"]]
@@ -100,7 +100,7 @@ class AppendMessageContentToolResultContentBlock(TypedDict, total=False):
100100

101101

102102
AppendMessageContent: TypeAlias = Union[
103-
AppendMessageContentTextContentBlock,
103+
AppendMessageContentTextContentBlockSchema,
104104
AppendMessageContentImageBase64ContentBlock,
105105
AppendMessageContentToolCallContentBlock,
106106
AppendMessageContentToolResultContentBlock,
@@ -113,7 +113,7 @@ class AppendMessage(TypedDict, total=False):
113113
role: Required[Literal["assistant", "system", "tool", "user"]]
114114

115115

116-
class OverrideMessageContentTextContentBlock(TypedDict, total=False):
116+
class OverrideMessageContentTextContentBlockSchema(TypedDict, total=False):
117117
text: Required[str]
118118

119119
type: Required[Literal["TEXT"]]
@@ -165,7 +165,7 @@ class OverrideMessageContentToolResultContentBlock(TypedDict, total=False):
165165

166166

167167
OverrideMessageContent: TypeAlias = Union[
168-
OverrideMessageContentTextContentBlock,
168+
OverrideMessageContentTextContentBlockSchema,
169169
OverrideMessageContentImageBase64ContentBlock,
170170
OverrideMessageContentToolCallContentBlock,
171171
OverrideMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/completion_create_response.py

+3-8
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"CompletionCreateResponse",
1313
"Message",
1414
"MessageContent",
15-
"MessageContentTextContentBlock",
15+
"MessageContentTextContentBlockSchema",
1616
"MessageContentImageBase64ContentBlock",
1717
"MessageContentToolCallContentBlock",
1818
"MessageContentToolCallContentBlockToolCall",
@@ -22,7 +22,7 @@
2222
]
2323

2424

25-
class MessageContentTextContentBlock(BaseModel):
25+
class MessageContentTextContentBlockSchema(BaseModel):
2626
text: str
2727

2828
type: Literal["TEXT"]
@@ -75,7 +75,7 @@ class MessageContentToolResultContentBlock(BaseModel):
7575

7676
MessageContent: TypeAlias = Annotated[
7777
Union[
78-
MessageContentTextContentBlock,
78+
MessageContentTextContentBlockSchema,
7979
MessageContentImageBase64ContentBlock,
8080
MessageContentToolCallContentBlock,
8181
MessageContentToolResultContentBlock,
@@ -108,9 +108,4 @@ class CompletionCreateResponse(BaseModel):
108108
message: Message
109109
"""The completion message generated by the model."""
110110

111-
name: str
112-
113-
provider: Literal["ANTHROPIC", "OPENAI"]
114-
"""The LLM model provider."""
115-
116111
stats: Stats

src/prompt_foundry_python_sdk/types/evaluation.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"Evaluation",
1313
"AppendedMessage",
1414
"AppendedMessageContent",
15-
"AppendedMessageContentTextContentBlock",
15+
"AppendedMessageContentTextContentBlockSchema",
1616
"AppendedMessageContentImageBase64ContentBlock",
1717
"AppendedMessageContentToolCallContentBlock",
1818
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -21,7 +21,7 @@
2121
]
2222

2323

24-
class AppendedMessageContentTextContentBlock(BaseModel):
24+
class AppendedMessageContentTextContentBlockSchema(BaseModel):
2525
text: str
2626

2727
type: Literal["TEXT"]
@@ -74,7 +74,7 @@ class AppendedMessageContentToolResultContentBlock(BaseModel):
7474

7575
AppendedMessageContent: TypeAlias = Annotated[
7676
Union[
77-
AppendedMessageContentTextContentBlock,
77+
AppendedMessageContentTextContentBlockSchema,
7878
AppendedMessageContentImageBase64ContentBlock,
7979
AppendedMessageContentToolCallContentBlock,
8080
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/evaluation_create_params.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
"EvaluationCreateParams",
1212
"AppendedMessage",
1313
"AppendedMessageContent",
14-
"AppendedMessageContentTextContentBlock",
14+
"AppendedMessageContentTextContentBlockSchema",
1515
"AppendedMessageContentImageBase64ContentBlock",
1616
"AppendedMessageContentToolCallContentBlock",
1717
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -36,7 +36,7 @@ class EvaluationCreateParams(TypedDict, total=False):
3636
"""How heavily to weigh the evaluation within the prompt."""
3737

3838

39-
class AppendedMessageContentTextContentBlock(TypedDict, total=False):
39+
class AppendedMessageContentTextContentBlockSchema(TypedDict, total=False):
4040
text: Required[str]
4141

4242
type: Required[Literal["TEXT"]]
@@ -88,7 +88,7 @@ class AppendedMessageContentToolResultContentBlock(TypedDict, total=False):
8888

8989

9090
AppendedMessageContent: TypeAlias = Union[
91-
AppendedMessageContentTextContentBlock,
91+
AppendedMessageContentTextContentBlockSchema,
9292
AppendedMessageContentImageBase64ContentBlock,
9393
AppendedMessageContentToolCallContentBlock,
9494
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/evaluation_update_params.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
"EvaluationUpdateParams",
1212
"AppendedMessage",
1313
"AppendedMessageContent",
14-
"AppendedMessageContentTextContentBlock",
14+
"AppendedMessageContentTextContentBlockSchema",
1515
"AppendedMessageContentImageBase64ContentBlock",
1616
"AppendedMessageContentToolCallContentBlock",
1717
"AppendedMessageContentToolCallContentBlockToolCall",
@@ -36,7 +36,7 @@ class EvaluationUpdateParams(TypedDict, total=False):
3636
"""How heavily to weigh the evaluation within the prompt."""
3737

3838

39-
class AppendedMessageContentTextContentBlock(TypedDict, total=False):
39+
class AppendedMessageContentTextContentBlockSchema(TypedDict, total=False):
4040
text: Required[str]
4141

4242
type: Required[Literal["TEXT"]]
@@ -88,7 +88,7 @@ class AppendedMessageContentToolResultContentBlock(TypedDict, total=False):
8888

8989

9090
AppendedMessageContent: TypeAlias = Union[
91-
AppendedMessageContentTextContentBlock,
91+
AppendedMessageContentTextContentBlockSchema,
9292
AppendedMessageContentImageBase64ContentBlock,
9393
AppendedMessageContentToolCallContentBlock,
9494
AppendedMessageContentToolResultContentBlock,

src/prompt_foundry_python_sdk/types/model_parameters.py renamed to src/prompt_foundry_python_sdk/types/parameters.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
from .._models import BaseModel
77

88
__all__ = [
9-
"ModelParameters",
9+
"Parameters",
1010
"AnthropicModelParameters",
1111
"AnthropicModelParametersParameters",
1212
"AnthropicModelParametersParametersMessage",
@@ -425,4 +425,4 @@ class OpenAIModelParameters(BaseModel):
425425
provider: Literal["openai"]
426426

427427

428-
ModelParameters: TypeAlias = Union[AnthropicModelParameters, OpenAIModelParameters]
428+
Parameters: TypeAlias = Union[AnthropicModelParameters, OpenAIModelParameters]

src/prompt_foundry_python_sdk/types/prompt_configuration.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
"PromptConfiguration",
1313
"Message",
1414
"MessageContent",
15-
"MessageContentTextContentBlock",
15+
"MessageContentTextContentBlockSchema",
1616
"MessageContentImageBase64ContentBlock",
1717
"MessageContentToolCallContentBlock",
1818
"MessageContentToolCallContentBlockToolCall",
@@ -23,7 +23,7 @@
2323
]
2424

2525

26-
class MessageContentTextContentBlock(BaseModel):
26+
class MessageContentTextContentBlockSchema(BaseModel):
2727
text: str
2828

2929
type: Literal["TEXT"]
@@ -76,7 +76,7 @@ class MessageContentToolResultContentBlock(BaseModel):
7676

7777
MessageContent: TypeAlias = Annotated[
7878
Union[
79-
MessageContentTextContentBlock,
79+
MessageContentTextContentBlockSchema,
8080
MessageContentImageBase64ContentBlock,
8181
MessageContentToolCallContentBlock,
8282
MessageContentToolResultContentBlock,
@@ -98,17 +98,17 @@ class Parameters(BaseModel):
9898
max_tokens: Optional[float] = FieldInfo(alias="maxTokens", default=None)
9999
"""Example: 100"""
100100

101-
name: str
102-
"""The name of the model for the provider."""
101+
api_model_name: str = FieldInfo(alias="modelName")
102+
"""Example: "gpt-3.5-turbo" """
103+
104+
api_model_provider: Literal["ANTHROPIC", "OPENAI"] = FieldInfo(alias="modelProvider")
105+
"""The provider of the provided model."""
103106

104107
parallel_tool_calls: bool = FieldInfo(alias="parallelToolCalls")
105108

106109
presence_penalty: float = FieldInfo(alias="presencePenalty")
107110
"""Example: 0"""
108111

109-
provider: Literal["ANTHROPIC", "OPENAI"]
110-
"""The LLM model provider."""
111-
112112
response_format: Literal["JSON", "TEXT"] = FieldInfo(alias="responseFormat")
113113
"""Example: PromptResponseFormat.TEXT"""
114114

0 commit comments

Comments
 (0)