Skip to content

Commit 299fd1b

Browse files
feat(api): update via SDK Studio
1 parent 9a45427 commit 299fd1b

File tree

2 files changed

+38
-13
lines changed

2 files changed

+38
-13
lines changed

.stats.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
 configured_endpoints: 67
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradientai-e40feaac59c85aace6aa42d2749b20e0955dbbae58b06c3a650bc03adafcd7b5.yml
 openapi_spec_hash: 825c1a4816938e9f594b7a8c06692667
-config_hash: a5bfbbd032355b26ddd41d659c93495b
+config_hash: fc55dd4870b7f5b1f319fffe9a0c5b74

README.md

Lines changed: 37 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -31,10 +31,16 @@ client = GradientAI(
     api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
 )

-versions = client.agents.versions.list(
-    uuid="REPLACE_ME",
+completion = client.chat.completions.create(
+    messages=[
+        {
+            "content": "string",
+            "role": "system",
+        }
+    ],
+    model="llama3-8b-instruct",
 )
-print(versions.agent_versions)
+print(completion.id)
 ```

 While you can provide an `api_key` keyword argument,
@@ -57,10 +63,16 @@ client = AsyncGradientAI(


 async def main() -> None:
-    versions = await client.agents.versions.list(
-        uuid="REPLACE_ME",
+    completion = await client.chat.completions.create(
+        messages=[
+            {
+                "content": "string",
+                "role": "system",
+            }
+        ],
+        model="llama3-8b-instruct",
     )
-    print(versions.agent_versions)
+    print(completion.id)


 asyncio.run(main())
@@ -93,10 +105,16 @@ async def main() -> None:
         api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
         http_client=DefaultAioHttpClient(),
     ) as client:
-        versions = await client.agents.versions.list(
-            uuid="REPLACE_ME",
+        completion = await client.chat.completions.create(
+            messages=[
+                {
+                    "content": "string",
+                    "role": "system",
+                }
+            ],
+            model="llama3-8b-instruct",
         )
-        print(versions.agent_versions)
+        print(completion.id)


 asyncio.run(main())
@@ -120,10 +138,17 @@ from do_gradientai import GradientAI

 client = GradientAI()

-evaluation_test_case = client.agents.evaluation_test_cases.create(
-    star_metric={},
+completion = client.chat.completions.create(
+    messages=[
+        {
+            "content": "string",
+            "role": "system",
+        }
+    ],
+    model="llama3-8b-instruct",
+    stream_options={},
 )
-print(evaluation_test_case.star_metric)
+print(completion.stream_options)
 ```

 ## Handling errors

0 commit comments

Comments
 (0)