@@ -31,10 +31,16 @@ client = GradientAI(
3131     api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
3232)
3333
34- versions = client.agents.versions.list(
35-     uuid="REPLACE_ME",
34+ completion = client.chat.completions.create(
35+     messages=[
36+         {
37+             "content": "string",
38+             "role": "system",
39+         }
40+     ],
41+     model="llama3-8b-instruct",
3642)
37- print(versions.agent_versions)
43+ print(completion.id)
3844```
3945
4046While you can provide an ` api_key ` keyword argument,
@@ -57,10 +63,16 @@ client = AsyncGradientAI(
5763
5864
5965async def main () -> None :
60-     versions = await client.agents.versions.list(
61-         uuid="REPLACE_ME",
66+     completion = await client.chat.completions.create(
67+         messages=[
68+             {
69+                 "content": "string",
70+                 "role": "system",
71+             }
72+         ],
73+         model="llama3-8b-instruct",
6274     )
63-     print(versions.agent_versions)
75+     print(completion.id)
6476
6577
6678asyncio.run(main())
@@ -93,10 +105,16 @@ async def main() -> None:
93105     api_key=os.environ.get("GRADIENTAI_API_KEY"),  # This is the default and can be omitted
94106     http_client=DefaultAioHttpClient(),
95107 ) as client:
96-         versions = await client.agents.versions.list(
97-             uuid="REPLACE_ME",
108+         completion = await client.chat.completions.create(
109+             messages=[
110+                 {
111+                     "content": "string",
112+                     "role": "system",
113+                 }
114+             ],
115+             model="llama3-8b-instruct",
98116         )
99-         print(versions.agent_versions)
117+         print(completion.id)
100118
101119
102120asyncio.run(main())
@@ -120,10 +138,17 @@ from do_gradientai import GradientAI
120138
121139client = GradientAI()
122140
123- evaluation_test_case = client.agents.evaluation_test_cases.create(
124-     star_metric={},
141+ completion = client.chat.completions.create(
142+     messages=[
143+         {
144+             "content": "string",
145+             "role": "system",
146+         }
147+     ],
148+     model="llama3-8b-instruct",
149+     stream_options={},
125150)
126- print(evaluation_test_case.star_metric)
151+ print(completion.stream_options)
127152```
128153
129154## Handling errors
0 commit comments