-
Notifications
You must be signed in to change notification settings - Fork 32
/
Copy pathllm-provider-utils.el
772 lines (658 loc) · 34 KB
/
llm-provider-utils.el
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
;;; llm-provider-utils.el --- Functions to make building providers easier -*- lexical-binding: t; package-lint-main-file: "llm.el"; ; byte-compile-docstring-max-column: 200-*-
;; Copyright (c) 2023-2025 Free Software Foundation, Inc.
;; This program is free software; you can redistribute it and/or
;; modify it under the terms of the GNU General Public License as
;; published by the Free Software Foundation; either version 3 of the
;; License, or (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs. If not, see <http://www.gnu.org/licenses/>.
;;; Commentary:
;; This file provides functions to help build providers. It should only be used
;; by modules implementing an LLM provider.
;;; Code:
(require 'llm)
(require 'llm-request-plz)
(require 'llm-models)
(require 'seq)
(cl-defstruct llm-standard-provider
"A struct indicating that this is a standard provider.
This is for dispatch purposes, so this contains no actual data.
This represents any provider, regardless of what it implements.
This should not be used outside of this file.")
(cl-defstruct (llm-standard-chat-provider (:include llm-standard-provider))
"A struct for indicating a provider that implements chat.
DEFAULT-CHAT-TEMPERATURE is the default temperature for chats
with the provider. Any `temperature' specified in the chat
prompt will override this. This is optional, and if not set,
when not overridden, the default value chosen by the provider
will be used.
DEFAULT-CHAT-MAX-TOKENS is the default maxmimum number of tokens
for chats with the provider. Any value for `max-tokens'
specified in the chat prompt will override this. This is
optional, and if not set, when not overriden, no maximum will be
specified to the provider.
DEFAULT-CHAT-NON-STANDARD-PARAMS are per-provider params that
will override and `non-standard-params' that are part of the
prompt. This is an alist of parameters, whose name and possible
values will be different for each provider. The overriding here
is on a per-parameter basis, so the final value used in the chat
can be a mix of these default parameters and others in the
prompt.
These values will be set as parameters on the prompt, so changing
values after the initial call in the chat will not have an
effect. New values will have an effect, however."
default-chat-temperature default-chat-max-tokens
default-chat-non-standard-params)
(cl-defstruct (llm-standard-full-provider (:include llm-standard-chat-provider))
"A struct for providers that implements chat and embeddings.")
(cl-defstruct llm-provider-utils-tool-use
"A struct to hold information about a tool use.
ID is a call ID, which is optional.
NAME is the tool name.
ARG is an alist of arguments to their values."
id name args)
;; Methods necessary for both embedding and chat requests.
(cl-defgeneric llm-provider-request-prelude (provider)
"Execute any prelude code necessary before running a request.
PROVIDER is the provider that will be used to make the request.")
(cl-defmethod llm-provider-request-prelude ((_ llm-standard-provider))
"Do nothing for the standard provider."
nil)
(cl-defgeneric llm-provider-headers (provider)
"Return the headers for the PROVIDER.")
(cl-defmethod llm-provider-headers ((_ llm-standard-provider))
"By default, the standard provider has no headers."
nil)
;; Methods for embeddings
(cl-defgeneric llm-provider-embedding-url (provider &optional batch)
"Return the URL for embeddings for the PROVIDER.
BATCH is true if this is a batch request.")
(cl-defgeneric llm-provider-embedding-request (provider string)
"Return the request for the PROVIDER for STRING.")
(cl-defgeneric llm-provider-batch-embeddings-request (provider string-list)
"Return the request for the PROVIDER for STRING-LIST.")
(cl-defgeneric llm-provider-embedding-extract-error (provider response)
"Return an error message from RESPONSE for the PROVIDER.
RESPONSE is a parsed JSON object.
Return nil if there is no error.")
(cl-defmethod llm-provider-embedding-extract-error ((_ llm-standard-full-provider) _)
"By default, the standard provider has no error extractor."
nil)
(cl-defgeneric llm-provider-embedding-extract-result (provider response)
"Return the result from RESPONSE for the PROVIDER.")
(cl-defgeneric llm-provider-batch-embeddings-extract-result (provider response)
"Return the result from RESPONSE for the PROVIDER for a batch request.")
;; Methods for chat
(cl-defgeneric llm-provider-chat-url (provider)
"Return the URL for chat for the PROVIDER.")
(cl-defgeneric llm-provider-chat-streaming-url (provider)
"Return the URL for streaming chat for the PROVIDER.")
(cl-defmethod llm-provider-chat-streaming-url ((provider llm-standard-chat-provider))
"By default, use the same URL as normal chat.
PROVIDER is the standard chat provider that is used to make the
request."
(llm-provider-chat-url provider))
(cl-defgeneric llm-provider-chat-timeout (provider)
"Return the seconds of timeout for PROVIDER.
Return nil for the standard timeout.")
(cl-defmethod llm-provider-chat-timeout ((_ llm-standard-provider))
"By default, the standard provider has the standard timeout."
nil)
(cl-defmethod llm-provider-chat-request :before ((provider llm-standard-chat-provider) prompt _)
"Set PROVIDER default parameters where they do not existe in the PROMPT."
(setf (llm-chat-prompt-temperature prompt)
(or (llm-chat-prompt-temperature prompt)
(llm-standard-chat-provider-default-chat-temperature provider))
(llm-chat-prompt-max-tokens prompt)
(or (llm-chat-prompt-max-tokens prompt)
(llm-standard-chat-provider-default-chat-max-tokens provider))
(llm-chat-prompt-non-standard-params prompt)
;; We need to merge the parameters individually.
;; Lists as values should be turned into vectors.
(mapcar (lambda (c)
(if (listp (cdr c))
(cons (car c) (vconcat (cdr c)))
c))
(seq-union (llm-chat-prompt-non-standard-params prompt)
(llm-standard-chat-provider-default-chat-non-standard-params provider)
(lambda (a b)
(equal (car a) (car b)))))))
(cl-defgeneric llm-provider-chat-request (provider prompt streaming)
"Return the request for the PROVIDER for PROMPT.
STREAMING is true if this is a streaming request.")
(cl-defgeneric llm-provider-chat-extract-error (provider response)
"Return an error message from RESPONSE for the PROVIDER.")
(cl-defmethod llm-provider-chat-extract-error ((_ llm-standard-chat-provider) _)
"By default, the standard provider has no error extractor."
nil)
(cl-defgeneric llm-provider-chat-extract-result (provider response)
"Return the result from RESPONSE for the PROVIDER.")
(cl-defgeneric llm-provider-append-to-prompt (provider prompt result &optional tool-results)
"Append RESULT to PROMPT for the PROVIDER.
PROMPT is the prompt that was already sent to the provider.
TOOL-RESULTS is a list of function results, if any.")
(cl-defmethod llm-provider-append-to-prompt ((_ llm-standard-chat-provider) prompt
result &optional tool-results)
;; By default, we just append to the prompt.
(llm-provider-utils-append-to-prompt prompt result tool-results))
(cl-defgeneric llm-provider-streaming-media-handler (provider msg-receiver fc-receiver err-receiver)
"Define how to handle streaming media for the PROVIDER.
This should return a cons of the media type and an instance that
handle objects of that type.
The handlers defined can call MSG-RECEIVER when they receive part
of a text message for the client (a chat response). If they
receive a function call, they should call FC-RECEIVER with the
function call. If they receive an error, they should call
ERR-RECEIVER with the error message.")
(cl-defmethod llm-provider-streaming-media-handler ((_ llm-standard-chat-provider) _ _ _)
"By default, the standard provider has no streaming media handler."
nil)
;; Methods for chat function calling
(cl-defgeneric llm-provider-extract-tool-uses (provider response)
"Return the tool-uses from RESPONSE for the PROVIDER.
If there are no tool uses, return nil. If there are tool uses, return a
list of `llm-provider-utils-tool-use'.")
(cl-defmethod llm-provider-extract-tool-uses ((_ llm-standard-chat-provider) _)
"By default, the standard provider has no function call extractor."
nil)
(cl-defgeneric llm-provider-populate-tool-uses (provider prompt tool-uses)
"For PROVIDER, in PROMPT, record TOOL-USES.
This is the recording before the function calls were executed, in the prompt.
CALLS are a list of `llm-provider-utils-tool-use'.")
(cl-defgeneric llm-provider-collect-streaming-tool-uses (provider data)
"Transform a list of streaming tool-uses DATA responses.
PROVIDER is the struct specifying the LLM provider and its configuration.
The DATA responses are a list of whatever is sent to the tool
use handler in `llm-provider-streaming-media-handler'. This should
return a list of `llm-chat-prompt-tool-use' structs.")
(cl-defmethod llm-provider-collect-streaming-tool-uses ((_ llm-standard-chat-provider) _)
;; by default, there is no function calling
nil)
;; Standard provider implementations of llm functionality
(cl-defmethod llm-embedding ((provider llm-standard-full-provider) string)
(llm-provider-request-prelude provider)
(let ((response (llm-request-plz-sync
(llm-provider-embedding-url provider nil)
:timeout (llm-provider-chat-timeout provider)
:headers (llm-provider-headers provider)
:data (llm-provider-embedding-request provider string))))
(if-let ((err-msg (llm-provider-embedding-extract-error provider response)))
(error err-msg)
(llm-provider-embedding-extract-result provider response))))
(cl-defmethod llm-embedding-async ((provider llm-standard-full-provider) string vector-callback error-callback)
(llm-provider-request-prelude provider)
(let ((buf (current-buffer)))
(llm-request-plz-async
(llm-provider-embedding-url provider nil)
:headers (llm-provider-headers provider)
:data (llm-provider-embedding-request provider string)
:on-success (lambda (data)
(if-let ((err-msg (llm-provider-embedding-extract-error provider data)))
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
err-msg)
(llm-provider-utils-callback-in-buffer
buf vector-callback
(llm-provider-embedding-extract-result provider data))))
:on-error (lambda (_ data)
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
(if (stringp data)
data
(or (llm-provider-embedding-extract-error
provider data)
"Unknown error")))))))
(cl-defmethod llm-batch-embeddings ((provider llm-standard-full-provider) string-list)
(llm-provider-request-prelude provider)
(let ((response (llm-request-plz-sync
(llm-provider-embedding-url provider t)
:timeout (llm-provider-chat-timeout provider)
:headers (llm-provider-headers provider)
:data (llm-provider-batch-embeddings-request provider string-list))))
(if-let ((err-msg (llm-provider-embedding-extract-error provider response)))
(error err-msg)
(llm-provider-batch-embeddings-extract-result provider response))))
(cl-defmethod llm-batch-embeddings-async ((provider llm-standard-full-provider) string-list vector-callback error-callback)
(llm-provider-request-prelude provider)
(let ((buf (current-buffer)))
(llm-request-plz-async
(llm-provider-embedding-url provider t)
:headers (llm-provider-headers provider)
:data (llm-provider-batch-embeddings-request provider string-list)
:on-success (lambda (data)
(if-let ((err-msg (llm-provider-embedding-extract-error provider data)))
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
err-msg)
(llm-provider-utils-callback-in-buffer
buf vector-callback
(llm-provider-embedding-extract-result provider data))))
:on-error (lambda (_ data)
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
(if (stringp data)
data
(or (llm-provider-embedding-extract-error
provider data)
"Unknown error")))))))
(cl-defmethod llm-chat ((provider llm-standard-chat-provider) prompt)
(llm-provider-request-prelude provider)
(let ((response (llm-request-plz-sync (llm-provider-chat-url provider)
:headers (llm-provider-headers provider)
:data (llm-provider-chat-request provider prompt nil)))
(final-result nil))
(if-let ((err-msg (llm-provider-chat-extract-error provider response)))
(error err-msg)
(llm-provider-utils-process-result provider prompt
(llm-provider-chat-extract-result
provider response)
(llm-provider-extract-tool-uses
provider response)
(lambda (result)
(setq final-result result))))
;; In most cases, final-result will be available immediately. However, when
;; executing tools, we need to wait for their callbacks, and only after
;; those are called with this be ready.
(while (not final-result)
(sleep-for 0.1))
final-result))
(cl-defmethod llm-chat-async ((provider llm-standard-chat-provider) prompt success-callback
error-callback)
(llm-provider-request-prelude provider)
(let ((buf (current-buffer)))
(llm-request-plz-async
(llm-provider-chat-url provider)
:headers (llm-provider-headers provider)
:data (llm-provider-chat-request provider prompt nil)
:on-success (lambda (data)
(if-let ((err-msg (llm-provider-chat-extract-error provider data)))
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
err-msg)
(llm-provider-utils-process-result
provider prompt
(llm-provider-chat-extract-result provider data)
(llm-provider-extract-tool-uses provider data)
(lambda (result)
(llm-provider-utils-callback-in-buffer
buf success-callback result)))))
:on-error (lambda (_ data)
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
(if (stringp data)
data
(or (llm-provider-chat-extract-error
provider data)
"Unknown error")))))))
(cl-defmethod llm-chat-streaming ((provider llm-standard-chat-provider) prompt partial-callback
response-callback error-callback)
(llm-provider-request-prelude provider)
(let ((buf (current-buffer))
(current-text "")
(fc nil))
(llm-request-plz-async
(llm-provider-chat-streaming-url provider)
:headers (llm-provider-headers provider)
:data (llm-provider-chat-request provider prompt t)
:media-type (llm-provider-streaming-media-handler
provider
(lambda (s)
(when (> (length s) 0)
(setq current-text
(concat current-text s))
(when partial-callback
(llm-provider-utils-callback-in-buffer
buf partial-callback current-text))))
(lambda (fc-new) (push fc-new fc))
(lambda (err)
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
err)))
:on-success
(lambda (_)
;; We don't need the data at the end of streaming, so we can ignore it.
(llm-provider-utils-process-result
provider prompt
current-text
(when fc
(llm-provider-collect-streaming-tool-uses
provider (nreverse fc)))
(lambda (result)
(llm-provider-utils-callback-in-buffer
buf response-callback result))))
:on-error (lambda (_ data)
(llm-provider-utils-callback-in-buffer
buf error-callback 'error
(if (stringp data)
data
(or (llm-provider-chat-extract-error
provider data)
"Unknown error")))))))
(defun llm-provider-utils-get-system-prompt (prompt &optional example-prelude)
"From PROMPT, turn the context and examples into a string.
EXAMPLE-PRELUDE is a string to prepend to the examples."
(concat
(llm-chat-prompt-context prompt)
(when (llm-chat-prompt-context prompt) "\n")
(when (llm-chat-prompt-examples prompt) (or example-prelude
(concat
(if (= (length (llm-chat-prompt-examples prompt)) 1)
"Here is an example"
(format "Here are %d examples"
(length (llm-chat-prompt-examples prompt))))
" of how to respond:\n")))
(when (llm-chat-prompt-examples prompt) "\n")
(mapconcat (lambda (example)
(format "User: %s\nAssistant: %s"
(car example)
(cdr example)))
(llm-chat-prompt-examples prompt) "\n")))
(defun llm-provider-utils-combine-to-system-prompt (prompt &optional example-prelude)
"Add context and examples to a system prompt in PROMPT.
This should be used for providers that have a notion of a system prompt.
If there is a system prompt, and no assistant response, add to it.
If there is no system prompt, create one.
If there is an assistance response, do nothing.
EXAMPLE-PRELUDE is the text to introduce any examples with."
(let ((system-prompt (seq-find
(lambda (interaction)
(eq (llm-chat-prompt-interaction-role interaction) 'system))
(llm-chat-prompt-interactions prompt)))
(system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
(when (and system-content (> (length system-content) 0))
(if system-prompt
(setf (llm-chat-prompt-interaction-content system-prompt)
(concat (llm-chat-prompt-interaction-content system-prompt)
"\n"
system-content))
(push (make-llm-chat-prompt-interaction
:role 'system
:content system-content)
(llm-chat-prompt-interactions prompt))
(setf (llm-chat-prompt-context prompt) nil
(llm-chat-prompt-examples prompt) nil)))))
(defun llm-provider-utils-combine-to-user-prompt (prompt &optional example-prelude)
"Add context and examples to a user prompt in PROMPT.
This should be used for providers that do not have a notion of a system prompt.
EXAMPLE-PRELUDE is the text to introduce any examples with."
(let ((system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
(when (> (length system-content) 0)
(setf (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt)))
(let ((initial-content (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt)))))
(if (llm-multipart-p initial-content)
(make-llm-multipart
:parts (cons system-content
(llm-multipart-parts initial-content)))
(concat system-content
"\n"
initial-content)))
(llm-chat-prompt-context prompt) nil
(llm-chat-prompt-examples prompt) nil))))
(defun llm-provider-utils-collapse-history (prompt &optional history-prelude)
"Collapse history to a single PROMPT.
This is useful for providers that cannot handle conversations.
Essentially it's a way to fake conversation. aution: tokens will
eventually run out, though, so this isn't a sustainable way to do
things. Providers should probably issue a warning when using this.
HISTORY-PRELUDE is the text to use to tell the LLM that
conversation history will follow."
(when (> (length (llm-chat-prompt-interactions prompt)) 1)
(setf (llm-chat-prompt-interactions prompt)
(list (make-llm-chat-prompt-interaction
:role 'user
:content
(concat (or history-prelude "Previous interactions:") "\n\n"
(mapconcat (lambda (interaction)
(format "%s: %s" (pcase (llm-chat-prompt-interaction-role interaction)
('user "User")
('assistant "Assistant"))
(llm-chat-prompt-interaction-content interaction)))
(butlast (llm-chat-prompt-interactions prompt)) "\n")
"\n\nThe current conversation follows:\n\n"
(llm-chat-prompt-interaction-content (car (last (llm-chat-prompt-interactions prompt))))))))))
(defun llm-provider-utils-model-token-limit (model &optional default)
"Return the token limit for MODEL.
If MODEL cannot be found, warn and return DEFAULT, which by default is 4096."
(let ((model (llm-models-match model)))
(if model
(llm-model-context-length model)
(warn "No model predefined for model %s, using restrictive defaults" model)
(or default 4096))))
(defun llm-provider-utils--encolon (s)
"Turn S into a symbol preceded by a colon."
(intern (format ":%s" s)))
(defun llm-provider-utils-non-standard-params-plist (prompt)
"Return non-standard-paramters from PROMPT as a plist."
(mapcan (lambda (pcons) (list (llm-provider-utils--encolon (car pcons))
(cdr pcons)))
(llm-chat-prompt-non-standard-params prompt)))
(defun llm-provider-utils--decolon (sym)
"Remove a colon from the beginnging of SYM."
(let ((s (symbol-name sym)))
(if (string-prefix-p ":" s)
(intern (substring s 1))
sym)))
(defun llm-provider-utils-convert-to-serializable (plist)
"Convert PLIST to a serializable form.
The expectation is that any symbol values will be converted to strings
for plist and any nested plists."
(mapcan (lambda (elem-pair)
(cond ((symbolp (nth 1 elem-pair))
(list (car elem-pair)
(symbol-name (nth 1 elem-pair))))
((consp (nth 1 elem-pair))
(list (car elem-pair)
(llm-provider-utils-convert-to-serializable (nth 1 elem-pair))))
(t elem-pair)))
(seq-partition plist 2)))
(defun llm-provider-utils-openai-arguments (args)
"Convert ARGS to the OpenAI function calling spec.
ARGS is a list of llm argument plists.
Each plist has the structure:
(:name STRING
:type SYMBOL
:description STRING
:optional BOOLEAN
:properties PLIST
:enum VECTOR
:items (PLIST :type SYMBOL :enum VECTOR :properties PLIST))
:type is a symbol, one of `string', `number', `boolean', `object', or
`array'."
(let ((properties '())
(required-names '()))
(dolist (arg args)
(let* ((arg-name (plist-get arg :name))
(type (symbol-name (plist-get arg :type)))
(description (plist-get arg :description))
(required (not (plist-get arg :optional)))
(enum (plist-get arg :enum))
(items (plist-get arg :items))
(obj-properties (llm-provider-utils-convert-to-serializable
(plist-get arg :properties)))
(schema (list :type type)))
;; Add :description if present
(when description
(setq schema (plist-put schema :description description)))
;; Add :enum if present
(when enum
;; Vectors generally serialize nicely to JSON arrays, but a list is fine too.
(setq schema (plist-put schema :enum enum)))
(when items
(setq schema (plist-put schema
:items
(llm-provider-utils-convert-to-serializable items))))
(when obj-properties
(setq schema (plist-put schema :properties obj-properties)))
;; Track required argument names if :required is t
(when required
(push (if (symbolp arg-name)
(symbol-name arg-name)
arg-name) required-names))
;; Finally, put this schema into the :properties
(setq properties
(plist-put properties (llm-provider-utils--encolon arg-name)
schema))))
;; Build the final spec
(let ((spec `(:type "object" :properties ,properties)))
(when required-names
(setq spec (plist-put spec :required (apply #'vector
(nreverse required-names)))))
spec)))
(cl-defgeneric llm-provider-utils-openai-tool-spec (tool)
"Convert TOOL to an Open AI function spec.")
;; The Open AI tool spec follows the JSON schema spec. See
;; https://json-schema.org/understanding-json-schema.
(cl-defmethod llm-provider-utils-openai-tool-spec ((tool llm-tool))
"Convert TOOL to an Open AI function spec.
Open AI's function spec is a standard way to do this, and will be
applicable to many endpoints.
This returns a JSON object (a list that can be converted to JSON)."
`(:type "function"
:function
(:name ,(llm-tool-name tool)
:description ,(llm-tool-description tool)
:parameters ,(llm-provider-utils-openai-arguments
(llm-tool-args tool)))))
(defun llm-provider-utils-openai-collect-streaming-tool-uses (data)
"Read Open AI compatible streaming output DATA to collect tool-uses."
(let* ((num-index (+ 1 (assoc-default 'index (aref (car (last data)) 0))))
(cvec (make-vector num-index nil)))
(dotimes (i num-index)
(setf (aref cvec i) (make-llm-provider-utils-tool-use)))
(cl-loop for part in data do
(cl-loop for call in (append part nil) do
(let* ((index (assoc-default 'index call))
(id (assoc-default 'id call))
(function (assoc-default 'function call))
(name (assoc-default 'name function))
(arguments (assoc-default 'arguments function)))
(when id
(setf (llm-provider-utils-tool-use-id (aref cvec index)) id))
(when name
(setf (llm-provider-utils-tool-use-name (aref cvec index)) name))
(setf (llm-provider-utils-tool-use-args (aref cvec index))
(concat (llm-provider-utils-tool-use-args (aref cvec index))
arguments)))))
(cl-loop for call in (append cvec nil)
do (setf (llm-provider-utils-tool-use-args call)
(json-parse-string (llm-provider-utils-tool-use-args call)
:object-type 'alist))
finally return (when (> (length cvec) 0)
(append cvec nil)))))
(defun llm-provider-utils-append-to-prompt (prompt output &optional tool-results role)
"Append OUTPUT to PROMPT as an assistant interaction.
OUTPUT can be a string or a structure in the case of function calls.
TOOL-RESULTS is a list of results from the LLM output, if any.
ROLE will be `assistant' by default, but can be passed in for other roles."
(setf (llm-chat-prompt-interactions prompt)
(append (llm-chat-prompt-interactions prompt)
(list (make-llm-chat-prompt-interaction
:role (or role
(if tool-results 'tool-results 'assistant))
;; If it is a structure, it will get converted to JSON,
;; otherwise make sure it is a string. For tool uses, we
;; want it to be nil.
:content (if (or (not output)
(and (not (stringp output))
(not tool-results)))
output
(format "%s" output))
:tool-results tool-results)))))
(defun llm-provider-utils-process-result (provider prompt text tool-uses success-callback)
"Process the RESPONSE from the provider for PROMPT.
This execute function calls if there are any, does any result
appending to the prompt, and returns an appropriate response for
the client.
PROVIDER is the struct that configures the use of the LLM.
TOOL-USES is a list of tool uses in the result.
TEXT is the text output from the provider, if any. There should
be either FUNCALLS or TEXT.
SUCCESS-CALLBACK is the callback that will be run when all functions
complete."
(if tool-uses
;; If we have tool uses, execute them, and on the callback, we will
;; populate the results. We don't execute the callback here because it
;; will be done inside `llm-provider-utils-execute-tool-uses'.
(llm-provider-utils-execute-tool-uses
provider prompt tool-uses success-callback)
;; We probably shouldn't be called if text is nil, but if we do,
;; we shouldn't add something invalid to the prompt.
(when text
(llm-provider-append-to-prompt provider prompt text))
(funcall success-callback text)))
(defun llm-provider-utils-populate-tool-uses (provider prompt results-alist)
"Append the results in RESULTS-ALIST to the prompt.
PROMPT is the prompt to populate into.
RESULTS-ALIST is a list of cons of function
calls (`llm-provider-utils-tool-use' structs) and their
results.
PROVIDER is the struct that configures the user of the LLM."
(llm-provider-append-to-prompt
provider prompt nil
(mapcar (lambda (c) (make-llm-chat-prompt-tool-result
:call-id (llm-provider-utils-tool-use-id (car c))
:tool-name (llm-provider-utils-tool-use-name (car c))
:result (cdr c)))
results-alist)))
(defun llm-provider-utils-execute-tool-uses (provider prompt tool-uses success-callback)
"Execute TOOL-USES, a list of `llm-provider-utils-tool-use'.
A response suitable for returning to the client will be returned.
PROVIDER is the provider that supplied the response.
PROMPT was the prompt given to the provider, which will get
updated with the response from the LLM, and if there is a
function call, the result.
SUCCESS-CALLBACK is the callback that will be run when all functions
have returned results."
(llm-provider-populate-tool-uses provider prompt tool-uses)
(let (results tool-use-and-results)
(cl-loop
for tool-use in tool-uses do
(let* ((name (llm-provider-utils-tool-use-name tool-use))
(arguments (llm-provider-utils-tool-use-args tool-use))
(tool (seq-find
(lambda (f) (equal name (llm-tool-name f)))
(llm-chat-prompt-tools prompt)))
(call-args (cl-loop for arg in (llm-tool-args tool)
collect (cdr (seq-find (lambda (a)
(eq (intern (plist-get arg :name))
(car a)))
arguments))))
(end-func (lambda (result)
(llm--log
'api-funcall
:provider provider
:msg (format "%s --> %s"
(format "%S" (cons name call-args))
(format "%s" result)))
(push (cons name result) tool-use-and-results)
(push (cons tool-use result) results)
(when (= (length results) (length tool-uses))
(llm-provider-utils-populate-tool-uses
provider prompt results)
(funcall success-callback tool-use-and-results)))))
(if (llm-tool-async tool)
(apply (llm-tool-function tool)
(append (list end-func) call-args))
(funcall end-func (apply (llm-tool-function tool) call-args)))))))
;; This is a useful method for getting out of the request buffer when it's time
;; to make callbacks.
(defun llm-provider-utils-callback-in-buffer (buf f &rest args)
"Run F with ARGS in the context of BUF.
But if BUF has been killed, use a temporary buffer instead.
If F is nil, nothing is done."
(when f
(if (buffer-live-p buf)
(with-current-buffer buf (apply f args))
(with-temp-buffer (apply f args)))))
(defun llm-provider-utils-json-val (val)
"Return VAL if it is not nil, otherwise return nil."
(when (and val (not (eq val :null)))
val))
(provide 'llm-provider-utils)
;;; llm-provider-utils.el ends here