-
Notifications
You must be signed in to change notification settings - Fork 32
/
Copy pathllm-provider-utils-test.el
142 lines (129 loc) · 7.84 KB
/
llm-provider-utils-test.el
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
;;; llm-provider-utils-test.el --- Tests for llm-provider-utils -*- lexical-binding: t; package-lint-main-file: "llm.el"; -*-
;; Copyright (c) 2023-2025 Free Software Foundation, Inc.
;; This program is free software; you can redistribute it and/or
;; modify it under the terms of the GNU General Public License as
;; published by the Free Software Foundation; either version 3 of the
;; License, or (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful, but
;; WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;; General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GNU Emacs. If not, see <http://www.gnu.org/licenses/>.
;;; Commentary:
;; This file provides tests for llm-provider-utils, the module of
;; helper functions used by modules implementing an LLM provider.
;;; Code:
(require 'cl-macs)
(require 'llm-provider-utils)
(ert-deftest llm-provider-utils-openai-arguments ()
  "Test conversion of tool-argument plists to an OpenAI JSON schema plist.
The converted result should be an \"object\" schema whose
`:properties' plist is keyed by each argument's name, with type
symbols rendered as strings, and whose `:required' vector lists
only the arguments not marked `:optional'."
  (let* ((args
          (list
           ;; A required string arg.
           '(:name "location"
             :type string
             :description "The city and state, e.g. San Francisco, CA")
           ;; An optional string arg with an enum of allowed values.
           '(:name "unit"
             :type string
             :description "The unit of temperature, either 'celsius' or 'fahrenheit'"
             :enum ["celsius" "fahrenheit"]
             :optional t)
           ;; An optional array arg with a typed `:items' sub-schema.
           '(:name "postal_codes"
             :type array
             :description "Specific postal codes"
             :items (:type string)
             :optional t)))
         (result (llm-provider-utils-openai-arguments args))
         (expected
          '(:type "object"
            :properties
            (:location
             (:type "string"
              :description "The city and state, e.g. San Francisco, CA")
             :unit
             (:type "string"
              :description "The unit of temperature, either 'celsius' or 'fahrenheit'"
              :enum ["celsius" "fahrenheit"])
             :postal_codes (:type "array"
                            :description "Specific postal codes"
                            :items (:type "string")))
            ;; Only "location" lacks :optional, so it alone is required.
            :required ["location"])))
    (should (equal result expected))))
(ert-deftest llm-provider-utils-convert-to-serializable ()
  "Test that plist values are made JSON-serializable.
Numbers and strings pass through unchanged; symbol values are
converted to their string names, including inside nested plists."
  (should (equal (llm-provider-utils-convert-to-serializable '(:a 1 :b 2))
                 '(:a 1 :b 2)))
  (should (equal (llm-provider-utils-convert-to-serializable '(:a "1" :b foo))
                 '(:a "1" :b "foo")))
  ;; NOTE(review): the inner quote inside an already-quoted list makes the
  ;; :inner value literally (quote (:a foo :b bar)) on both sides, so this
  ;; case also exercises a `quote'-wrapped nested plist -- confirm that the
  ;; extra quote is intentional rather than a typo for '(:inner (:a foo :b bar)).
  (should (equal (llm-provider-utils-convert-to-serializable '(:inner '(:a foo :b bar)))
                 '(:inner '(:a "foo" :b "bar")))))
(ert-deftest llm-provider-utils-combine-to-system-prompt ()
  "Test folding context and examples into a system interaction.
Without an existing system interaction one should be created; with
one, the combined text should be appended to it.  In both cases the
prompt's context and examples slots are consumed."
  (let ((user-turn (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
        (ex-1 (cons "Request 1" "Response 1"))
        (ex-2 (cons "Request 2" "Response 2")))
    ;; Case 1: first request, no system interaction yet.
    (let ((prompt (make-llm-chat-prompt
                   :context "Example context"
                   :interactions (list (copy-llm-chat-prompt-interaction user-turn))
                   :examples (list ex-1 ex-2))))
      (llm-provider-utils-combine-to-system-prompt prompt)
      (let ((turns (llm-chat-prompt-interactions prompt)))
        (should (= 2 (length turns)))
        ;; The synthesized system interaction carries context plus examples.
        (should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2"
                       (llm-chat-prompt-interaction-content (nth 0 turns))))
        ;; The original user message is untouched.
        (should (equal "Hello" (llm-chat-prompt-interaction-content (nth 1 turns)))))
      ;; Both slots must be cleared once folded in.
      (should-not (llm-chat-prompt-context prompt))
      (should-not (llm-chat-prompt-examples prompt)))
    ;; Case 2: an existing system interaction -- the combined text is
    ;; appended after it rather than creating a new interaction.
    (let ((prompt (make-llm-chat-prompt
                   :context "Example context"
                   :interactions (list (make-llm-chat-prompt-interaction
                                        :role 'system
                                        :content "Existing system prompt.")
                                       (copy-llm-chat-prompt-interaction user-turn))
                   :examples (list ex-1 ex-2))))
      (llm-provider-utils-combine-to-system-prompt prompt)
      (should (= 2 (length (llm-chat-prompt-interactions prompt))))
      (should (equal "Existing system prompt.\nExample context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2"
                     (llm-chat-prompt-interaction-content
                      (nth 0 (llm-chat-prompt-interactions prompt))))))))
(ert-deftest llm-provider-utils-combine-to-user-prompt ()
  "Test folding context and examples into the first user interaction.
The context and rendered examples should be prepended to the lone
user message, leaving a single interaction and clearing the
prompt's context and examples slots."
  (let ((prompt (make-llm-chat-prompt
                 :context "Example context"
                 :interactions (list (make-llm-chat-prompt-interaction
                                     :role 'user :content "Hello"))
                 :examples (list (cons "Request 1" "Response 1")
                                 (cons "Request 2" "Response 2")))))
    (llm-provider-utils-combine-to-user-prompt prompt)
    (let ((turns (llm-chat-prompt-interactions prompt)))
      ;; Everything collapses into the single remaining user message.
      (should (= 1 (length turns)))
      (should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2\nHello"
                     (llm-chat-prompt-interaction-content (car turns)))))
    ;; Both slots must be cleared once folded in.
    (should-not (llm-chat-prompt-context prompt))
    (should-not (llm-chat-prompt-examples prompt))))
(ert-deftest llm-provider-utils-collapse-history ()
  "Test collapsing prior interactions into one textual preamble.
With a single interaction the prompt is left alone; with several,
they are merged into one interaction whose content recaps the
earlier turns before the final message."
  (let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
         (interaction2 (make-llm-chat-prompt-interaction :role 'assistant :content "Hi! How can I assist you?"))
         (interaction3 (make-llm-chat-prompt-interaction :role 'assistant :content "Earl Grey, hot."))
         (prompt-for-first-request
          (make-llm-chat-prompt
           :interactions (list (copy-llm-chat-prompt-interaction interaction1))))
         (prompt-for-second-request
          (make-llm-chat-prompt
           :interactions (list (copy-llm-chat-prompt-interaction interaction1)
                               (copy-llm-chat-prompt-interaction interaction2)
                               (copy-llm-chat-prompt-interaction interaction3)))))
    ;; In the first request, there's no history, so nothing should be done.
    (llm-provider-utils-collapse-history prompt-for-first-request)
    (should (= 1 (length (llm-chat-prompt-interactions prompt-for-first-request))))
    (should (equal interaction1 (nth 0 (llm-chat-prompt-interactions prompt-for-first-request))))
    ;; In the second request we should have the history prepended.
    (llm-provider-utils-collapse-history prompt-for-second-request)
    ;; Bug fix: this assertion previously re-checked PROMPT-FOR-FIRST-REQUEST,
    ;; so the collapsed prompt's interaction count was never verified.
    (should (= 1 (length (llm-chat-prompt-interactions prompt-for-second-request))))
    (should (equal "Previous interactions:\n\nUser: Hello\nAssistant: Hi! How can I assist you?\n\nThe current conversation follows:\n\nEarl Grey, hot."
                   (llm-chat-prompt-interaction-content
                    (nth 0 (llm-chat-prompt-interactions prompt-for-second-request)))))))
(provide 'llm-provider-utils-test)
;;; llm-provider-utils-test.el ends here