-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathLLMAPIChat.class.st
More file actions
114 lines (96 loc) · 2.54 KB
/
LLMAPIChat.class.st
File metadata and controls
114 lines (96 loc) · 2.54 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
"
I am the chat endpoint of the LLM API client hierarchy. I configure my payload
with the default chat model and POST it to the OpenAI-compatible
`v1/chat/completions` route, resolving tool calls recursively before answering
the final message content.

# Example with ollama
```st
api := LLMAPI chat.
api host: '127.0.0.1'.
api port: 11434.
api apiKey: nil.
api payload
	temperature: 0.5;
	model: 'devstral';
	top_p: 1;
	max_tokens: 250;
	messages: {
		LLMAPIChatObjectMessage role: 'system' content: 'You are a useful assistant'.
		LLMAPIChatObjectMessage role: 'user' content: 'How to write hello world in Pharo?'.
	}.
result := api performRequest.
```
"
Class {
	#name : 'LLMAPIChat',
	#superclass : 'LLMAPI',
	#classInstVars : [
		'chatModel'
	],
	#category : 'LLM-API',
	#package : 'LLM-API'
}
{ #category : 'accessing' }
LLMAPIChat class >> chatModel [
	"Answer the class-side default chat model name. It is written by the settings
	framework (see #llmSettingOn:) and read by #initialize to seed the payload."

	^ chatModel
]
{ #category : 'accessing' }
LLMAPIChat class >> chatModel: anObject [
	"Set the class-side default chat model name used when initializing new instances."

	chatModel := anObject
]
{ #category : 'settings' }
LLMAPIChat class >> llmSettingOn: aBuilder [
	"Register the 'LLM Chat' settings group under the parent LLM settings group,
	exposing #chatModel as a user-editable preference with a ghost-text example.
	aBuilder is the system settings builder passed in by the <systemsettings> pragma."

	<systemsettings>
	(aBuilder group: #LLMChat)
		parent: super settingName;
		name: 'LLM Chat';
		with: [
			(aBuilder setting: #chatModel)
				order: 0;
				label: 'LLM Chat default model';
				target: self;
				default: 'devstral:latest';
				ghostHelp: 'devstral:latest' ]
]
{ #category : 'initialization' }
LLMAPIChat >> initialize [
	"Initialize the superclass state, then seed the payload's model with the
	class-side default chat model (configurable through the system settings)."

	super initialize.
	self payload model: self class chatModel
]
{ #category : 'accessing' }
LLMAPIChat >> path [
	"Answer the OpenAI-compatible chat completions endpoint path, appended to the
	configured host/port by the request machinery."

	^ 'v1/chat/completions'
]
{ #category : 'accessing' }
LLMAPIChat >> payloadClass [
	"Answer the class used to build this endpoint's request payload
	(temperature, model, messages, tools, ...)."

	^ LLMAPIChatObjectPayload
]
{ #category : 'api' }
LLMAPIChat >> performRequest [
	"Serialize the request body as JSON, POST it to the chat completions endpoint,
	and answer the processed result (the assistant's message content, with any tool
	calls resolved by #processIntermediateResult:).
	NOTE(review): the entity is built from `self content`, not `self payload` —
	presumably #content wraps or answers the payload; confirm in the superclass."

	| intermediateResult |
	self prepareRequest.
	self client entity: (ZnEntity json:
		(NeoJSONWriter toString: self content asJsonableObject)).
	intermediateResult := self client post.
	^ self processIntermediateResult: intermediateResult
]
{ #category : 'accessing' }
LLMAPIChat >> processIntermediateResult: intermediateResult [
	"Extract the assistant message from the API response. If it carries tool calls,
	execute each matching tool from the payload, append the tool results as 'tool'
	messages to the conversation, and recursively re-issue the request so the model
	can use the tool output. Otherwise answer the message content directly.
	NOTE(review): when there are no tool calls, the assistant message appended to
	`messages` is never written back to the payload — confirm whether callers expect
	the payload history to include the final answer."

	| message messages |
	"anyOne: only the first choice is considered; presumably n=1 — verify."
	message := intermediateResult choices anyOne message.
	messages := self payload messages asOrderedCollection.
	messages addLast: (LLMAPIChatObjectMessage fromMessage: message).
	message at: #tool_calls ifPresent: [ :tool_calls |
		tool_calls ifNotNil: [
			tool_calls do: [ :tool_call |
				| tool answer |
				"Look up the declared tool whose name matches the call; detect:
				signals an error if no tool matches — TODO confirm that is intended."
				tool := self payload tools detect: [ :t |
					t name = tool_call function name ].
				"Arguments arrive as a JSON string; decode before dispatching."
				answer := tool executeWithArguments:
					(NeoJSONReader fromString: tool_call function arguments)
					asArray.
				"Record the tool result so the follow-up request sees it."
				messages addLast:
					((LLMAPIChatObjectMessage role: 'tool' content: answer asString)
						name: tool name;
						tool_call_id: tool_call id;
						yourself).
				"Payload history is rewritten after every tool call, inside the loop."
				self payload messages: messages asArray ].
			"Re-issue the request with the enriched history; recursion ends once
			the model answers without tool calls."
			^ self performRequest ] ].
	^ message content
]