GitHub Repository: sagemathinc/cocalc
Path: blob/master/src/packages/frontend/account/user-defined-llm.tsx

import {
  Alert,
  Button,
  Flex,
  Form,
  Input,
  List,
  Modal,
  Popconfirm,
  Select,
  Skeleton,
  Space,
  Tooltip,
} from "antd";
import { useWatch } from "antd/es/form/Form";
import { sortBy } from "lodash";
import { FormattedMessage, useIntl } from "react-intl";

import {
  useEffect,
  useState,
  useTypedRedux,
} from "@cocalc/frontend/app-framework";
import {
  A,
  HelpIcon,
  Icon,
  RawPrompt,
  Text,
  Title,
} from "@cocalc/frontend/components";
import { LanguageModelVendorAvatar } from "@cocalc/frontend/components/language-model-icon";
import { webapp_client } from "@cocalc/frontend/webapp-client";
import { OTHER_SETTINGS_USERDEFINED_LLM as KEY } from "@cocalc/util/db-schema/defaults";
import {
  LLM_PROVIDER,
  SERVICES,
  UserDefinedLLM,
  UserDefinedLLMService,
  isLLMServiceName,
  toUserLLMModelName,
} from "@cocalc/util/db-schema/llm-utils";
import { trunc, unreachable } from "@cocalc/util/misc";

// @cspell:ignore mixtral userdefined
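
// Custom models are persisted in the account's other_settings under KEY as a
// JSON-encoded array of UserDefinedLLM records. An illustrative example of a
// stored value (the field values are hypothetical):
//
//   [{"id": 1, "service": "ollama", "display": "My Llama",
//     "endpoint": "https://ollama.example.com:11434/",
//     "model": "llama3:latest", "apiKey": ""}]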

interface Props {
  on_change: (name: string, value: any) => void;
}

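// Account settings panel "Bring your own Language Model": lists the user's
// custom LLM configurations and lets them add, edit, test, and delete
// entries, persisting all changes via on_change.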
export function UserDefinedLLMComponent({ on_change }: Props) {
  const intl = useIntl();
  const user_defined_llm = useTypedRedux("customize", "user_defined_llm");
  const other_settings = useTypedRedux("account", "other_settings");
  const [form] = Form.useForm();
  const [editLLM, setEditLLM] = useState<UserDefinedLLM | null>(null);
  const [tmpLLM, setTmpLLM] = useState<UserDefinedLLM | null>(null);
  const [loading, setLoading] = useState(false);
  const [llms, setLLMs] = useState<UserDefinedLLM[]>([]);
  const [error, setError] = useState<string | null>(null);

  const [needAPIKey, setNeedAPIKey] = useState(false);
  const [needEndpoint, setNeedEndpoint] = useState(false);

  const service: UserDefinedLLMService = useWatch("service", form);
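  // The self-hosted services (custom_openai, ollama) are reached via an
  // endpoint URL, while the hosted vendor APIs require an API key instead.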
  useEffect(() => {
    const v = service === "custom_openai" || service === "ollama";
    setNeedAPIKey(!v);
    setNeedEndpoint(v);
  }, [service]);

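  // Load the persisted configurations from the account settings, re-parsing
  // whenever the stored JSON value changes.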
  useEffect(() => {
    setLoading(true);
    const val = other_settings?.get(KEY) ?? "[]";
    try {
      const data: UserDefinedLLM[] = JSON.parse(val);
      setLLMs(sortBy(data, "id"));
    } catch (e) {
      setError(`Error parsing custom LLMs: ${e}`);
      setLLMs([]);
    }
    setLoading(false);
  }, [other_settings?.get(KEY)]);

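  // Keep the form in sync with the entry being edited: populate the fields
  // when an entry is opened for editing, clear them when the dialog closes.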
  useEffect(() => {
    if (editLLM != null) {
      form.setFieldsValue(editLLM);
    } else {
      form.resetFields();
    }
  }, [editLLM]);

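  // Next free ID: one more than the largest ID currently in use.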
  function getNextID(): number {
    return llms.reduce((id, m) => Math.max(id, m.id), 0) + 1;
  }

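  // Validate the given entry and persist it, replacing any existing entry
  // with the given oldID.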
  function save(next: UserDefinedLLM, oldID: number) {
    // trim all string fields in next
    for (const key in next) {
      if (typeof next[key] === "string") {
        next[key] = next[key].trim();
      }
    }
    // set the ID if it is not set already
    next.id ??= getNextID();

    const { service, display, model, endpoint } = next;
    if (
      !display ||
      !model ||
      (needEndpoint && !endpoint) ||
      (needAPIKey && !next.apiKey)
    ) {
      setError("Please fill in all fields – click the add button to fix it!");
      return;
    }
    if (!SERVICES.includes(service as any)) {
      setError(`Invalid service: ${service}`);
      return;
    }
    try {
      // replace an existing entry with the same ID, if there is one
      const newModels = llms.filter((m) => m.id !== oldID);
      newModels.push(next);
      on_change(KEY, JSON.stringify(newModels));
      setEditLLM(null);
    } catch (err) {
      setError(`Error saving custom LLM: ${err}`);
    }
  }

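  // Remove an entry; note that deletion is keyed on the model name, not the ID.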
  function deleteLLM(model: string) {
    try {
      const newModels = llms.filter((m) => m.model !== model);
      on_change(KEY, JSON.stringify(newModels));
    } catch (err) {
      setError(`Error deleting custom LLM: ${err}`);
    }
  }

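  // Button to add a new entry. If the previous attempt failed validation,
  // reopen the dialog with the previously entered values so they can be fixed.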
  function addLLM() {
    return (
      <Button
        block
        icon={<Icon name="plus-circle-o" />}
        onClick={() => {
          if (!error) {
            setEditLLM({
              id: getNextID(),
              service: "custom_openai",
              display: "",
              endpoint: "",
              model: "",
              apiKey: "",
            });
          } else {
            setEditLLM(tmpLLM);
            setError(null);
          }
        }}
      >
        <FormattedMessage
          id="account.user-defined-llm.add_button.label"
          defaultMessage="Add your own Language Model"
        />
      </Button>
    );
  }

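  // Open a modal in which the given configuration can be tried out with a
  // test prompt.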
  async function test(llm: UserDefinedLLM) {
    setLoading(true);
    Modal.info({
      closable: true,
      title: `Test ${llm.display} (${llm.model})`,
      content: <TestCustomLLM llm={llm} />,
      okText: "Close",
    });
    setLoading(false);
  }

  function renderList() {
    return (
      <List
        loading={loading}
        itemLayout="horizontal"
        dataSource={llms}
        renderItem={(item: UserDefinedLLM) => {
          const { display, model, endpoint, service } = item;
          if (!isLLMServiceName(service)) return null;

          return (
            <List.Item
              actions={[
                <Button
                  icon={<Icon name="pen" />}
                  type="link"
                  onClick={() => {
                    setEditLLM(item);
                  }}
                >
                  Edit
                </Button>,
                <Popconfirm
                  title={`Are you sure you want to delete the LLM ${display} (${model})?`}
                  onConfirm={() => deleteLLM(model)}
                  okText="Yes"
                  cancelText="No"
                >
                  <Button icon={<Icon name="trash" />} type="link" danger>
                    Delete
                  </Button>
                </Popconfirm>,
                <Button
                  icon={<Icon name="play-circle" />}
                  type="link"
                  onClick={() => test(item)}
                >
                  Test
                </Button>,
              ]}
            >
              <Skeleton avatar title={false} loading={false} active>
                <Tooltip
                  title={
                    <>
                      Model: {model}
                      <br />
                      Endpoint: {endpoint}
                      <br />
                      Service: {service}
                    </>
                  }
                >
                  <List.Item.Meta
                    avatar={
                      <LanguageModelVendorAvatar
                        model={toUserLLMModelName(item)}
                      />
                    }
                    title={display}
                  />
                </Tooltip>
              </Skeleton>
            </List.Item>
          );
        }}
      />
    );
  }

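  // Example model name for the currently selected service, shown as help text
  // in the form below.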
  function renderExampleModel() {
    switch (service) {
      case "custom_openai":
      case "openai":
        return "'gpt-4o'";
      case "ollama":
        return "'llama3:latest', 'phi3:instruct', ...";
      case "anthropic":
        return "'claude-3-sonnet-20240229'";
      case "mistralai":
        return "'open-mixtral-8x22b'";
      case "google":
        return "'gemini-2.0-flash'";
      default:
        unreachable(service);
        return "'llama3:latest'";
    }
  }

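  // Modal dialog for adding a new entry or editing an existing one.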
  function renderForm() {
    if (!editLLM) return null;
    return (
      <Modal
        open={editLLM != null}
        title="Edit Language Model"
        onOk={() => {
          const vals = form.getFieldsValue(true);
          setTmpLLM(vals);
          save(vals, editLLM.id);
          setEditLLM(null);
        }}
        onCancel={() => {
          setEditLLM(null);
        }}
      >
        <Form
          form={form}
          layout="horizontal"
          labelCol={{ span: 8 }}
          wrapperCol={{ span: 16 }}
        >
          <Form.Item
            label="Display Name"
            name="display"
            rules={[{ required: true }]}
            help="e.g. 'MyLLM'"
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Service"
            name="service"
            rules={[{ required: true }]}
            help="Select the kind of server to talk to – probably 'OpenAI API' or 'Ollama'."
          >
            <Select popupMatchSelectWidth={false}>
              {SERVICES.map((option) => {
                const { name, desc } = LLM_PROVIDER[option];
                return (
                  <Select.Option key={option} value={option}>
                    <Tooltip title={desc} placement="right">
                      <Text strong>{name}</Text>: {trunc(desc, 50)}
                    </Tooltip>
                  </Select.Option>
                );
              })}
            </Select>
          </Form.Item>
          <Form.Item
            label="Model Name"
            name="model"
            rules={[{ required: true }]}
            help={`This depends on the available models, e.g. ${renderExampleModel()}.`}
          >
            <Input />
          </Form.Item>
          <Form.Item
            label="Endpoint URL"
            name="endpoint"
            rules={[{ required: needEndpoint }]}
            help={
              needEndpoint
                ? "e.g. 'https://your.ollama.server:11434/' or 'https://api.openai.com/v1'"
                : "This setting is ignored."
            }
          >
            <Input disabled={!needEndpoint} />
          </Form.Item>
          <Form.Item
            label="API Key"
            name="apiKey"
            help="A secret string that you got from the service provider."
            rules={[{ required: needAPIKey }]}
          >
            <Input />
          </Form.Item>
        </Form>
      </Modal>
    );
  }

  function renderError() {
    if (!error) return null;
    return <Alert message={error} type="error" closable />;
  }

  const title = intl.formatMessage({
    id: "account.user-defined-llm.title",
    defaultMessage: "Bring your own Language Model",
  });

  function renderContent() {
    if (user_defined_llm) {
      return (
        <>
          {renderForm()}
          {renderList()}
          {addLLM()}
          {renderError()}
        </>
      );
    } else {
      return <Alert banner type="info" message="This feature is disabled." />;
    }
  }

  return (
    <>
      <Title level={5}>
        {title}{" "}
        <HelpIcon style={{ float: "right" }} maxWidth="300px" title={title}>
          <FormattedMessage
            id="account.user-defined-llm.info"
            defaultMessage={`This allows you to call a {llm} of your own.
              You either need an API key or have to run it on your own server.
              Make sure to click on "Test" to check that the communication with the API actually works.
              Most likely, the type you are looking for is "Custom OpenAI" or "Ollama".`}
            values={{
              llm: (
                <A href={"https://en.wikipedia.org/wiki/Large_language_model"}>
                  Large Language Model
                </A>
              ),
            }}
          />
        </HelpIcon>
      </Title>

      {renderContent()}
    </>
  );
}

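// Body of the test modal: sends a short prompt to the configured model and
// streams the reply back, so the user can verify that the configuration works.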
function TestCustomLLM({ llm }: { llm: UserDefinedLLM }) {
  const [querying, setQuerying] = useState<boolean>(false);
  const [prompt, setPrompt] = useState<string>("Capital city of Australia?");
  const [reply, setReply] = useState<string>("");
  const [error, setError] = useState<string>("");

  async function doQuery() {
    setQuerying(true);
    setError("");
    setReply("");
    try {
      const llmStream = webapp_client.openai_client.queryStream({
        input: prompt,
        project_id: null,
        tag: "userdefined-llm-test",
        model: toUserLLMModelName(llm),
        system: "This is a test. Reply briefly.",
        maxTokens: 100,
      });

      let reply = "";
      llmStream.on("token", (token) => {
        if (token) {
          reply += token;
          setReply(reply);
        } else {
          // an empty token signals the end of the stream
          setQuerying(false);
        }
      });

      llmStream.on("error", (err) => {
        setError(err?.toString());
        setQuerying(false);
      });
    } catch (e) {
      setError(e.message);
      setReply("");
      setQuerying(false);
    }
  }

  return (
    <Space direction="vertical">
      <Flex vertical={false} align="center" gap={5}>
        <Flex>Prompt: </Flex>
        <Input
          value={prompt}
          onChange={(e) => setPrompt(e.target.value)}
          onPressEnter={doQuery}
        />
        <Button loading={querying} type="primary" onClick={doQuery}>
          Test
        </Button>
      </Flex>
      {reply ? (
        <>
          Reply:
          <RawPrompt input={reply} />
        </>
      ) : null}
      {error ? <Alert banner message={error} type="error" /> : null}
    </Space>
  );
}