import type { AsyncRes } from "sortug";
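// A single chat message; `sent` is presumably a numeric timestamp and
// `reasoning` optionally carries the model's reasoning text.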
export type ChatMessage = {
  author: string;
  text: string;
  sent: number;
  reasoning?: string;
};
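// One multimodal input chunk: plain text or an image (the `img` string is
// presumably a URL or encoded image payload).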
export type InputToken = { text: string } | { img: string };
// me
export type RequestOptions = {
  textOutput: boolean;
};
export const defaultOptions: RequestOptions = {
  textOutput: true,
};
// openai
export type ContentType = { text: string } | { audio: Response };
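// Common surface implemented by each provider-specific client. `tokenizer`
// presumably returns the token count for the given text, and `maxTokens`
// the model's context limit.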
export interface AIModelAPI {
  setModel: (model: string) => void;
  tokenizer: (text: string) => number;
  maxTokens: number;
  send: (
    input: string | InputToken[],
    systemPrompt?: string,
  ) => AsyncRes<string>;
  stream: (
    input: string | InputToken[],
    handler: (data: string) => void,
    systemPrompt?: string,
  ) => void;
}
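// Selects a provider and model. Most variants carry just a model name; the
// `openai` variant targets any OpenAI-compatible endpoint with its own base
// URL and API key.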
export type LLMChoice =
  | { gemini: string }
  | { claude: string }
  | { chatgpt: string }
  | { grok: string }
  | { deepseek: string }
  | { kimi: string }
  | { openai: { url: string; apiKey: string; model: string } };
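
// Usage sketch (illustrative only): `buildClient` is a hypothetical factory,
// not part of this module; `url`, `apiKey`, and `imageUrl` are placeholders.
// It is shown only to indicate how LLMChoice and AIModelAPI fit together.
//
//   const choice: LLMChoice = { openai: { url, apiKey, model: "gpt-4o" } };
//   const api: AIModelAPI = buildClient(choice);
//   const res = await api.send(
//     [{ text: "Describe this image" }, { img: imageUrl }],
//     "You are a concise assistant.",
//   );
//   api.stream("Hello", (chunk) => process.stdout.write(chunk));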