summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorpolwex <polwex@sortug.com>2025-10-19 12:54:25 +0700
committerpolwex <polwex@sortug.com>2025-10-19 12:54:25 +0700
commit8815d3c1d40550470c5bc972bc16bd4966735154 (patch)
tree92ef606b568035b9e88d89286be3330f4b84af1e /src
parentba16ebcbe36c1a1cbdb1d1379cb3f9c3a086acdf (diff)
new openai responses api and some claude made tests (HEAD, master)
Diffstat (limited to 'src')
-rw-r--r--src/openai-responses.ts160
-rw-r--r--src/types/index.ts8
2 files changed, 167 insertions, 1 deletion
diff --git a/src/openai-responses.ts b/src/openai-responses.ts
new file mode 100644
index 0000000..71c535c
--- /dev/null
+++ b/src/openai-responses.ts
@@ -0,0 +1,160 @@
+import OpenAI from "openai";
+import { MAX_TOKENS, RESPONSE_LENGTH } from "./logic/constants";
+import type { AIModelAPI, ChatMessage, InputToken } from "./types";
+import type { AsyncRes } from "sortug";
+import type {
+ ResponseContent,
+ ResponseInput,
+ ResponseInputContent,
+ ResponseInputItem,
+ ResponseOutputItem,
+ ResponseOutputMessage,
+} from "openai/resources/responses/responses";
+import type { ResponseCreateAndStreamParams } from "openai/lib/responses/ResponseStream";
+
// Constructor options for OpenAIAPI.
type Props = {
  baseURL: string; // OpenAI-compatible API endpoint
  apiKey: string; // secret key passed to the OpenAI SDK client
  model?: string; // initial model id; may be changed later via setModel()
  maxTokens?: number; // overrides the MAX_TOKENS default when provided
  tokenizer?: (text: string) => number; // custom token-count estimator; defaults to a chars/3 heuristic
  allowBrowser?: boolean; // forwarded to the SDK's dangerouslyAllowBrowser flag
};
+export default class OpenAIAPI implements AIModelAPI {
+ private apiKey;
+ private baseURL;
+ private api;
+ maxTokens: number = MAX_TOKENS;
+ tokenizer: (text: string) => number = (text) => text.length / 3;
+ model;
+
+ constructor(props: Props) {
+ this.apiKey = props.apiKey;
+ this.baseURL = props.baseURL;
+ this.api = new OpenAI({
+ baseURL: this.baseURL,
+ apiKey: this.apiKey,
+ dangerouslyAllowBrowser: props.allowBrowser || false,
+ });
+ this.model = props.model || "";
+ if (props.maxTokens) this.maxTokens = props.maxTokens;
+ if (props.tokenizer) this.tokenizer = props.tokenizer;
+ }
+ public setModel(model: string) {
+ this.model = model;
+ }
+ // response input items are text, image, file, conversation state or function cals
+ private buildInput(tokens: InputToken[]): ResponseInputItem[] {
+ const content: ResponseInputContent[] = tokens.map((t) => {
+ if ("text" in t) return { type: "input_text" as const, text: t.text };
+ // image_url or file_id
+ else if ("img" in t)
+ return {
+ type: "input_image" as const,
+ image_url: t.img,
+ detail: "auto",
+ };
+ // file_data or file_id or file_url or filename
+ else if ("file" in t)
+ return { type: "input_file" as const, file_data: t.file.file_data };
+ // TODO obviously
+ else return { type: "input_text" as const, text: "oy vey" };
+ });
+ // role can be user, developer, or system
+ return [{ role: "user" as const, content }];
+ }
+
+ public async send(
+ userInput: string | InputToken[],
+ sys?: string,
+ ): AsyncRes<string> {
+ const input: string | ResponseInput =
+ typeof userInput === "string" ? userInput : this.buildInput(userInput);
+ // const messages = this.mapMessages(input);
+ const res = await this.apiCall({ instructions: sys, input });
+ if ("error" in res) return res;
+ else {
+ try {
+ // TODO type this properly
+ const resText = res.ok.reduce((acc, item) => {
+ if (item.type === "message" && item.status === "completed") {
+ const outputText = this.getOutputText(item.content);
+ return `${acc}\n${outputText}`;
+ }
+ // TODO else
+ return acc;
+ }, "");
+ return { ok: resText };
+ } catch (e) {
+ return { error: `${e}` };
+ }
+ }
+ }
+ getOutputText(content: ResponseOutputMessage["content"]): string {
+ let text = "";
+ for (const c of content) {
+ if (c.type === "refusal") text += `\nRefused to respond: ${c.refusal}\n`;
+ else text += `\n${c.text}\n`;
+ }
+ return text;
+ }
+
+ public async stream(
+ userInput: string | InputToken[],
+ handle: (c: string) => void,
+ sys?: string,
+ ) {
+ const input: string | ResponseInput =
+ typeof userInput === "string" ? userInput : this.buildInput(userInput);
+ await this.apiCallStream({ instructions: sys, input }, handle);
+ }
+
+ // TODO custom temperature?dune exec -- ./test/test_nock.exe --verbose
+ private async apiCall(
+ params: OpenAI.Responses.ResponseCreateParamsNonStreaming,
+ ): AsyncRes<ResponseOutputItem[]> {
+ // console.log({ messages }, "at the very end");
+ try {
+ const response = await this.api.responses.create({
+ ...params,
+ model: this.model,
+ // max_output_tokens: RESPONSE_LENGTH,
+ });
+ if (response.status !== "completed")
+ return {
+ error:
+ response.incomplete_details?.reason || response.status || "error",
+ };
+
+ return { ok: response.output };
+ } catch (e) {
+ console.log(e, "error in openai api");
+ return { error: `${e}` };
+ }
+ }
+
+ private async apiCallStream(
+ params: ResponseCreateAndStreamParams,
+ handle: (c: string) => void,
+ ): Promise<void> {
+ try {
+ const stream = await this.api.responses.create({
+ // temperature: 1.3,
+ ...params,
+ stream: true,
+ model: this.model,
+ max_output_tokens: RESPONSE_LENGTH,
+ });
+
+ for await (const chunk of stream) {
+ console.log("stream reponse", chunk);
+ if (chunk.type === "response.output_text.done") handle(chunk.text);
+ // TODO else
+ }
+ } catch (e) {
+ console.log(e, "error in openai api");
+ // TODO
+ // handle(`Error streaming OpenAI, ${e}`);
+ }
+ }
+}
diff --git a/src/types/index.ts b/src/types/index.ts
index f6df6bf..7c76bfc 100644
--- a/src/types/index.ts
+++ b/src/types/index.ts
@@ -1,3 +1,4 @@
+import type { ResponseInputFile } from "openai/resources/responses/responses.js";
import type { AsyncRes } from "sortug";
export type ChatMessage = {
author: string;
@@ -6,7 +7,12 @@ export type ChatMessage = {
reasoning?: string;
};
-export type InputToken = { text: string } | { img: string };
// One piece of multimodal model input, consumed by OpenAIAPI.buildInput.
export type InputToken =
  | { text: string } // plain text segment
  | { img: string } // image URL or data URL (becomes an input_image item)
  | { file: ResponseInputFile } // file payload (becomes an input_file item)
  | { tools: ToolUseInput[] }; // tool-use inputs — NOTE(review): not yet handled by buildInput
// NOTE(review): deliberately `any` until the tool-call shape is settled;
// tightening to `unknown` now would break downstream field access.
export type ToolUseInput = any; // TODO
// me
export type RequestOptions = {
textOutput: boolean;