From 8815d3c1d40550470c5bc972bc16bd4966735154 Mon Sep 17 00:00:00 2001
From: polwex
Date: Sun, 19 Oct 2025 12:54:25 +0700
Subject: new openai responses api and some claude made tests

---
 src/openai-responses.ts | 160 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 160 insertions(+)
 create mode 100644 src/openai-responses.ts

(limited to 'src/openai-responses.ts')

diff --git a/src/openai-responses.ts b/src/openai-responses.ts
new file mode 100644
index 0000000..71c535c
--- /dev/null
+++ b/src/openai-responses.ts
@@ -0,0 +1,160 @@
+import OpenAI from "openai";
+import { MAX_TOKENS, RESPONSE_LENGTH } from "./logic/constants";
+import type { AIModelAPI, ChatMessage, InputToken } from "./types";
+import type { AsyncRes } from "sortug";
+import type {
+  ResponseContent,
+  ResponseInput,
+  ResponseInputContent,
+  ResponseInputItem,
+  ResponseOutputItem,
+  ResponseOutputMessage,
+} from "openai/resources/responses/responses";
+import type { ResponseCreateAndStreamParams } from "openai/lib/responses/ResponseStream";
+
+type Props = {
+  baseURL: string;
+  apiKey: string;
+  model?: string;
+  maxTokens?: number;
+  tokenizer?: (text: string) => number;
+  allowBrowser?: boolean;
+};
+export default class OpenAIAPI implements AIModelAPI {
+  private apiKey;
+  private baseURL;
+  private api;
+  maxTokens: number = MAX_TOKENS;
+  tokenizer: (text: string) => number = (text) => text.length / 3;
+  model;
+
+  constructor(props: Props) {
+    this.apiKey = props.apiKey;
+    this.baseURL = props.baseURL;
+    this.api = new OpenAI({
+      baseURL: this.baseURL,
+      apiKey: this.apiKey,
+      dangerouslyAllowBrowser: props.allowBrowser || false,
+    });
+    this.model = props.model || "";
+    if (props.maxTokens) this.maxTokens = props.maxTokens;
+    if (props.tokenizer) this.tokenizer = props.tokenizer;
+  }
+  public setModel(model: string) {
+    this.model = model;
+  }
+  // response input items are text, image, file, conversation state or function calls
+  private buildInput(tokens: InputToken[]): ResponseInputItem[] {
+    const content: ResponseInputContent[] = tokens.map((t) => {
+      if ("text" in t) return { type: "input_text" as const, text: t.text };
+      // image_url or file_id
+      else if ("img" in t)
+        return {
+          type: "input_image" as const,
+          image_url: t.img,
+          detail: "auto",
+        };
+      // file_data or file_id or file_url or filename
+      else if ("file" in t)
+        return { type: "input_file" as const, file_data: t.file.file_data };
+      // TODO obviously
+      else return { type: "input_text" as const, text: "oy vey" };
+    });
+    // role can be user, developer, or system
+    return [{ role: "user" as const, content }];
+  }
+
+  public async send(
+    userInput: string | InputToken[],
+    sys?: string,
+  ): AsyncRes<string> {
+    const input: string | ResponseInput =
+      typeof userInput === "string" ? userInput : this.buildInput(userInput);
+    // const messages = this.mapMessages(input);
+    const res = await this.apiCall({ instructions: sys, input });
+    if ("error" in res) return res;
+    else {
+      try {
+        // TODO type this properly
+        const resText = res.ok.reduce((acc, item) => {
+          if (item.type === "message" && item.status === "completed") {
+            const outputText = this.getOutputText(item.content);
+            return `${acc}\n${outputText}`;
+          }
+          // TODO else
+          return acc;
+        }, "");
+        return { ok: resText };
+      } catch (e) {
+        return { error: `${e}` };
+      }
+    }
+  }
+  getOutputText(content: ResponseOutputMessage["content"]): string {
+    let text = "";
+    for (const c of content) {
+      if (c.type === "refusal") text += `\nRefused to respond: ${c.refusal}\n`;
+      else text += `\n${c.text}\n`;
+    }
+    return text;
+  }
+
+  public async stream(
+    userInput: string | InputToken[],
+    handle: (c: string) => void,
+    sys?: string,
+  ) {
+    const input: string | ResponseInput =
+      typeof userInput === "string" ? userInput : this.buildInput(userInput);
+    await this.apiCallStream({ instructions: sys, input }, handle);
+  }
+
+  // TODO custom temperature?
+  private async apiCall(
+    params: OpenAI.Responses.ResponseCreateParamsNonStreaming,
+  ): AsyncRes<ResponseOutputItem[]> {
+    // console.log({ messages }, "at the very end");
+    try {
+      const response = await this.api.responses.create({
+        ...params,
+        model: this.model,
+        // max_output_tokens: RESPONSE_LENGTH,
+      });
+      if (response.status !== "completed")
+        return {
+          error:
+            response.incomplete_details?.reason || response.status || "error",
+        };
+
+      return { ok: response.output };
+    } catch (e) {
+      console.log(e, "error in openai api");
+      return { error: `${e}` };
+    }
+  }
+
+  private async apiCallStream(
+    params: ResponseCreateAndStreamParams,
+    handle: (c: string) => void,
+  ): Promise<void> {
+    try {
+      const stream = await this.api.responses.create({
+        // temperature: 1.3,
+        ...params,
+        stream: true,
+        model: this.model,
+        max_output_tokens: RESPONSE_LENGTH,
+      });
+
+      for await (const chunk of stream) {
+        console.log("stream response", chunk);
+        if (chunk.type === "response.output_text.done") handle(chunk.text);
+        // TODO else
+      }
+    } catch (e) {
+      console.log(e, "error in openai api");
+      // TODO
+      // handle(`Error streaming OpenAI, ${e}`);
+    }
+  }
+}
--
cgit v1.2.3
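For reference, a minimal usage sketch of the new class (not part of the commit). It assumes AsyncRes<T> from "sortug" resolves to { ok: T } | { error: string }, as implied by the `"error" in res` checks above, and that InputToken carries the { text } / { img } / { file: { file_data } } variants consumed by buildInput(); the base URL and model id below are placeholders:

    import OpenAIAPI from "./src/openai-responses";

    const api = new OpenAIAPI({
      baseURL: "https://api.openai.com/v1", // assumed endpoint
      apiKey: process.env.OPENAI_API_KEY ?? "",
      model: "gpt-4.1-mini", // hypothetical model id
    });

    // send() concatenates the text of every completed "message" output item.
    const res = await api.send("Summarize the Responses API in one sentence.");
    if ("error" in res) console.error(res.error);
    else console.log(res.ok);

    // stream() fires the handler once per "response.output_text.done" event,
    // i.e. with the full text of each output item rather than per-token deltas.
    await api.stream("Tell me a short story.", (c) => process.stdout.write(c));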