author    polwex <polwex@sortug.com>  2025-02-13 12:29:32 +0700
committer polwex <polwex@sortug.com>  2025-02-13 12:29:32 +0700
commit    4c6913644b362b28f15b125c2fbe48165f1e048c (patch)
tree      147f5e16f18e956fe55959ac3e03e5afb03b9b52
init
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | .gitignore | 175 |
| -rw-r--r-- | README.md | 15 |
| -rw-r--r-- | bun.lock | 205 |
| -rw-r--r-- | index.ts | 13 |
| -rw-r--r-- | package.json | 19 |
| -rw-r--r-- | src/claude.ts | 157 |
| -rw-r--r-- | src/gemini.ts | 137 |
| -rw-r--r-- | src/logic/constants.ts | 3 |
| -rw-r--r-- | src/model.ts | 124 |
| -rw-r--r-- | src/openai.ts | 266 |
| -rw-r--r-- | src/openai_tools.ts | 66 |
| -rw-r--r-- | src/prompts.ts | 14 |
| -rw-r--r-- | src/types/index.ts | 15 |
| -rw-r--r-- | src/types/mtproto.ts | 0 |
| -rw-r--r-- | tsconfig.json | 27 |

15 files changed, 1236 insertions(+), 0 deletions(-)
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9b1ee42
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,175 @@
+# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
+
+# Logs
+
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Caches
+
+.cache
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+
+lib-cov
+
+# Coverage directory used by tools like istanbul
+
+coverage
+*.lcov
+
+# nyc test coverage
+
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+
+bower_components
+
+# node-waf configuration
+
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+
+build/Release
+
+# Dependency directories
+
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+
+web_modules/
+
+# TypeScript cache
+
+*.tsbuildinfo
+
+# Optional npm cache directory
+
+.npm
+
+# Optional eslint cache
+
+.eslintcache
+
+# Optional stylelint cache
+
+.stylelintcache
+
+# Microbundle cache
+
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+
+.node_repl_history
+
+# Output of 'npm pack'
+
+*.tgz
+
+# Yarn Integrity file
+
+.yarn-integrity
+
+# dotenv environment variable files
+
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+
+.parcel-cache
+
+# Next.js build output
+
+.next
+out
+
+# Nuxt.js build / generate output
+
+.nuxt
+dist
+
+# Gatsby files
+
+# Comment in the public line in if your project uses Gatsby and not Next.js
+
+# https://nextjs.org/blog/next-9-1#public-directory-support
+
+# public
+
+# vuepress build output
+
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+
+.temp
+
+# Docusaurus cache and generated files
+
+.docusaurus
+
+# Serverless directories
+
+.serverless/
+
+# FuseBox cache
+
+.fusebox/
+
+# DynamoDB Local files
+
+.dynamodb/
+
+# TernJS port file
+
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+
+.vscode-test
+
+# yarn v2
+
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+# IntelliJ based IDEs
+.idea
+
+# Finder (MacOS) folder config
+.DS_Store
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b89d245
--- /dev/null
+++ b/README.md
@@ -0,0 +1,15 @@
+# models
+
+To install dependencies:
+
+```bash
+bun install
+```
+
+To run:
+
+```bash
+bun run index.ts
+```
+
+This project was created using `bun init` in bun v1.2.2. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
diff --git a/bun.lock b/bun.lock
new file mode 100644
index 0000000..37789a0
--- /dev/null
+++ b/bun.lock
@@ -0,0 +1,205 @@
+{
+  "lockfileVersion": 1,
+  "workspaces": {
+    "": {
+      "name": "models",
+      "dependencies": {
+        "@anthropic-ai/sdk": "^0.36.3",
+        "@google/generative-ai": "^0.21.0",
+        "groq-sdk": "^0.15.0",
+        "openai": "^4.84.0",
+        "playht": "^0.16.0",
+        "replicate": "^1.0.1",
+      },
+      "devDependencies": {
+        "@types/bun": "latest",
+      },
+      "peerDependencies": {
+        "typescript": "^5.0.0",
+      },
+    },
+  },
+  "packages": {
+    "@anthropic-ai/sdk": ["@anthropic-ai/sdk@0.36.3", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-+c0mMLxL/17yFZ4P5+U6bTWiCSFZUKJddrv01ud2aFBWnTPLdRncYV76D3q1tqfnL7aCnhRtykFnoCFzvr4U3Q=="],
+
+    "@google/generative-ai": ["@google/generative-ai@0.21.0", "", {}, "sha512-7XhUbtnlkSEZK15kN3t+tzIMxsbKm/dSkKBFalj+20NvPKe1kBY7mR2P7vuijEn+f06z5+A8bVGKO0v39cr6Wg=="],
+
+    "@grpc/grpc-js": ["@grpc/grpc-js@1.12.6", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-JXUj6PI0oqqzTGvKtzOkxtpsyPRNsrmhh41TtIz/zEB6J+AUiZZ0dxWzcMwO9Ns5rmSPuMdghlTbUuqIM48d3Q=="],
+
+    "@grpc/proto-loader": ["@grpc/proto-loader@0.7.13", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw=="],
+
+    "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="],
+
+    "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="],
+
+    "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="],
+
+    "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="],
+
+    "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="],
+
+    "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="],
+
+    "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="],
+
+    "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="],
+
+    "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="],
+
+    "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="],
+
+    "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="],
+
+    "@tokenizer/token": ["@tokenizer/token@0.3.0", "", {}, "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A=="],
+
+    "@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="],
+
+    "@types/node": ["@types/node@18.19.75", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-UIksWtThob6ZVSyxcOqCLOUNg/dyO1Qvx4McgeuhrEtHTLFTf7BBhEazaE4K806FGTPtzd/2sE90qn4fVr7cyw=="],
+
+    "@types/node-fetch": ["@types/node-fetch@2.6.12", "", { "dependencies": { "@types/node": "*", "form-data": "^4.0.0" } }, "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA=="],
+
+    "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="],
+
+    "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="],
+
+    "agentkeepalive": ["agentkeepalive@4.6.0", "", { "dependencies": { "humanize-ms": "^1.2.1" } }, "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ=="],
+
+    "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
+
+    "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
+
+    "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
+
+    "axios": ["axios@1.7.9", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw=="],
+
+    "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
+
+    "buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="],
+
+    "bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="],
+
+    "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="],
+
+    "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
+
+    "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
+
+    "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
+
+    "cross-fetch": ["cross-fetch@4.1.0", "", { "dependencies": { "node-fetch": "^2.7.0" } }, "sha512-uKm5PU+MHTootlWEY+mZ4vvXoCn4fLQxT9dSc1sXVMSFkINTJVN8cAQROpwcKm8bJ/c7rgZVIBWzH5T78sNZZw=="],
+
+    "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
+
+    "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
+
+    "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
+
+    "event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="],
+
+    "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="],
+
+    "file-type": ["file-type@18.7.0", "", { "dependencies": { "readable-web-to-node-stream": "^3.0.2", "strtok3": "^7.0.0", "token-types": "^5.0.1" } }, "sha512-ihHtXRzXEziMrQ56VSgU7wkxh55iNchFkosu7Y9/S+tXHdKyrGjVK0ujbqNnsxzea+78MaLhN6PGmfYSAv1ACw=="],
+
+    "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="],
+
+    "form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
+
+    "form-data-encoder": ["form-data-encoder@1.7.2", "", {}, "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A=="],
+
+    "formdata-node": ["formdata-node@4.4.1", "", { "dependencies": { "node-domexception": "1.0.0", "web-streams-polyfill": "4.0.0-beta.3" } }, "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ=="],
+
+    "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="],
+
+    "groq-sdk": ["groq-sdk@0.15.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" } }, "sha512-aYDEdr4qczx3cLCRRe+Beb37I7g/9bD5kHF+EEDxcrREWw1vKoRcfP3vHEkJB7Ud/8oOuF0scRwDpwWostTWuQ=="],
+
+    "humanize-ms": ["humanize-ms@1.2.1", "", { "dependencies": { "ms": "^2.0.0" } }, "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ=="],
+
+    "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="],
+
+    "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="],
+
+    "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="],
+
+    "long": ["long@5.3.0", "", {}, "sha512-5vvY5yF1zF/kXk+L94FRiTDa1Znom46UjPCH6/XbSvS8zBKMFBHTJk8KDMqJ+2J6QezQFi7k1k8v21ClJYHPaw=="],
+
+    "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
+
+    "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
+
+    "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
+
+    "node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
+
+    "node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
+
+    "openai": ["openai@4.84.0", "", { "dependencies": { "@types/node": "^18.11.18", "@types/node-fetch": "^2.6.4", "abort-controller": "^3.0.0", "agentkeepalive": "^4.2.1", "form-data-encoder": "1.7.2", "formdata-node": "^4.3.2", "node-fetch": "^2.6.7" }, "peerDependencies": { "ws": "^8.18.0", "zod": "^3.23.8" }, "optionalPeers": ["ws", "zod"], "bin": { "openai": "bin/cli" } }, "sha512-Smu45QjqWcPXkTHAI7GSeW+sI9ZOqB9VW4wiuhBvWLGHYC9dn/3rIpG8PUysbCT+ciVyDdEdsx0zkgKQDoOx9Q=="],
+
+    "peek-readable": ["peek-readable@5.4.2", "", {}, "sha512-peBp3qZyuS6cNIJ2akRNG1uo1WJ1d0wTxg/fxMdZ0BqCVhx242bSFHM9eNqflfJVS9SsgkzgT/1UgnsurBOTMg=="],
+
+    "playht": ["playht@0.16.0", "", { "dependencies": { "@grpc/grpc-js": "^1.9.4", "axios": "^1.4.0", "cross-fetch": "^4.0.0", "file-type": "^18.5.0", "protobufjs": "^7.2.5", "tslib": "^2.1.0" } }, "sha512-gwKqGcmUwrd3NaG6B2z5RZCjxPM0CI915Bmej+GXWZU2PSdN2g4hXsDMnjts+uakLaqGEY8YaIqNokyYH7SnvQ=="],
+
+    "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="],
+
+    "protobufjs": ["protobufjs@7.4.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw=="],
+
+    "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
+
+    "readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="],
+
+    "readable-web-to-node-stream": ["readable-web-to-node-stream@3.0.3", "", { "dependencies": { "process": "^0.11.10", "readable-stream": "^4.7.0" } }, "sha512-In3boYjBnbGVrLuuRu/Ath/H6h1jgk30nAsk/71tCare1dTVoe1oMBGRn5LGf0n3c1BcHwwAqpraxX4AUAP5KA=="],
+
+    "replicate": ["replicate@1.0.1", "", { "optionalDependencies": { "readable-stream": ">=4.0.0" } }, "sha512-EY+rK1YR5bKHcM9pd6WyaIbv6m2aRIvHfHDh51j/LahlHTLKemTYXF6ptif2sLa+YospupAsIoxw8Ndt5nI3vg=="],
+
+    "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
+
+    "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
+
+    "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="],
+
+    "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
+
+    "strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
+
+    "strtok3": ["strtok3@7.1.1", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^5.1.3" } }, "sha512-mKX8HA/cdBqMKUr0MMZAFssCkIGoZeSCMXgnt79yKxNFguMLVFgRe6wB+fsL0NmoHDbeyZXczy7vEPSoo3rkzg=="],
+
+    "token-types": ["token-types@5.0.1", "", { "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" } }, "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg=="],
+
+    "tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
+
+    "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
+
+    "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="],
+
+    "undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
+
+    "web-streams-polyfill": ["web-streams-polyfill@4.0.0-beta.3", "", {}, "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug=="],
+
+    "webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
+
+    "whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
+
+    "wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
+
+    "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="],
+
+    "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="],
+
+    "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="],
+
+    "@types/node-fetch/@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="],
+
+    "@types/ws/@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="],
+
+    "bun-types/@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="],
+
+    "protobufjs/@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="],
+
+    "@types/node-fetch/@types/node/undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
+
+    "@types/ws/@types/node/undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
+
+    "bun-types/@types/node/undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
+
+    "protobufjs/@types/node/undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
+  }
+}
diff --git a/index.ts b/index.ts
new file mode 100644
index 0000000..c2fc78e
--- /dev/null
+++ b/index.ts
@@ -0,0 +1,13 @@
+import openai from "./src/openai";
+import claude from "./src/claude";
+import gemini from "./src/gemini";
+import generic from "./src/model";
+import type { ChatMessage } from "./src/types";
+
+export {
+  openai as OpenAI,
+  claude as Claude,
+  gemini as Gemini,
+  generic as Model,
+};
+export type { ChatMessage };
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..fa8ad67
--- /dev/null
+++ b/package.json
@@ -0,0 +1,19 @@
+{
+  "name": "models",
+  "module": "index.ts",
+  "type": "module",
+  "devDependencies": {
+    "@types/bun": "latest"
+  },
+  "peerDependencies": {
+    "typescript": "^5.0.0"
+  },
+  "dependencies": {
+    "@anthropic-ai/sdk": "^0.36.3",
+    "@google/generative-ai": "^0.21.0",
+    "groq-sdk": "^0.15.0",
+    "openai": "^4.84.0",
+    "playht": "^0.16.0",
+    "replicate": "^1.0.1"
+  }
+}
\ No newline at end of file
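The entry point just re-exports the per-provider wrappers. A hypothetical consumer, assuming the package is linked or installed under its `package.json` name `models` and that `OPENAI_API_KEY` is set (the model name is illustrative):

```ts
import { Model, type ChatMessage } from "models";

const chat = new Model({
  baseURL: "https://api.openai.com/v1",
  apiKey: Bun.env["OPENAI_API_KEY"]!,
  model: "gpt-4o-mini", // illustrative model name
});

const history: ChatMessage[] = [
  { author: "user", text: "Say hello.", sent: Date.now() },
];

// send() returns the repo's { ok } | { error } union rather than throwing.
const res = await chat.send("You are a helpful assistant.", history);
if ("ok" in res) console.log(res.ok.join("\n"));
else console.error(res.error);
```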
Be thorough.", + }, + ], + }; + const res = await this.apiCall(sys, [msg]); + return res; + } + + public async stream( + sys: string, + input: ChatMessage[], + handle: (c: any) => void, + ) { + const messages = this.mapMessages(input); + const truncated = this.truncateHistory(messages); + await this.apiCallStream(sys, truncated, handle); + } + + public async streamR1(input: ChatMessage[], handle: (c: any) => void) { + const messages = this.mapMessagesR1(input); + const truncated = this.truncateHistory(messages); + await this.apiCallStream("", truncated, handle, true); + } + + private truncateHistory(messages: Message[]): Message[] { + const totalTokens = messages.reduce((total, message) => { + return total + this.tokenizer(message.content as string); + }, 0); + while (totalTokens > this.maxTokens && messages.length > 1) { + messages.splice(0, 1); + } + return messages; + } + + // TODO + // https://docs.anthropic.com/en/api/messages-examples#putting-words-in-claudes-mouth + private async apiCall( + system: string, + messages: Message[], + isR1: boolean = false, + ): Promise<AResult<string[]>> { + try { + const claud = new Claude(); + // const list = await claud.models.list(); + // console.log(list.data); + const res = await claud.messages.create({ + model: this.model, + max_tokens: RESPONSE_LENGTH, + system, + messages, + }); + return { + ok: res.content.reduce((acc: string[], item) => { + if (item.type === "tool_use") return acc; + else return [...acc, item.text]; + }, []), + }; + } catch (e) { + console.log(e, "error in claude api"); + return { error: `${e}` }; + } + } + + private async apiCallStream( + system: string, + messages: Message[], + handle: (c: any) => void, + isR1: boolean = false, + ): Promise<void> { + try { + const claud = new Claude(); + const stream = await claud.messages.create({ + model: this.model, + max_tokens: RESPONSE_LENGTH, + system, + messages, + stream: true, + }); + + for await (const part of stream) { + if (part.type === "message_start") continue; + if (part.type === "content_block_start") continue; + if (part.type === "content_block_delta") { + console.log("delta", part.delta); + const delta: any = part.delta; + handle(delta.text); + } + } + } catch (e) { + console.log(e, "error in claude api"); + handle(`Error streaming Claude, ${e}`); + } + } +} diff --git a/src/gemini.ts b/src/gemini.ts new file mode 100644 index 0000000..2f685a2 --- /dev/null +++ b/src/gemini.ts @@ -0,0 +1,137 @@ +import { + GenerativeModel, + GoogleGenerativeAI, + type Content, + type GenerateContentResult, +} from "@google/generative-ai"; +import { RESPONSE_LENGTH } from "./logic/constants"; +import type { AResult, ChatMessage, OChoice, OChunk, OMessage } from "./types"; + +export default class Conversation { + private tokenizer: (text: string) => number; + private maxTokens: number; + private model: GenerativeModel; + + constructor( + maxTokens = 200_000, + tokenizer: (text: string) => number = (text) => text.length / 3, + ) { + this.maxTokens = maxTokens; + this.tokenizer = tokenizer; + + const gem = new GoogleGenerativeAI(Bun.env["GEMINI_API_KEY"]!); + this.model = gem.getGenerativeModel({ + model: "gemini-2.0-flash-exp", + generationConfig: { maxOutputTokens: RESPONSE_LENGTH }, + }); + } + + public setModel(model: string) { + const gem = new GoogleGenerativeAI(Bun.env["GEMINI_API_KEY"]!); + this.model = gem.getGenerativeModel({ + model, + generationConfig: { maxOutputTokens: RESPONSE_LENGTH }, + }); + } + private mapMessages(input: ChatMessage[]): Content[] { + return input.map((m) 
diff --git a/src/gemini.ts b/src/gemini.ts
new file mode 100644
index 0000000..2f685a2
--- /dev/null
+++ b/src/gemini.ts
@@ -0,0 +1,137 @@
+import {
+  GenerativeModel,
+  GoogleGenerativeAI,
+  type Content,
+  type GenerateContentResult,
+} from "@google/generative-ai";
+import { RESPONSE_LENGTH } from "./logic/constants";
+import type { AResult, ChatMessage, OChoice, OChunk, OMessage } from "./types";
+
+export default class Conversation {
+  private tokenizer: (text: string) => number;
+  private maxTokens: number;
+  private model: GenerativeModel;
+
+  constructor(
+    maxTokens = 200_000,
+    tokenizer: (text: string) => number = (text) => text.length / 3,
+  ) {
+    this.maxTokens = maxTokens;
+    this.tokenizer = tokenizer;
+
+    const gem = new GoogleGenerativeAI(Bun.env["GEMINI_API_KEY"]!);
+    this.model = gem.getGenerativeModel({
+      model: "gemini-2.0-flash-exp",
+      generationConfig: { maxOutputTokens: RESPONSE_LENGTH },
+    });
+  }
+
+  public setModel(model: string) {
+    const gem = new GoogleGenerativeAI(Bun.env["GEMINI_API_KEY"]!);
+    this.model = gem.getGenerativeModel({
+      model,
+      generationConfig: { maxOutputTokens: RESPONSE_LENGTH },
+    });
+  }
+  private mapMessages(input: ChatMessage[]): Content[] {
+    return input.map((m) => ({
+      role: m.author === "gemini" ? "model" : "user",
+      parts: [{ text: m.text }],
+    }));
+  }
+
+  private mapMessagesR1(input: ChatMessage[]): Content[] {
+    return input.reduce((acc: Content[], m) => {
+      const prev = acc[acc.length - 1];
+      const role = m.author === "gemini" ? "model" : "user";
+      // Merge consecutive same-role turns instead of dropping the earlier one.
+      if (prev?.role === role)
+        prev.parts = [{ text: `${prev.parts[0]?.text ?? ""}\n${m.text}` }];
+      else acc.push({ role, parts: [{ text: m.text }] });
+      return acc;
+    }, []);
+  }
+
+  private async apiCall(
+    messages: Content[],
+    isR1: boolean = false,
+  ): Promise<AResult<string[]>> {
+    try {
+      // The prompt is already the last history entry, so an empty message
+      // kicks off the reply.
+      const chat = this.model.startChat({ history: messages });
+      const res = await chat.sendMessage("");
+      return { ok: [res.response.text()] };
+    } catch (e) {
+      console.log(e, "error in gemini api");
+      return { error: `${e}` };
+    }
+  }
+
+  private async apiCallStream(
+    messages: Content[],
+    handle: (c: any) => void,
+    isR1: boolean = false,
+  ): Promise<void> {
+    try {
+      const chat = this.model.startChat({ history: messages });
+      // Use the streaming endpoint and forward each chunk; the previous
+      // non-streaming sendMessage() call never invoked the handler at all.
+      const res = await chat.sendMessageStream("");
+      for await (const chunk of res.stream) {
+        handle(chunk.text());
+      }
+    } catch (e) {
+      console.log(e, "error in gemini api");
+      handle(`Error streaming Gemini, ${e}`);
+    }
+  }
+
+  public async send(sys: string, input: ChatMessage[]) {
+    const messages = this.mapMessages(input);
+    const truncated = this.truncateHistory(messages);
+    const res = await this.apiCall(truncated);
+    return res;
+  }
+
+  public async sendR1(input: ChatMessage[]) {
+    const messages = this.mapMessagesR1(input);
+    const truncated = this.truncateHistory(messages);
+    const res = await this.apiCall(truncated, true);
+    return res;
+  }
+
+  public async stream(
+    sys: string,
+    input: ChatMessage[],
+    handle: (c: any) => void,
+  ) {
+    const messages = this.mapMessages(input);
+    const truncated = this.truncateHistory(messages);
+    await this.apiCallStream(truncated, handle);
+  }
+
+  public async streamR1(input: ChatMessage[], handle: (c: any) => void) {
+    const messages = this.mapMessagesR1(input);
+    const truncated = this.truncateHistory(messages);
+    await this.apiCallStream(truncated, handle, true);
+  }
+
+  public async sendDoc(data: ArrayBuffer, mimeType: string, prompt: string) {
+    const res = await this.model.generateContent([
+      {
+        inlineData: {
+          data: Buffer.from(data).toString("base64"),
+          mimeType,
+        },
+      },
+      prompt,
+    ]);
+    return res;
+  }
+
+  private truncateHistory(messages: Content[]): Content[] {
+    // Recount after every removal (same fix as claude.ts).
+    const total = () =>
+      messages.reduce(
+        (sum, message) => sum + this.tokenizer(message.parts[0]?.text || ""),
+        0,
+      );
+    while (total() > this.maxTokens && messages.length > 1) {
+      messages.splice(0, 1);
+    }
+    return messages;
+  }
+}
diff --git a/src/logic/constants.ts b/src/logic/constants.ts
new file mode 100644
index 0000000..170477d
--- /dev/null
+++ b/src/logic/constants.ts
@@ -0,0 +1,3 @@
+// export const RESPONSE_LENGTH = 1024;
+export const RESPONSE_LENGTH = 256;
+export const MAX_TOKENS = 64_000;
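Every wrapper defaults to a crude `text.length / 3` token estimate. Both constructor arguments are optional, so a real tokenizer can be dropped in; a sketch using js-tiktoken, which is an assumption here and not a dependency of this commit:

```ts
import { getEncoding } from "js-tiktoken"; // assumed extra dependency
import Gemini from "./src/gemini";

const enc = getEncoding("cl100k_base");
const countTokens = (text: string) => enc.encode(text).length;

// A 1M-token budget matches Gemini 2.0 Flash's context window.
const convo = new Gemini(1_000_000, countTokens);
```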
diff --git a/src/model.ts b/src/model.ts
new file mode 100644
index 0000000..39b42dc
--- /dev/null
+++ b/src/model.ts
@@ -0,0 +1,124 @@
+import OpenAI from "openai";
+import { MAX_TOKENS, RESPONSE_LENGTH } from "./logic/constants";
+import type { AResult, ChatMessage, OChoice } from "./types";
+
+type Message = OpenAI.Chat.Completions.ChatCompletionMessageParam;
+
+type Props = {
+  baseURL: string;
+  apiKey: string;
+  model: string;
+  maxTokens?: number;
+  tokenizer?: (text: string) => number;
+};
+export default class Conversation {
+  private apiKey;
+  private baseURL;
+  private maxTokens: number = MAX_TOKENS;
+  private tokenizer: (text: string) => number = (text) => text.length / 3;
+  private api;
+  private model;
+
+  constructor(props: Props) {
+    this.apiKey = props.apiKey;
+    this.baseURL = props.baseURL;
+    this.api = new OpenAI({ baseURL: this.baseURL, apiKey: this.apiKey });
+    this.model = props.model;
+    if (props.maxTokens) this.maxTokens = props.maxTokens;
+    if (props.tokenizer) this.tokenizer = props.tokenizer;
+  }
+  public setModel(model: string) {
+    this.model = model;
+  }
+  private mapMessages(input: ChatMessage[]): Message[] {
+    return input.map((m) => {
+      return { role: m.author as any, content: m.text, name: m.author };
+    });
+  }
+
+  public async send(sys: string, input: ChatMessage[]): AResult<string[]> {
+    const messages = this.mapMessages(input);
+    const sysMsg: Message = { role: "system", content: sys };
+    const allMessages = [sysMsg, ...messages];
+    console.log("before truncation", allMessages);
+    const truncated = this.truncateHistory(allMessages);
+    const res = await this.apiCall(truncated);
+    if ("error" in res) return res;
+    else
+      try {
+        return { ok: res.ok.map((c) => c.message.content!) };
+      } catch (e) {
+        return { error: `${e}` };
+      }
+  }
+
+  public async stream(
+    sys: string,
+    input: ChatMessage[],
+    handle: (c: string) => void,
+  ) {
+    const messages = this.mapMessages(input);
+    const sysMsg: Message = { role: "system", content: sys };
+    const allMessages = [sysMsg, ...messages];
+    const truncated = this.truncateHistory(allMessages);
+    await this.apiCallStream(truncated, handle);
+  }
+
+  private truncateHistory(messages: Message[]): Message[] {
+    // Recount after every removal; a one-shot total never shrinks.
+    const total = () =>
+      messages.reduce(
+        (sum, message) => sum + this.tokenizer(message.content as string),
+        0,
+      );
+    while (total() > this.maxTokens && messages.length > 1) {
+      // Always keep the system message if it exists
+      const startIndex = messages[0].role === "system" ? 1 : 0;
+      messages.splice(startIndex, 1);
+    }
+    return messages;
+  }
+
+  // TODO custom temperature?
+  private async apiCall(messages: Message[]): AResult<OChoice[]> {
+    console.log({ messages }, "at the very end");
+    try {
+      const completion = await this.api.chat.completions.create({
+        temperature: 1.3,
+        model: this.model,
+        messages,
+        max_tokens: RESPONSE_LENGTH,
+      });
+      if (!completion) return { error: "null response from openai" };
+      return { ok: completion.choices };
+    } catch (e) {
+      console.log(e, "error in openai api");
+      return { error: `${e}` };
+    }
+  }
+
+  private async apiCallStream(
+    messages: Message[],
+    handle: (c: string) => void,
+  ): Promise<void> {
+    try {
+      const stream = await this.api.chat.completions.create({
+        temperature: 1.3,
+        model: this.model,
+        messages,
+        max_tokens: RESPONSE_LENGTH,
+        stream: true,
+      });
+
+      for await (const chunk of stream) {
+        for (const choice of chunk.choices) {
+          console.log({ choice });
+          if (!choice.delta) continue;
+          const cont = choice.delta.content;
+          if (!cont) continue;
+          handle(cont);
+        }
+      }
+    } catch (e) {
+      console.log(e, "error in openai api");
+      handle(`Error streaming OpenAI, ${e}`);
+    }
+  }
+}
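Because `model.ts` only needs an OpenAI-compatible `/chat/completions` endpoint, it can front any such provider. A sketch pointing it at DeepSeek — the base URL and model name are assumptions to verify against the provider's docs:

```ts
import Model from "./src/model";

const chat = new Model({
  baseURL: "https://api.deepseek.com", // assumed provider endpoint
  apiKey: Bun.env["DEEPSEEK_API_KEY"]!,
  model: "deepseek-chat",
});

await chat.stream(
  "Answer in one sentence.",
  [{ author: "user", text: "Why is the sky blue?", sent: Date.now() }],
  (chunk) => process.stdout.write(chunk),
);
```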
"assistant" : "user"; + const msg: Message = { role, content: m.text, name: m.author }; + if (prev?.role === role) acc[i - 1] = msg; + else acc = [...acc, msg]; + return acc; + }, []); + } + + public async send(sys: string, input: ChatMessage[]): AResult<OChoice[]> { + const messages = this.mapMessages(input); + const sysMsg: Message = { role: "system", content: sys }; + const allMessages = [sysMsg, ...messages]; + const truncated = this.truncateHistory(allMessages); + const res = await this.apiCall(truncated); + return res; + } + + public async sendR1(input: ChatMessage[]): AResult<OChoice[]> { + const messages = this.mapMessagesR1(input); + const truncated = this.truncateHistory(messages); + const res = await this.apiCall(truncated); + return res; + } + + public async stream( + sys: string, + input: ChatMessage[], + handle: (c: any) => void, + ) { + const messages = this.mapMessages(input); + const sysMsg: Message = { role: "system", content: sys }; + const allMessages = [sysMsg, ...messages]; + const truncated = this.truncateHistory(allMessages); + await this.apiCallStream(truncated, handle); + } + + public async streamR1(input: ChatMessage[], handle: (c: any) => void) { + const messages = this.mapMessagesR1(input); + const truncated = this.truncateHistory(messages); + await this.apiCallStream(truncated, handle); + } + + private truncateHistory(messages: Message[]): Message[] { + const totalTokens = messages.reduce((total, message) => { + return total + this.tokenizer(message.content as string); + }, 0); + while (totalTokens > this.maxTokens && messages.length > 1) { + // Always keep the system message if it exists + const startIndex = messages[0].role === "system" ? 1 : 0; + messages.splice(startIndex, 1); + } + return messages; + } + + private async apiCall(messages: Message[]): AResult<OChoice[]> { + try { + const completion = await this.openai.chat.completions.create({ + temperature: 1.3, + model: this.model, + messages, + max_tokens: RESPONSE_LENGTH, + }); + if (!completion) return { error: "null response from openai" }; + return { ok: completion.choices }; + } catch (e) { + console.log(e, "error in openai api"); + return { error: `${e}` }; + } + } + + private async apiCallStream( + messages: Message[], + handle: (c: string) => void, + ): Promise<void> { + try { + const stream = await this.openai.chat.completions.create({ + temperature: 1.3, + model: this.model, + messages, + max_tokens: RESPONSE_LENGTH, + stream: true, + }); + + for await (const chunk of stream) { + for (const choice of chunk.choices) { + console.log({ choice }); + if (!choice.delta) continue; + const cont = choice.delta.content; + if (!cont) continue; + handle(cont); + } + } + } catch (e) { + console.log(e, "error in openai api"); + handle(`Error streaming OpenAI, ${e}`); + } + } + + // assistant + async assistant() { + const assistant = await this.openai.beta.assistants.create({ + name: "Literature professor", + instructions: + "You are a professor of literature. 
Use your knowledge to analyze large pieces of text and answer questions from your users.", + model: this.model, + tools: [{ type: "file_search" }], + temperature: 0.7, + response_format: { type: "text" }, + }); + const vector_store = await this.openai.beta.vectorStores.create({ + name: "docs", + }); + const tool_resources = { + file_search: { vector_store_ids: [vector_store.id] }, + }; + const tant = await this.openai.beta.assistants.update(assistant.id, { + tool_resources, + }); + const thread = await this.openai.beta.threads.create(); + const msg = await this.openai.beta.threads.messages.create(thread.id, { + role: "user", + content: + "Greetings, pleasure to meet. Let's get started if you don't mind", + }); + const run = await this.openai.beta.threads.runs.create(thread.id, { + assistant_id: assistant.id, + instructions: "be nice", + }); + while (run.status === "in_progress") { + console.log({ run }); + } + } + async lookatFile(fo: FileObject) { + const tant = await this.openai.beta.assistants.create({ + name: "Literature professor", + instructions: + "You are a professor of literature. Use your knowledge to analyze large pieces of text and answer questions from your users.", + model: this.model, + tools: [{ type: "file_search" }], + temperature: 0.7, + response_format: { type: "text" }, + }); + const thread = await this.openai.beta.threads.create(); + await this.openai.beta.threads.messages.create(thread.id, { + role: "user", + content: + "Greetings, pleasure to meet. Let's get started if you don't mind. Look at this file and summarize its contents", + attachments: [{ file_id: fo.id, tools: [{ type: "file_search" }] }], + }); + const run = await this.openai.beta.threads.runs.createAndPoll(thread.id, { + assistant_id: tant.id, + }); + console.log({ run }); + const msgs = await this.openai.beta.threads.messages.list(run.thread_id); + console.log({ msgs }); + for (let m of msgs.data) { + console.log(m, "message on thread"); + } + } + + async uploadFile(res: Response) { + // const ff = fs.createReadStream("./lol") + const file = await this.openai.files.create({ + file: res, + purpose: "assistants", + }); + console.log({ file }, "uploaded"); + return file; + + // { + // "id": "file-abc123", + // "object": "file", + // "bytes": 120000, + // "created_at": 1677610602, + // "filename": "mydata.jsonl", + // "purpose": "fine-tune", + // } + } + + // async analyzeFile(){ + // const huh = await this.openai.beta.vectorStores.files.uploadAndPoll() + // } + + // mcp + + async mcp() { + const res = await fetch("http://localhost:8900/list"); + const list = await res.json(); + this.tryTools(list); + } + + async tryTools(tools: OpenAI.Chat.Completions.ChatCompletionTool[]) { + const messages: Message[] = [ + { role: "user", content: "What's on my twitter timeline right now?" 
}, + ]; + const completion = await this.openai.chat.completions.create({ + model: "gpt-4o-2024-11-20", + messages, + tools, + }); + if (!completion) return { error: "null response from openai" }; + + for (let choice of completion.choices) { + console.log({ choice }); + if (choice.message.tool_calls) { + const instance = new OpenAIToolUse( + this.openai, + "gpt-4o-2024-11-20", + tools, + choice.message, + choice.message.tool_calls, + ); + } + } + } +} diff --git a/src/openai_tools.ts b/src/openai_tools.ts new file mode 100644 index 0000000..feb2e4a --- /dev/null +++ b/src/openai_tools.ts @@ -0,0 +1,66 @@ +import type OpenAI from "openai"; +import type { Result } from "./types"; +type ToolCall = OpenAI.Chat.Completions.ChatCompletionMessageToolCall; + +type Tool = OpenAI.Chat.Completions.ChatCompletionTool; +type ToolMsg = OpenAI.Chat.Completions.ChatCompletionToolMessageParam; + +type Message = OpenAI.Chat.Completions.ChatCompletionMessage; + +export default class OpenAIToolUse { + api; + model; + socket; + tools; + message; + calls; + res: ToolMsg | null = null; + constructor( + api: OpenAI, + model: string, + tools: Tool[], + message: Message, + calls: ToolCall[], + ) { + this.api = api; + this.model = model; + this.socket = new WebSocket("http://localhost:8900"); + this.tools = tools; + this.message = message; + this.calls = calls; + for (let c of calls) { + console.log({ c }); + } + this.wsHandlers(); + } + wsHandlers() { + this.socket.addEventListener("open", (_data) => { + this.handleToolCalls(); + }); + this.socket.addEventListener("message", (ev) => { + const j = JSON.parse(ev.data); + if ("functionRes" in j) this.handleRes(j.functionRes); + }); + } + handleToolCalls() { + for (let c of this.calls) this.socket.send(JSON.stringify({ call: c })); + } + async handleRes(res: Result<ToolMsg>) { + if ("error" in res) { + console.log("TODO"); + return; + } + this.res = res.ok; + const messages = [this.message, res.ok]; + console.log({ messages }, "almost there"); + const completion = await this.api.chat.completions.create({ + model: this.model, + messages, + tools: this.tools, + }); + console.log({ completion }); + for (let choice of completion.choices) { + console.log({ choice }); + } + } +} diff --git a/src/prompts.ts b/src/prompts.ts new file mode 100644 index 0000000..60e8c0d --- /dev/null +++ b/src/prompts.ts @@ -0,0 +1,14 @@ +export const yagoSys = + "You are a helpful assistant of humans engaged in high stakes work. We call you Yagobot. Your user's name will appear in the 'name' field of this message. Please be brief but intelligent in your answers. Be civil but not overly polite, always tell the truth even if inconvenient. Address your user by his name."; + +export const biaSys = + "You are Yagobot, an extremely helpful assistant in charge of attending to a new mother to all her needs. Her name is Bia and she would like you to address her in both Thai and English at all times. Her husband will show up now and then, he's cool too."; + +export const GUEST_SYS = + "You are Yagobot, a helpful assistant with vast knowledge of everything there is to now in several languages. You are responding to a guest user now, be polite, but friendly and brief. Get to the point and strive to be both cool and useful. Respond in the language in which you were addressed."; + +export const LEO_SYS = `You are Yagobot, a super advanced tutor AI to help the children of foreign elites with the best education in the world. You are talking to Leo, a precocious mixed-race Japanese 11 year old. 
diff --git a/src/prompts.ts b/src/prompts.ts
new file mode 100644
index 0000000..60e8c0d
--- /dev/null
+++ b/src/prompts.ts
@@ -0,0 +1,14 @@
+export const yagoSys =
+  "You are a helpful assistant of humans engaged in high stakes work. We call you Yagobot. Your user's name will appear in the 'name' field of this message. Please be brief but intelligent in your answers. Be civil but not overly polite, always tell the truth even if inconvenient. Address your user by his name.";
+
+export const biaSys =
+  "You are Yagobot, an extremely helpful assistant in charge of attending to a new mother to all her needs. Her name is Bia and she would like you to address her in both Thai and English at all times. Her husband will show up now and then, he's cool too.";
+
+export const GUEST_SYS =
+  "You are Yagobot, a helpful assistant with vast knowledge of everything there is to know in several languages. You are responding to a guest user now, be polite, but friendly and brief. Get to the point and strive to be both cool and useful. Respond in the language in which you were addressed.";
+
+export const LEO_SYS = `You are Yagobot, a super advanced tutor AI to help the children of foreign elites with the best education in the world. You are talking to Leo, a precocious mixed-race Japanese 11 year old. His Japanese name is 黎雄. He can't speak English well but he can understand a bit. Please respond to him the same content thrice: in English first, then English but in IPA phonetic notation, then in Japanese. Try to be proactive and ask him questions yourself if you see he isn't talking much.`;
+
+export const SAYURI_SYS = `You are Yagobot, a super advanced tutor AI to help the children of foreign elites with the best education in the world. You are talking to Sayuri, a lovely mixed-race Japanese 9 year old. Her Japanese name is 紗悠里. She can't speak English well but she can understand a bit. Please respond to her the same content thrice: in English first, then English but in IPA phonetic notation, then in Japanese. Try to be proactive and ask her questions yourself if you see she isn't talking much.`;
+
+export const BOOKWORM_SYS = `You are a professor of literature. Use your knowledge to analyze large pieces of text and answer questions from your users.`;
diff --git a/src/types/index.ts b/src/types/index.ts
new file mode 100644
index 0000000..97be443
--- /dev/null
+++ b/src/types/index.ts
@@ -0,0 +1,15 @@
+import type OpenAI from "openai";
+export type ChatMessage = {
+  author: string;
+  text: string;
+  sent: number;
+  reasoning?: string;
+};
+export type Result<T> = { ok: T } | { error: string };
+export type AResult<T> = Promise<{ ok: T } | { error: string }>;
+
+// openai
+export type OChoice = OpenAI.Chat.Completions.ChatCompletion.Choice;
+export type OChunk = OpenAI.Chat.Completions.ChatCompletionChunk.Choice;
+export type OMessage = OpenAI.Chat.Completions.ChatCompletionMessageParam;
+export type ContentType = { text: string } | { audio: Response };
diff --git a/src/types/mtproto.ts b/src/types/mtproto.ts
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/types/mtproto.ts
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..238655f
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,27 @@
+{
+  "compilerOptions": {
+    // Enable latest features
+    "lib": ["ESNext", "DOM"],
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleDetection": "force",
+    "jsx": "react-jsx",
+    "allowJs": true,
+
+    // Bundler mode
+    "moduleResolution": "bundler",
+    "allowImportingTsExtensions": true,
+    "verbatimModuleSyntax": true,
+    "noEmit": true,
+
+    // Best practices
+    "strict": true,
+    "skipLibCheck": true,
+    "noFallthroughCasesInSwitch": true,
+
+    // Some stricter flags (disabled by default)
+    "noUnusedLocals": false,
+    "noUnusedParameters": false,
+    "noPropertyAccessFromIndexSignature": false
+  }
+}
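The `Result`/`AResult` union in `src/types` is how every wrapper reports failure without throwing: checking for the `"error"` key narrows the type, so the happy path stays fully typed. A small helper showing the intended consumption:

```ts
import type { AResult } from "./src/types";

// Unwrap an AResult, logging the error branch and falling back gracefully.
async function firstOr(res: AResult<string[]>, fallback: string): Promise<string> {
  const r = await res;
  if ("error" in r) {
    console.error(r.error);
    return fallback;
  }
  return r.ok[0] ?? fallback;
}
```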
