diff --git a/e2e-tests/snapshots/ollama.spec.ts_send-message-to-ollama-1.aria.yml b/e2e-tests/snapshots/ollama.spec.ts_send-message-to-ollama-1.aria.yml index c26e957..6e76411 100644 --- a/e2e-tests/snapshots/ollama.spec.ts_send-message-to-ollama-1.aria.yml +++ b/e2e-tests/snapshots/ollama.spec.ts_send-message-to-ollama-1.aria.yml @@ -1,4 +1,8 @@ - paragraph: hi -- paragraph: ollamachunkollamachunk +- img +- text: file1.txt +- img +- text: file1.txt +- paragraph: More EOM - button "Retry": - img \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index fd28f2a..757cfd5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,25 +1,26 @@ { "name": "dyad", - "version": "0.17.0-beta.2", + "version": "0.18.0-beta.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "dyad", - "version": "0.17.0-beta.2", + "version": "0.18.0-beta.1", "license": "MIT", "dependencies": { - "@ai-sdk/anthropic": "^1.2.8", - "@ai-sdk/google": "^1.2.19", - "@ai-sdk/openai": "^1.3.24", - "@ai-sdk/openai-compatible": "^0.2.13", + "@ai-sdk/anthropic": "^2.0.4", + "@ai-sdk/google": "^2.0.6", + "@ai-sdk/openai": "^2.0.15", + "@ai-sdk/openai-compatible": "^1.0.8", + "@ai-sdk/provider-utils": "^3.0.3", "@biomejs/biome": "^1.9.4", "@dyad-sh/supabase-management-js": "v1.0.0", "@lexical/react": "^0.33.1", "@monaco-editor/react": "^4.7.0-rc.0", "@neondatabase/api-client": "^2.1.0", "@neondatabase/serverless": "^1.0.1", - "@openrouter/ai-sdk-provider": "^0.4.5", + "@openrouter/ai-sdk-provider": "^1.1.2", "@radix-ui/react-accordion": "^1.2.4", "@radix-ui/react-alert-dialog": "^1.1.13", "@radix-ui/react-checkbox": "^1.3.2", @@ -43,7 +44,7 @@ "@types/uuid": "^10.0.0", "@vercel/sdk": "^1.10.0", "@vitejs/plugin-react": "^4.3.4", - "ai": "^4.3.4", + "ai": "^5.0.15", "better-sqlite3": "^11.9.1", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -65,7 +66,6 @@ "lexical-beautiful-mentions": "^0.1.47", "lucide-react": "^0.487.0", "monaco-editor": "^0.52.2", - "ollama-ai-provider": "^1.2.0", "openai": "^4.91.1", "posthog-js": "^1.236.3", "react": "^19.0.0", @@ -83,7 +83,8 @@ "tree-kill": "^1.2.2", "tw-animate-css": "^1.2.5", "update-electron-app": "^3.1.1", - "uuid": "^11.1.0" + "uuid": "^11.1.0", + "zod": "^3.25.76" }, "devDependencies": { "@electron-forge/cli": "^7.8.0", @@ -126,73 +127,89 @@ } }, "node_modules/@ai-sdk/anthropic": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-1.2.12.tgz", - "integrity": "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.4.tgz", + "integrity": "sha512-ii2bZEUPwBitUiK1dpX+HsOarcDGY71G9TVdSJqbfXSVqa+speJNZ8PA/bjuNMml0NyX8VxNsaMg3SwBUCZspA==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8" + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3" }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.0.0" + "zod": "^3.25.76 || ^4" + } + }, + "node_modules/@ai-sdk/gateway": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-1.0.7.tgz", + "integrity": "sha512-Athrq7OARuNc0iHZJP6InhSQ53tImCc990vMWyR1UHaZgPZJbXjKhIMiOj54F0I0Nlemx48V4fHYUTfLkJotnQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": 
"^3.25.76 || ^4" } }, "node_modules/@ai-sdk/google": { - "version": "1.2.22", - "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-1.2.22.tgz", - "integrity": "sha512-Ppxu3DIieF1G9pyQ5O1Z646GYR0gkC57YdBqXJ82qvCdhEhZHu0TWhmnOoeIWe2olSbuDeoOY+MfJrW8dzS3Hw==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.6.tgz", + "integrity": "sha512-8acuseWJI+RRH99JDWM/n7IJRuuGNa4YzLXB/leqE/ZByHyIiVWGADjJi/vfnJnmdM5fQnezJ6SRTF6feI5rSQ==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8" + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3" }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.0.0" + "zod": "^3.25.76 || ^4" } }, "node_modules/@ai-sdk/openai": { - "version": "1.3.24", - "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-1.3.24.tgz", - "integrity": "sha512-GYXnGJTHRTZc4gJMSmFRgEQudjqd4PUN0ZjQhPwOAYH1yOAvQoG/Ikqs+HyISRbLPCrhbZnPKCNHuRU4OfpW0Q==", + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.15.tgz", + "integrity": "sha512-/IUyQ9ck4uUTtGojvQamcUWpNWkwpL/P1F6LYRxpQGj07H00oJEBH/VUizrIq0ZvW/vkuK6c6X4UJS9PrdYyxA==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8" + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3" }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.0.0" + "zod": "^3.25.76 || ^4" } }, "node_modules/@ai-sdk/openai-compatible": { - "version": "0.2.16", - "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-0.2.16.tgz", - "integrity": "sha512-LkvfcM8slJedRyJa/MiMiaOzcMjV1zNDwzTHEGz7aAsgsQV0maLfmJRi/nuSwf5jmp0EouC+JXXDUj2l94HgQw==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-1.0.8.tgz", + "integrity": "sha512-vxJ7tUisGTS4IzxboU3NT6JYsaUqRiGjvugFvx/zW5cT7FaIUprDwQIMM4ZrmH5b9kJ48rOEXqNjpdtkvtXlVA==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8" + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3" }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.0.0" + "zod": "^3.25.76 || ^4" } }, "node_modules/@ai-sdk/provider": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.3.tgz", - "integrity": "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz", + "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==", "license": "Apache-2.0", "dependencies": { "json-schema": "^0.4.0" @@ -202,61 +219,21 @@ } }, "node_modules/@ai-sdk/provider-utils": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.8.tgz", - "integrity": "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.3.tgz", + "integrity": "sha512-kAxIw1nYmFW1g5TvE54ZB3eNtgZna0RnLjPUp1ltz1+t9xkXJIuDT4atrwfau9IbS0BOef38wqrI8CjFfQrxhw==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "nanoid": "^3.3.8", - "secure-json-parse": "^2.7.0" - }, - "engines": { - "node": ">=18" - }, - 
"peerDependencies": { - "zod": "^3.23.8" - } - }, - "node_modules/@ai-sdk/react": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.12.tgz", - "integrity": "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider-utils": "2.2.8", - "@ai-sdk/ui-utils": "1.2.11", - "swr": "^2.2.5", - "throttleit": "2.1.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "react": "^18 || ^19 || ^19.0.0-rc", - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, - "node_modules/@ai-sdk/ui-utils": { - "version": "1.2.11", - "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.11.tgz", - "integrity": "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8", + "@ai-sdk/provider": "2.0.0", + "@standard-schema/spec": "^1.0.0", + "eventsource-parser": "^3.0.3", "zod-to-json-schema": "^3.24.1" }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.23.8" + "zod": "^3.25.76 || ^4" } }, "node_modules/@ampproject/remapping": { @@ -4317,54 +4294,16 @@ } }, "node_modules/@openrouter/ai-sdk-provider": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-0.4.6.tgz", - "integrity": "sha512-oUa8xtssyUhiKEU/aW662lsZ0HUvIUTRk8vVIF3Ha3KI/DnqX54zmVIuzYnaDpermqhy18CHqblAY4dDt1JW3g==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.1.2.tgz", + "integrity": "sha512-cfiKVpNygGFaJojBHFvtTf7UiF458Xh9yPcTg4FXF7bGYN5V33Rxx9dXNE12fjv6lHeC5C7jwQHDrzUIFol1iQ==", "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.9", - "@ai-sdk/provider-utils": "2.1.10" - }, "engines": { "node": ">=18" }, "peerDependencies": { - "zod": "^3.0.0" - } - }, - "node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.9.tgz", - "integrity": "sha512-jie6ZJT2ZR0uVOVCDc9R2xCX5I/Dum/wEK28lx21PJx6ZnFAN9EzD2WsPhcDWfCgGx3OAZZ0GyM3CEobXpa9LA==", - "license": "Apache-2.0", - "dependencies": { - "json-schema": "^0.4.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider-utils": { - "version": "2.1.10", - "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.1.10.tgz", - "integrity": "sha512-4GZ8GHjOFxePFzkl3q42AU0DQOtTQ5w09vmaWUf/pKFXJPizlnzKSUkF0f+VkapIUfDugyMqPMT1ge8XQzVI7Q==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "1.0.9", - "eventsource-parser": "^3.0.0", - "nanoid": "^3.3.8", - "secure-json-parse": "^2.7.0" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } + "ai": "^5.0.0", + "zod": "^3.24.1 || ^v4" } }, "node_modules/@opentelemetry/api": { @@ -5997,6 +5936,12 @@ "url": "https://github.com/sindresorhus/is?sponsor=1" } }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "license": "MIT" + }, 
"node_modules/@swc/helpers": { "version": "0.5.15", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", @@ -6630,12 +6575,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/diff-match-patch": { - "version": "1.0.36", - "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", - "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==", - "license": "MIT" - }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -7402,29 +7341,21 @@ } }, "node_modules/ai": { - "version": "4.3.19", - "resolved": "https://registry.npmjs.org/ai/-/ai-4.3.19.tgz", - "integrity": "sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==", + "version": "5.0.15", + "resolved": "https://registry.npmjs.org/ai/-/ai-5.0.15.tgz", + "integrity": "sha512-EX5hF+NVFm6R11mvdZRbg6eJEjyMlniI4/xOnyTh4VtDQ457lhIgi3kDGrHW3/qw9ELon9m2e7AK3g5z5sLwsQ==", "license": "Apache-2.0", "dependencies": { - "@ai-sdk/provider": "1.1.3", - "@ai-sdk/provider-utils": "2.2.8", - "@ai-sdk/react": "1.2.12", - "@ai-sdk/ui-utils": "1.2.11", - "@opentelemetry/api": "1.9.0", - "jsondiffpatch": "0.6.0" + "@ai-sdk/gateway": "1.0.7", + "@ai-sdk/provider": "2.0.0", + "@ai-sdk/provider-utils": "3.0.3", + "@opentelemetry/api": "1.9.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "react": "^18 || ^19 || ^19.0.0-rc", - "zod": "^3.23.8" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - } + "zod": "^3.25.76 || ^4" } }, "node_modules/ajv": { @@ -8956,12 +8887,6 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/diff-match-patch": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", - "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==", - "license": "Apache-2.0" - }, "node_modules/diff-sequences": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", @@ -13426,35 +13351,6 @@ "node": ">=6" } }, - "node_modules/jsondiffpatch": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz", - "integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==", - "license": "MIT", - "dependencies": { - "@types/diff-match-patch": "^1.0.36", - "chalk": "^5.3.0", - "diff-match-patch": "^1.0.5" - }, - "bin": { - "jsondiffpatch": "bin/jsondiffpatch.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - } - }, - "node_modules/jsondiffpatch/node_modules/chalk": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.5.0.tgz", - "integrity": "sha512-1tm8DTaJhPBG3bIkVeZt1iZM9GfSX2lzOeDVZH9R9ffRHpmHvxZ/QhgQH/aDTkswQVt+YHdXAdS/In/30OjCbg==", - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, "node_modules/jsonfile": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", @@ -16041,28 +15937,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/ollama-ai-provider": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/ollama-ai-provider/-/ollama-ai-provider-1.2.0.tgz", - "integrity": 
"sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww==", - "license": "Apache-2.0", - "dependencies": { - "@ai-sdk/provider": "^1.0.0", - "@ai-sdk/provider-utils": "^2.0.0", - "partial-json": "0.1.7" - }, - "engines": { - "node": ">=18" - }, - "peerDependencies": { - "zod": "^3.0.0" - }, - "peerDependenciesMeta": { - "zod": { - "optional": true - } - } - }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -16497,12 +16371,6 @@ "node": ">=0.10.0" } }, - "node_modules/partial-json": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz", - "integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==", - "license": "MIT" - }, "node_modules/path-browserify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", @@ -17972,12 +17840,6 @@ "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", "license": "MIT" }, - "node_modules/secure-json-parse": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", - "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==", - "license": "BSD-3-Clause" - }, "node_modules/semver": { "version": "7.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", @@ -19237,19 +19099,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/swr": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/swr/-/swr-2.3.6.tgz", - "integrity": "sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==", - "license": "MIT", - "dependencies": { - "dequal": "^2.0.3", - "use-sync-external-store": "^1.4.0" - }, - "peerDependencies": { - "react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/tabbable": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", @@ -19424,18 +19273,6 @@ "dev": true, "license": "MIT" }, - "node_modules/throttleit": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz", - "integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/tiny-each-async": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz", diff --git a/package.json b/package.json index b1e7295..178ea1a 100644 --- a/package.json +++ b/package.json @@ -84,17 +84,18 @@ "vitest": "^3.1.1" }, "dependencies": { - "@ai-sdk/anthropic": "^1.2.8", - "@ai-sdk/google": "^1.2.19", - "@ai-sdk/openai": "^1.3.24", - "@ai-sdk/openai-compatible": "^0.2.13", + "@ai-sdk/anthropic": "^2.0.4", + "@ai-sdk/google": "^2.0.6", + "@ai-sdk/openai": "^2.0.15", + "@ai-sdk/openai-compatible": "^1.0.8", + "@ai-sdk/provider-utils": "^3.0.3", "@biomejs/biome": "^1.9.4", "@dyad-sh/supabase-management-js": "v1.0.0", "@lexical/react": "^0.33.1", "@monaco-editor/react": "^4.7.0-rc.0", "@neondatabase/api-client": "^2.1.0", "@neondatabase/serverless": "^1.0.1", - "@openrouter/ai-sdk-provider": "^0.4.5", + "@openrouter/ai-sdk-provider": "^1.1.2", "@radix-ui/react-accordion": "^1.2.4", 
"@radix-ui/react-alert-dialog": "^1.1.13", "@radix-ui/react-checkbox": "^1.3.2", @@ -118,7 +119,7 @@ "@types/uuid": "^10.0.0", "@vercel/sdk": "^1.10.0", "@vitejs/plugin-react": "^4.3.4", - "ai": "^4.3.4", + "ai": "^5.0.15", "better-sqlite3": "^11.9.1", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -140,7 +141,6 @@ "lexical-beautiful-mentions": "^0.1.47", "lucide-react": "^0.487.0", "monaco-editor": "^0.52.2", - "ollama-ai-provider": "^1.2.0", "openai": "^4.91.1", "posthog-js": "^1.236.3", "react": "^19.0.0", @@ -158,7 +158,8 @@ "tree-kill": "^1.2.2", "tw-animate-css": "^1.2.5", "update-electron-app": "^3.1.1", - "uuid": "^11.1.0" + "uuid": "^11.1.0", + "zod": "^3.25.76" }, "lint-staged": { "**/*.{js,mjs,cjs,jsx,ts,mts,cts,tsx,vue,astro,svelte}": "oxlint", diff --git a/src/ipc/handlers/chat_stream_handlers.ts b/src/ipc/handlers/chat_stream_handlers.ts index c737a6d..25ce038 100644 --- a/src/ipc/handlers/chat_stream_handlers.ts +++ b/src/ipc/handlers/chat_stream_handlers.ts @@ -1,7 +1,7 @@ import { v4 as uuidv4 } from "uuid"; import { ipcMain } from "electron"; import { - CoreMessage, + ModelMessage, TextPart, ImagePart, streamText, @@ -134,14 +134,14 @@ async function processStreamChunks({ chunk = ""; inThinkingBlock = false; } - chunk += part.textDelta; - } else if (part.type === "reasoning") { + chunk += part.text; + } else if (part.type === "reasoning-delta") { if (!inThinkingBlock) { chunk = ""; inThinkingBlock = true; } - chunk += escapeDyadTags(part.textDelta); + chunk += escapeDyadTags(part.text); } if (!chunk) { @@ -603,7 +603,7 @@ This conversation includes one or more image attachments. When the user uploads ] as const) : []; - let chatMessages: CoreMessage[] = [ + let chatMessages: ModelMessage[] = [ ...codebasePrefix, ...otherCodebasePrefix, ...limitedMessageHistory.map((msg) => ({ @@ -647,7 +647,7 @@ This conversation includes one or more image attachments. When the user uploads content: "Summarize the following chat: " + formatMessagesForSummary(previousChat?.messages ?? []), - } satisfies CoreMessage, + } satisfies ModelMessage, ]; } @@ -655,7 +655,7 @@ This conversation includes one or more image attachments. When the user uploads chatMessages, modelClient, }: { - chatMessages: CoreMessage[]; + chatMessages: ModelMessage[]; modelClient: ModelClient; }) => { const dyadRequestId = uuidv4(); @@ -668,7 +668,7 @@ This conversation includes one or more image attachments. When the user uploads logger.log("sending AI request"); } return streamText({ - maxTokens: await getMaxTokens(settings.selectedModel), + maxOutputTokens: await getMaxTokens(settings.selectedModel), temperature: await getTemperature(settings.selectedModel), maxRetries: 2, model: modelClient.model, @@ -798,7 +798,7 @@ This conversation includes one or more image attachments. When the user uploads break; } if (part.type !== "text-delta") continue; // ignore reasoning for continuation - fullResponse += part.textDelta; + fullResponse += part.text; fullResponse = cleanFullResponse(fullResponse); fullResponse = await processResponseChunkUpdate({ fullResponse, @@ -825,7 +825,7 @@ This conversation includes one or more image attachments. 
When the user uploads let autoFixAttempts = 0; const originalFullResponse = fullResponse; - const previousAttempts: CoreMessage[] = []; + const previousAttempts: ModelMessage[] = []; while ( problemReport.problems.length > 0 && autoFixAttempts < 2 && @@ -1161,9 +1161,9 @@ async function replaceTextAttachmentWithContent( // Helper function to convert traditional message to one with proper image attachments async function prepareMessageWithAttachments( - message: CoreMessage, + message: ModelMessage, attachmentPaths: string[], -): Promise { +): Promise { let textContent = message.content; // Get the original text content if (typeof textContent !== "string") { diff --git a/src/ipc/handlers/local_model_ollama_handler.ts b/src/ipc/handlers/local_model_ollama_handler.ts index e1ed804..a05e1d9 100644 --- a/src/ipc/handlers/local_model_ollama_handler.ts +++ b/src/ipc/handlers/local_model_ollama_handler.ts @@ -37,7 +37,9 @@ export function parseOllamaHost(host?: string): string { return `http://${host}:11434`; } -const OLLAMA_API_URL = parseOllamaHost(process.env.OLLAMA_HOST); +export function getOllamaApiUrl(): string { + return parseOllamaHost(process.env.OLLAMA_HOST); +} interface OllamaModel { name: string; @@ -55,7 +57,7 @@ interface OllamaModel { export async function fetchOllamaModels(): Promise { try { - const response = await fetch(`${OLLAMA_API_URL}/api/tags`); + const response = await fetch(`${getOllamaApiUrl()}/api/tags`); if (!response.ok) { throw new Error(`Failed to fetch model: ${response.statusText}`); } diff --git a/src/ipc/utils/get_model_client.ts b/src/ipc/utils/get_model_client.ts index a696828..8f46868 100644 --- a/src/ipc/utils/get_model_client.ts +++ b/src/ipc/utils/get_model_client.ts @@ -1,9 +1,7 @@ -import { LanguageModelV1 } from "ai"; import { createOpenAI } from "@ai-sdk/openai"; import { createGoogleGenerativeAI as createGoogle } from "@ai-sdk/google"; import { createAnthropic } from "@ai-sdk/anthropic"; import { createOpenRouter } from "@openrouter/ai-sdk-provider"; -import { createOllama } from "ollama-ai-provider"; import { createOpenAICompatible } from "@ai-sdk/openai-compatible"; import type { LargeLanguageModel, UserSettings } from "../../lib/schemas"; import { getEnvVar } from "./read_env"; @@ -13,6 +11,9 @@ import { LanguageModelProvider } from "../ipc_types"; import { createDyadEngine } from "./llm_engine_provider"; import { LM_STUDIO_BASE_URL } from "./lm_studio_utils"; +import { LanguageModel } from "ai"; +import { createOllamaProvider } from "./ollama_provider"; +import { getOllamaApiUrl } from "../handlers/local_model_ollama_handler"; const dyadEngineUrl = process.env.DYAD_ENGINE_URL; const dyadGatewayUrl = process.env.DYAD_GATEWAY_URL; @@ -33,7 +34,7 @@ const AUTO_MODELS = [ ]; export interface ModelClient { - model: LanguageModelV1; + model: LanguageModel; builtinProviderId?: string; } @@ -168,7 +169,10 @@ function getRegularModelClient( model: LargeLanguageModel, settings: UserSettings, providerConfig: LanguageModelProvider, -) { +): { + modelClient: ModelClient; + backupModelClients: ModelClient[]; +} { // Get API key for the specific provider const apiKey = settings.providerSettings?.[model.provider]?.apiKey?.value || @@ -220,13 +224,11 @@ function getRegularModelClient( }; } case "ollama": { - // Ollama typically runs locally and doesn't require an API key in the same way - const provider = createOllama({ - baseURL: process.env.OLLAMA_HOST, - }); + const provider = createOllamaProvider({ baseURL: getOllamaApiUrl() }); return { modelClient: { model: 
provider(model.name), + builtinProviderId: providerId, }, backupModelClients: [], }; diff --git a/src/ipc/utils/llm_engine_provider.ts b/src/ipc/utils/llm_engine_provider.ts index 8b62cc2..d2ab3c0 100644 --- a/src/ipc/utils/llm_engine_provider.ts +++ b/src/ipc/utils/llm_engine_provider.ts @@ -1,7 +1,4 @@ -import { - LanguageModelV1, - LanguageModelV1ObjectGenerationMode, -} from "@ai-sdk/provider"; +import { LanguageModel } from "ai"; import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible"; import { FetchFunction, @@ -9,7 +6,6 @@ import { withoutTrailingSlash, } from "@ai-sdk/provider-utils"; -import { OpenAICompatibleChatSettings } from "@ai-sdk/openai-compatible"; import log from "electron-log"; import { getExtraProviderOptions } from "./thinking_utils"; import type { UserSettings } from "../../lib/schemas"; @@ -18,7 +14,7 @@ const logger = log.scope("llm_engine_provider"); export type ExampleChatModelId = string & {}; -export interface ExampleChatSettings extends OpenAICompatibleChatSettings { +export interface ExampleChatSettings { files?: { path: string; content: string }[]; } export interface ExampleProviderSettings { @@ -56,10 +52,7 @@ export interface DyadEngineProvider { /** Creates a model for text generation. */ - ( - modelId: ExampleChatModelId, - settings?: ExampleChatSettings, - ): LanguageModelV1; + (modelId: ExampleChatModelId, settings?: ExampleChatSettings): LanguageModel; /** Creates a chat model for text generation. @@ -67,7 +60,7 @@ Creates a chat model for text generation. chatModel( modelId: ExampleChatModelId, settings?: ExampleChatSettings, - ): LanguageModelV1; + ): LanguageModel; } export function createDyadEngine( @@ -113,13 +106,13 @@ export function createDyadEngine( settings: ExampleChatSettings = {}, ) => { // Extract files from settings to process them appropriately - const { files, ...restSettings } = settings; + const { files } = settings; // Create configuration with file handling const config = { ...getCommonModelConfig(), - defaultObjectGenerationMode: - "tool" as LanguageModelV1ObjectGenerationMode, + // defaultObjectGenerationMode: + // "tool" as LanguageModelV1ObjectGenerationMode, // Custom fetch implementation that adds files to the request fetch: (input: RequestInfo | URL, init?: RequestInit) => { // Use default fetch if no init or body @@ -181,7 +174,7 @@ export function createDyadEngine( }, }; - return new OpenAICompatibleChatLanguageModel(modelId, restSettings, config); + return new OpenAICompatibleChatLanguageModel(modelId, config); }; const provider = ( diff --git a/src/ipc/utils/ollama_provider.ts b/src/ipc/utils/ollama_provider.ts new file mode 100644 index 0000000..6ec2f06 --- /dev/null +++ b/src/ipc/utils/ollama_provider.ts @@ -0,0 +1,39 @@ +import { LanguageModel } from "ai"; +import { createOpenAICompatible } from "@ai-sdk/openai-compatible"; +import type { FetchFunction } from "@ai-sdk/provider-utils"; +import { withoutTrailingSlash } from "@ai-sdk/provider-utils"; +import type {} from "@ai-sdk/provider"; + +type OllamaChatModelId = string; + +export interface OllamaProviderOptions { + /** + * Base URL for the Ollama API. For real Ollama, use e.g. http://localhost:11434/api + * The provider will POST to `${baseURL}/chat`. 
+ * If undefined, defaults to http://localhost:11434/api + */ + baseURL?: string; + headers?: Record; + fetch?: FetchFunction; +} + +export interface OllamaChatSettings {} + +export interface OllamaProvider { + (modelId: OllamaChatModelId, settings?: OllamaChatSettings): LanguageModel; +} + +export function createOllamaProvider( + options?: OllamaProviderOptions, +): OllamaProvider { + const base = withoutTrailingSlash( + options?.baseURL ?? "http://localhost:11434", + )!; + const v1Base = (base.endsWith("/v1") ? base : `${base}/v1`) as string; + const provider = createOpenAICompatible({ + name: "ollama", + baseURL: v1Base, + headers: options?.headers, + }); + return (modelId: OllamaChatModelId) => provider(modelId); +} diff --git a/testing/fake-llm-server/index.ts b/testing/fake-llm-server/index.ts index 4404f4a..bc36acb 100644 --- a/testing/fake-llm-server/index.ts +++ b/testing/fake-llm-server/index.ts @@ -94,51 +94,6 @@ app.get("/ollama/api/tags", (req, res) => { res.json(ollamaModels); }); -app.post("/ollama/chat", (req, res) => { - // Tell the client we're going to stream NDJSON - res.setHeader("Content-Type", "application/x-ndjson"); - res.setHeader("Cache-Control", "no-cache"); - - // Chunk #1 – partial answer - const firstChunk = { - model: "llama3.2", - created_at: "2023-08-04T08:52:19.385406455-07:00", - message: { - role: "assistant", - content: "ollamachunk", - images: null, - }, - done: false, - }; - - // Chunk #2 – final answer + metrics - const secondChunk = { - model: "llama3.2", - created_at: "2023-08-04T19:22:45.499127Z", - message: { - role: "assistant", - content: "", - }, - done: true, - total_duration: 4883583458, - load_duration: 1334875, - prompt_eval_count: 26, - prompt_eval_duration: 342546000, - eval_count: 282, - eval_duration: 4535599000, - }; - - // Send the first object right away - res.write(JSON.stringify(firstChunk) + "\n"); - res.write(JSON.stringify(firstChunk) + "\n"); - - // …and the second one a moment later to mimic streaming - setTimeout(() => { - res.write(JSON.stringify(secondChunk) + "\n"); - res.end(); // Close the HTTP stream - }, 300); // 300 ms delay – tweak as you like -}); - // LM Studio specific endpoints app.get("/lmstudio/api/v0/models", (req, res) => { const lmStudioModels = { @@ -182,7 +137,7 @@ app.get("/lmstudio/api/v0/models", (req, res) => { res.json(lmStudioModels); }); -["lmstudio", "gateway", "engine"].forEach((provider) => { +["lmstudio", "gateway", "engine", "ollama"].forEach((provider) => { app.post( `/${provider}/v1/chat/completions`, createChatCompletionHandler(provider),
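
For reference (none of what follows is part of the patch itself), the stream-part change in `processStreamChunks` comes down to two renames in AI SDK v5: plain text still arrives as `text-delta` parts but the payload moved from `part.textDelta` to `part.text`, and reasoning tokens now arrive as `reasoning-delta` parts instead of `reasoning`. A stripped-down sketch of that loop, using plain `<think>` markers in place of the real dyad-tag escaping and omitting the abort/continuation handling the actual handler performs:

```ts
import type { TextStreamPart, ToolSet } from "ai";

// Minimal sketch: accumulate a response from a v5 fullStream, wrapping
// reasoning deltas in <think> markers (a stand-in for the real dyad tags).
async function collectResponse(
  fullStream: AsyncIterable<TextStreamPart<ToolSet>>,
): Promise<string> {
  let fullResponse = "";
  let inThinkingBlock = false;

  for await (const part of fullStream) {
    if (part.type === "text-delta") {
      if (inThinkingBlock) {
        fullResponse += "</think>\n";
        inThinkingBlock = false;
      }
      fullResponse += part.text; // v4: part.textDelta
    } else if (part.type === "reasoning-delta") {
      if (!inThinkingBlock) {
        fullResponse += "<think>\n";
        inThinkingBlock = true;
      }
      fullResponse += part.text; // v4: part type was "reasoning"
    }
  }
  if (inThinkingBlock) fullResponse += "</think>\n";
  return fullResponse;
}
```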
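
In `local_model_ollama_handler.ts`, the module-level `OLLAMA_API_URL` constant becomes `getOllamaApiUrl()`, so `OLLAMA_HOST` is read on every call rather than once at import time; this is what lets tests and the e2e setup repoint Ollama traffic after the main process has already loaded the module. A small illustration (the env value is made up):

```ts
import { getOllamaApiUrl } from "./src/ipc/handlers/local_model_ollama_handler";

// With the old module-level constant, this value was frozen at import time.
const before = getOllamaApiUrl();

// Hypothetical override, e.g. pointing at a fake LLM server in tests.
process.env.OLLAMA_HOST = "http://127.0.0.1:3500/ollama";

// Now re-resolved through parseOllamaHost on every call.
const after = getOllamaApiUrl();

console.log({ before, after });
```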
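
The removed `ollama-ai-provider` package is replaced by `createOllamaProvider`, which simply points `@ai-sdk/openai-compatible` at Ollama's `/v1` endpoint. A minimal end-to-end sketch of how the pieces compose under AI SDK v5; the model name, the standalone `main()`, and the import paths are illustrative rather than taken from this diff, and the comments call out the v5 renames (`ModelMessage`, `maxOutputTokens`) that `chat_stream_handlers.ts` adopts:

```ts
import { streamText, type ModelMessage } from "ai";
import { createOllamaProvider } from "./src/ipc/utils/ollama_provider";
import { getOllamaApiUrl } from "./src/ipc/handlers/local_model_ollama_handler";

async function main() {
  // Resolves OLLAMA_HOST at call time; the provider appends /v1 itself,
  // so requests go to the OpenAI-compatible /v1/chat/completions route.
  const ollama = createOllamaProvider({ baseURL: getOllamaApiUrl() });

  const messages: ModelMessage[] = [
    // v4 named this type CoreMessage.
    { role: "user", content: "Say hello in one sentence." },
  ];

  const result = streamText({
    model: ollama("llama3.2"), // illustrative model name
    messages,
    maxOutputTokens: 256, // v4: maxTokens
    temperature: 0.7,
    maxRetries: 2,
  });

  for await (const text of result.textStream) {
    process.stdout.write(text);
  }
}

main().catch(console.error);
```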
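
Finally, the fake LLM server drops its hand-rolled NDJSON `/ollama/chat` route; Ollama requests now flow through the same `createChatCompletionHandler` used for LM Studio, gateway, and engine (its implementation is not shown in this diff). Purely as an illustration of the wire format that route has to speak for the `@ai-sdk/openai-compatible` client: the path below matches the new registration, while the port, ids, and content are made up:

```ts
import express from "express";

const app = express();
app.use(express.json());

// Streams two OpenAI-style chat.completion.chunk events followed by [DONE],
// the SSE shape expected from a /v1/chat/completions endpoint.
app.post("/ollama/v1/chat/completions", (req, res) => {
  res.setHeader("Content-Type", "text/event-stream");
  res.setHeader("Cache-Control", "no-cache");

  const chunk = (delta: object, finish: string | null) =>
    "data: " +
    JSON.stringify({
      id: "chatcmpl-fake",
      object: "chat.completion.chunk",
      created: Math.floor(Date.now() / 1000),
      model: req.body?.model ?? "llama3.2",
      choices: [{ index: 0, delta, finish_reason: finish }],
    }) +
    "\n\n";

  res.write(chunk({ role: "assistant", content: "ollama " }, null));
  res.write(chunk({ content: "chunk" }, null));
  res.write(chunk({}, "stop"));
  res.write("data: [DONE]\n\n");
  res.end();
});

app.listen(3500); // illustrative port, not the fake server's actual one
```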