Upgrade to AI SDK with codemod (#1000)

Will Chen
2025-08-18 22:21:27 -07:00
committed by GitHub
parent 573642ae5f
commit d535db6251
9 changed files with 168 additions and 335 deletions
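
The hand-written changes in this commit track the AI SDK v4 to v5 renames: CoreMessage becomes ModelMessage, maxTokens becomes maxOutputTokens, stream parts expose text instead of textDelta, LanguageModelV1 becomes LanguageModel, and ai@5 declares zod ^3.25.76 || ^4 as a peer dependency, which is presumably why zod moves into the direct dependencies. A minimal sketch of the new shape follows; it is not part of the commit, and `model` is assumed to be an already-constructed client.

// Illustrative sketch of the AI SDK v4 -> v5 renames applied below (not part of the commit).
import { streamText, type LanguageModel, type ModelMessage } from "ai"; // v4: CoreMessage, LanguageModelV1

async function ask(model: LanguageModel, messages: ModelMessage[]): Promise<string> {
  const result = streamText({
    model,
    messages,
    maxOutputTokens: 4096, // v4: maxTokens
    maxRetries: 2,
  });
  let text = "";
  for await (const part of result.fullStream) {
    if (part.type === "text-delta") text += part.text; // v4: part.textDelta
  }
  return text;
}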

View File

@@ -1,4 +1,8 @@
- paragraph: hi - paragraph: hi
- paragraph: ollamachunkollamachunk - img
- text: file1.txt
- img
- text: file1.txt
- paragraph: More EOM
- button "Retry": - button "Retry":
- img - img

package-lock.json (generated, 323 changes)
View File

@@ -1,25 +1,26 @@
{ {
"name": "dyad", "name": "dyad",
"version": "0.17.0-beta.2", "version": "0.18.0-beta.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "dyad", "name": "dyad",
"version": "0.17.0-beta.2", "version": "0.18.0-beta.1",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@ai-sdk/anthropic": "^1.2.8", "@ai-sdk/anthropic": "^2.0.4",
"@ai-sdk/google": "^1.2.19", "@ai-sdk/google": "^2.0.6",
"@ai-sdk/openai": "^1.3.24", "@ai-sdk/openai": "^2.0.15",
"@ai-sdk/openai-compatible": "^0.2.13", "@ai-sdk/openai-compatible": "^1.0.8",
"@ai-sdk/provider-utils": "^3.0.3",
"@biomejs/biome": "^1.9.4", "@biomejs/biome": "^1.9.4",
"@dyad-sh/supabase-management-js": "v1.0.0", "@dyad-sh/supabase-management-js": "v1.0.0",
"@lexical/react": "^0.33.1", "@lexical/react": "^0.33.1",
"@monaco-editor/react": "^4.7.0-rc.0", "@monaco-editor/react": "^4.7.0-rc.0",
"@neondatabase/api-client": "^2.1.0", "@neondatabase/api-client": "^2.1.0",
"@neondatabase/serverless": "^1.0.1", "@neondatabase/serverless": "^1.0.1",
"@openrouter/ai-sdk-provider": "^0.4.5", "@openrouter/ai-sdk-provider": "^1.1.2",
"@radix-ui/react-accordion": "^1.2.4", "@radix-ui/react-accordion": "^1.2.4",
"@radix-ui/react-alert-dialog": "^1.1.13", "@radix-ui/react-alert-dialog": "^1.1.13",
"@radix-ui/react-checkbox": "^1.3.2", "@radix-ui/react-checkbox": "^1.3.2",
@@ -43,7 +44,7 @@
"@types/uuid": "^10.0.0", "@types/uuid": "^10.0.0",
"@vercel/sdk": "^1.10.0", "@vercel/sdk": "^1.10.0",
"@vitejs/plugin-react": "^4.3.4", "@vitejs/plugin-react": "^4.3.4",
"ai": "^4.3.4", "ai": "^5.0.15",
"better-sqlite3": "^11.9.1", "better-sqlite3": "^11.9.1",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"clsx": "^2.1.1", "clsx": "^2.1.1",
@@ -65,7 +66,6 @@
"lexical-beautiful-mentions": "^0.1.47", "lexical-beautiful-mentions": "^0.1.47",
"lucide-react": "^0.487.0", "lucide-react": "^0.487.0",
"monaco-editor": "^0.52.2", "monaco-editor": "^0.52.2",
"ollama-ai-provider": "^1.2.0",
"openai": "^4.91.1", "openai": "^4.91.1",
"posthog-js": "^1.236.3", "posthog-js": "^1.236.3",
"react": "^19.0.0", "react": "^19.0.0",
@@ -83,7 +83,8 @@
"tree-kill": "^1.2.2", "tree-kill": "^1.2.2",
"tw-animate-css": "^1.2.5", "tw-animate-css": "^1.2.5",
"update-electron-app": "^3.1.1", "update-electron-app": "^3.1.1",
"uuid": "^11.1.0" "uuid": "^11.1.0",
"zod": "^3.25.76"
}, },
"devDependencies": { "devDependencies": {
"@electron-forge/cli": "^7.8.0", "@electron-forge/cli": "^7.8.0",
@@ -126,73 +127,89 @@
} }
}, },
"node_modules/@ai-sdk/anthropic": { "node_modules/@ai-sdk/anthropic": {
"version": "1.2.12", "version": "2.0.4",
"resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-1.2.12.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-2.0.4.tgz",
"integrity": "sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ==", "integrity": "sha512-ii2bZEUPwBitUiK1dpX+HsOarcDGY71G9TVdSJqbfXSVqa+speJNZ8PA/bjuNMml0NyX8VxNsaMg3SwBUCZspA==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "2.2.8" "@ai-sdk/provider-utils": "3.0.3"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.0.0" "zod": "^3.25.76 || ^4"
}
},
"node_modules/@ai-sdk/gateway": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@ai-sdk/gateway/-/gateway-1.0.7.tgz",
"integrity": "sha512-Athrq7OARuNc0iHZJP6InhSQ53tImCc990vMWyR1UHaZgPZJbXjKhIMiOj54F0I0Nlemx48V4fHYUTfLkJotnQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "3.0.3"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4"
} }
}, },
"node_modules/@ai-sdk/google": { "node_modules/@ai-sdk/google": {
"version": "1.2.22", "version": "2.0.6",
"resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-1.2.22.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-2.0.6.tgz",
"integrity": "sha512-Ppxu3DIieF1G9pyQ5O1Z646GYR0gkC57YdBqXJ82qvCdhEhZHu0TWhmnOoeIWe2olSbuDeoOY+MfJrW8dzS3Hw==", "integrity": "sha512-8acuseWJI+RRH99JDWM/n7IJRuuGNa4YzLXB/leqE/ZByHyIiVWGADjJi/vfnJnmdM5fQnezJ6SRTF6feI5rSQ==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "2.2.8" "@ai-sdk/provider-utils": "3.0.3"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.0.0" "zod": "^3.25.76 || ^4"
} }
}, },
"node_modules/@ai-sdk/openai": { "node_modules/@ai-sdk/openai": {
"version": "1.3.24", "version": "2.0.15",
"resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-1.3.24.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-2.0.15.tgz",
"integrity": "sha512-GYXnGJTHRTZc4gJMSmFRgEQudjqd4PUN0ZjQhPwOAYH1yOAvQoG/Ikqs+HyISRbLPCrhbZnPKCNHuRU4OfpW0Q==", "integrity": "sha512-/IUyQ9ck4uUTtGojvQamcUWpNWkwpL/P1F6LYRxpQGj07H00oJEBH/VUizrIq0ZvW/vkuK6c6X4UJS9PrdYyxA==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "2.2.8" "@ai-sdk/provider-utils": "3.0.3"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.0.0" "zod": "^3.25.76 || ^4"
} }
}, },
"node_modules/@ai-sdk/openai-compatible": { "node_modules/@ai-sdk/openai-compatible": {
"version": "0.2.16", "version": "1.0.8",
"resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-0.2.16.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/openai-compatible/-/openai-compatible-1.0.8.tgz",
"integrity": "sha512-LkvfcM8slJedRyJa/MiMiaOzcMjV1zNDwzTHEGz7aAsgsQV0maLfmJRi/nuSwf5jmp0EouC+JXXDUj2l94HgQw==", "integrity": "sha512-vxJ7tUisGTS4IzxboU3NT6JYsaUqRiGjvugFvx/zW5cT7FaIUprDwQIMM4ZrmH5b9kJ48rOEXqNjpdtkvtXlVA==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/provider": "2.0.0",
"@ai-sdk/provider-utils": "2.2.8" "@ai-sdk/provider-utils": "3.0.3"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.0.0" "zod": "^3.25.76 || ^4"
} }
}, },
"node_modules/@ai-sdk/provider": { "node_modules/@ai-sdk/provider": {
"version": "1.1.3", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.3.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-2.0.0.tgz",
"integrity": "sha512-qZMxYJ0qqX/RfnuIaab+zp8UAeJn/ygXXAffR5I4N0n1IrvA6qBsjc8hXLmBiMV2zoXlifkacF7sEFnYnjBcqg==", "integrity": "sha512-6o7Y2SeO9vFKB8lArHXehNuusnpddKPk7xqL7T2/b+OvXMRIXUO1rR4wcv1hAFUAT9avGZshty3Wlua/XA7TvA==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"json-schema": "^0.4.0" "json-schema": "^0.4.0"
@@ -202,61 +219,21 @@
} }
}, },
"node_modules/@ai-sdk/provider-utils": { "node_modules/@ai-sdk/provider-utils": {
"version": "2.2.8", "version": "3.0.3",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.8.tgz", "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-3.0.3.tgz",
"integrity": "sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==", "integrity": "sha512-kAxIw1nYmFW1g5TvE54ZB3eNtgZna0RnLjPUp1ltz1+t9xkXJIuDT4atrwfau9IbS0BOef38wqrI8CjFfQrxhw==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/provider": "2.0.0",
"nanoid": "^3.3.8", "@standard-schema/spec": "^1.0.0",
"secure-json-parse": "^2.7.0" "eventsource-parser": "^3.0.3",
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.23.8"
}
},
"node_modules/@ai-sdk/react": {
"version": "1.2.12",
"resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.12.tgz",
"integrity": "sha512-jK1IZZ22evPZoQW3vlkZ7wvjYGYF+tRBKXtrcolduIkQ/m/sOAVcVeVDUDvh1T91xCnWCdUGCPZg2avZ90mv3g==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider-utils": "2.2.8",
"@ai-sdk/ui-utils": "1.2.11",
"swr": "^2.2.5",
"throttleit": "2.1.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"react": "^18 || ^19 || ^19.0.0-rc",
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
}
},
"node_modules/@ai-sdk/ui-utils": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.11.tgz",
"integrity": "sha512-3zcwCc8ezzFlwp3ZD15wAPjf2Au4s3vAbKsXQVyhxODHcmu0iyPO2Eua6D/vicq/AUm/BAo60r97O6HU+EI0+w==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.1.3",
"@ai-sdk/provider-utils": "2.2.8",
"zod-to-json-schema": "^3.24.1" "zod-to-json-schema": "^3.24.1"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.23.8" "zod": "^3.25.76 || ^4"
} }
}, },
"node_modules/@ampproject/remapping": { "node_modules/@ampproject/remapping": {
@@ -4317,54 +4294,16 @@
} }
}, },
"node_modules/@openrouter/ai-sdk-provider": { "node_modules/@openrouter/ai-sdk-provider": {
"version": "0.4.6", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-0.4.6.tgz", "resolved": "https://registry.npmjs.org/@openrouter/ai-sdk-provider/-/ai-sdk-provider-1.1.2.tgz",
"integrity": "sha512-oUa8xtssyUhiKEU/aW662lsZ0HUvIUTRk8vVIF3Ha3KI/DnqX54zmVIuzYnaDpermqhy18CHqblAY4dDt1JW3g==", "integrity": "sha512-cfiKVpNygGFaJojBHFvtTf7UiF458Xh9yPcTg4FXF7bGYN5V33Rxx9dXNE12fjv6lHeC5C7jwQHDrzUIFol1iQ==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.0.9",
"@ai-sdk/provider-utils": "2.1.10"
},
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"zod": "^3.0.0" "ai": "^5.0.0",
} "zod": "^3.24.1 || ^v4"
},
"node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.0.9.tgz",
"integrity": "sha512-jie6ZJT2ZR0uVOVCDc9R2xCX5I/Dum/wEK28lx21PJx6ZnFAN9EzD2WsPhcDWfCgGx3OAZZ0GyM3CEobXpa9LA==",
"license": "Apache-2.0",
"dependencies": {
"json-schema": "^0.4.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@openrouter/ai-sdk-provider/node_modules/@ai-sdk/provider-utils": {
"version": "2.1.10",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.1.10.tgz",
"integrity": "sha512-4GZ8GHjOFxePFzkl3q42AU0DQOtTQ5w09vmaWUf/pKFXJPizlnzKSUkF0f+VkapIUfDugyMqPMT1ge8XQzVI7Q==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.0.9",
"eventsource-parser": "^3.0.0",
"nanoid": "^3.3.8",
"secure-json-parse": "^2.7.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.0.0"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
} }
}, },
"node_modules/@opentelemetry/api": { "node_modules/@opentelemetry/api": {
@@ -5997,6 +5936,12 @@
"url": "https://github.com/sindresorhus/is?sponsor=1" "url": "https://github.com/sindresorhus/is?sponsor=1"
} }
}, },
"node_modules/@standard-schema/spec": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz",
"integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==",
"license": "MIT"
},
"node_modules/@swc/helpers": { "node_modules/@swc/helpers": {
"version": "0.5.15", "version": "0.5.15",
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz",
@@ -6630,12 +6575,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/@types/diff-match-patch": {
"version": "1.0.36",
"resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz",
"integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==",
"license": "MIT"
},
"node_modules/@types/estree": { "node_modules/@types/estree": {
"version": "1.0.8", "version": "1.0.8",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
@@ -7402,29 +7341,21 @@
} }
}, },
"node_modules/ai": { "node_modules/ai": {
"version": "4.3.19", "version": "5.0.15",
"resolved": "https://registry.npmjs.org/ai/-/ai-4.3.19.tgz", "resolved": "https://registry.npmjs.org/ai/-/ai-5.0.15.tgz",
"integrity": "sha512-dIE2bfNpqHN3r6IINp9znguYdhIOheKW2LDigAMrgt/upT3B8eBGPSCblENvaZGoq+hxaN9fSMzjWpbqloP+7Q==", "integrity": "sha512-EX5hF+NVFm6R11mvdZRbg6eJEjyMlniI4/xOnyTh4VtDQ457lhIgi3kDGrHW3/qw9ELon9m2e7AK3g5z5sLwsQ==",
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@ai-sdk/provider": "1.1.3", "@ai-sdk/gateway": "1.0.7",
"@ai-sdk/provider-utils": "2.2.8", "@ai-sdk/provider": "2.0.0",
"@ai-sdk/react": "1.2.12", "@ai-sdk/provider-utils": "3.0.3",
"@ai-sdk/ui-utils": "1.2.11", "@opentelemetry/api": "1.9.0"
"@opentelemetry/api": "1.9.0",
"jsondiffpatch": "0.6.0"
}, },
"engines": { "engines": {
"node": ">=18" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"react": "^18 || ^19 || ^19.0.0-rc", "zod": "^3.25.76 || ^4"
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"react": {
"optional": true
}
} }
}, },
"node_modules/ajv": { "node_modules/ajv": {
@@ -8956,12 +8887,6 @@
"url": "https://github.com/sponsors/wooorm" "url": "https://github.com/sponsors/wooorm"
} }
}, },
"node_modules/diff-match-patch": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz",
"integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==",
"license": "Apache-2.0"
},
"node_modules/diff-sequences": { "node_modules/diff-sequences": {
"version": "29.6.3", "version": "29.6.3",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz",
@@ -13426,35 +13351,6 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/jsondiffpatch": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz",
"integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==",
"license": "MIT",
"dependencies": {
"@types/diff-match-patch": "^1.0.36",
"chalk": "^5.3.0",
"diff-match-patch": "^1.0.5"
},
"bin": {
"jsondiffpatch": "bin/jsondiffpatch.js"
},
"engines": {
"node": "^18.0.0 || >=20.0.0"
}
},
"node_modules/jsondiffpatch/node_modules/chalk": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.5.0.tgz",
"integrity": "sha512-1tm8DTaJhPBG3bIkVeZt1iZM9GfSX2lzOeDVZH9R9ffRHpmHvxZ/QhgQH/aDTkswQVt+YHdXAdS/In/30OjCbg==",
"license": "MIT",
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/jsonfile": { "node_modules/jsonfile": {
"version": "6.2.0", "version": "6.2.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz",
@@ -16041,28 +15937,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/ollama-ai-provider": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/ollama-ai-provider/-/ollama-ai-provider-1.2.0.tgz",
"integrity": "sha512-jTNFruwe3O/ruJeppI/quoOUxG7NA6blG3ZyQj3lei4+NnJo7bi3eIRWqlVpRlu/mbzbFXeJSBuYQWF6pzGKww==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "^1.0.0",
"@ai-sdk/provider-utils": "^2.0.0",
"partial-json": "0.1.7"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.0.0"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
}
},
"node_modules/once": { "node_modules/once": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -16497,12 +16371,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/partial-json": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz",
"integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==",
"license": "MIT"
},
"node_modules/path-browserify": { "node_modules/path-browserify": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz",
@@ -17972,12 +17840,6 @@
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/secure-json-parse": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz",
"integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==",
"license": "BSD-3-Clause"
},
"node_modules/semver": { "node_modules/semver": {
"version": "7.7.2", "version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
@@ -19237,19 +19099,6 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/swr": {
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/swr/-/swr-2.3.6.tgz",
"integrity": "sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==",
"license": "MIT",
"dependencies": {
"dequal": "^2.0.3",
"use-sync-external-store": "^1.4.0"
},
"peerDependencies": {
"react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/tabbable": { "node_modules/tabbable": {
"version": "6.2.0", "version": "6.2.0",
"resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz",
@@ -19424,18 +19273,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/throttleit": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz",
"integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==",
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/tiny-each-async": { "node_modules/tiny-each-async": {
"version": "2.0.3", "version": "2.0.3",
"resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz", "resolved": "https://registry.npmjs.org/tiny-each-async/-/tiny-each-async-2.0.3.tgz",

View File

@@ -84,17 +84,18 @@
"vitest": "^3.1.1" "vitest": "^3.1.1"
}, },
"dependencies": { "dependencies": {
"@ai-sdk/anthropic": "^1.2.8", "@ai-sdk/anthropic": "^2.0.4",
"@ai-sdk/google": "^1.2.19", "@ai-sdk/google": "^2.0.6",
"@ai-sdk/openai": "^1.3.24", "@ai-sdk/openai": "^2.0.15",
"@ai-sdk/openai-compatible": "^0.2.13", "@ai-sdk/openai-compatible": "^1.0.8",
"@ai-sdk/provider-utils": "^3.0.3",
"@biomejs/biome": "^1.9.4", "@biomejs/biome": "^1.9.4",
"@dyad-sh/supabase-management-js": "v1.0.0", "@dyad-sh/supabase-management-js": "v1.0.0",
"@lexical/react": "^0.33.1", "@lexical/react": "^0.33.1",
"@monaco-editor/react": "^4.7.0-rc.0", "@monaco-editor/react": "^4.7.0-rc.0",
"@neondatabase/api-client": "^2.1.0", "@neondatabase/api-client": "^2.1.0",
"@neondatabase/serverless": "^1.0.1", "@neondatabase/serverless": "^1.0.1",
"@openrouter/ai-sdk-provider": "^0.4.5", "@openrouter/ai-sdk-provider": "^1.1.2",
"@radix-ui/react-accordion": "^1.2.4", "@radix-ui/react-accordion": "^1.2.4",
"@radix-ui/react-alert-dialog": "^1.1.13", "@radix-ui/react-alert-dialog": "^1.1.13",
"@radix-ui/react-checkbox": "^1.3.2", "@radix-ui/react-checkbox": "^1.3.2",
@@ -118,7 +119,7 @@
"@types/uuid": "^10.0.0", "@types/uuid": "^10.0.0",
"@vercel/sdk": "^1.10.0", "@vercel/sdk": "^1.10.0",
"@vitejs/plugin-react": "^4.3.4", "@vitejs/plugin-react": "^4.3.4",
"ai": "^4.3.4", "ai": "^5.0.15",
"better-sqlite3": "^11.9.1", "better-sqlite3": "^11.9.1",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"clsx": "^2.1.1", "clsx": "^2.1.1",
@@ -140,7 +141,6 @@
"lexical-beautiful-mentions": "^0.1.47", "lexical-beautiful-mentions": "^0.1.47",
"lucide-react": "^0.487.0", "lucide-react": "^0.487.0",
"monaco-editor": "^0.52.2", "monaco-editor": "^0.52.2",
"ollama-ai-provider": "^1.2.0",
"openai": "^4.91.1", "openai": "^4.91.1",
"posthog-js": "^1.236.3", "posthog-js": "^1.236.3",
"react": "^19.0.0", "react": "^19.0.0",
@@ -158,7 +158,8 @@
"tree-kill": "^1.2.2", "tree-kill": "^1.2.2",
"tw-animate-css": "^1.2.5", "tw-animate-css": "^1.2.5",
"update-electron-app": "^3.1.1", "update-electron-app": "^3.1.1",
"uuid": "^11.1.0" "uuid": "^11.1.0",
"zod": "^3.25.76"
}, },
"lint-staged": { "lint-staged": {
"**/*.{js,mjs,cjs,jsx,ts,mts,cts,tsx,vue,astro,svelte}": "oxlint", "**/*.{js,mjs,cjs,jsx,ts,mts,cts,tsx,vue,astro,svelte}": "oxlint",

View File

@@ -1,7 +1,7 @@
import { v4 as uuidv4 } from "uuid"; import { v4 as uuidv4 } from "uuid";
import { ipcMain } from "electron"; import { ipcMain } from "electron";
import { import {
CoreMessage, ModelMessage,
TextPart, TextPart,
ImagePart, ImagePart,
streamText, streamText,
@@ -134,14 +134,14 @@ async function processStreamChunks({
chunk = "</think>"; chunk = "</think>";
inThinkingBlock = false; inThinkingBlock = false;
} }
chunk += part.textDelta; chunk += part.text;
} else if (part.type === "reasoning") { } else if (part.type === "reasoning-delta") {
if (!inThinkingBlock) { if (!inThinkingBlock) {
chunk = "<think>"; chunk = "<think>";
inThinkingBlock = true; inThinkingBlock = true;
} }
chunk += escapeDyadTags(part.textDelta); chunk += escapeDyadTags(part.text);
} }
if (!chunk) { if (!chunk) {
@@ -603,7 +603,7 @@ This conversation includes one or more image attachments. When the user uploads
] as const) ] as const)
: []; : [];
let chatMessages: CoreMessage[] = [ let chatMessages: ModelMessage[] = [
...codebasePrefix, ...codebasePrefix,
...otherCodebasePrefix, ...otherCodebasePrefix,
...limitedMessageHistory.map((msg) => ({ ...limitedMessageHistory.map((msg) => ({
@@ -647,7 +647,7 @@ This conversation includes one or more image attachments. When the user uploads
content: content:
"Summarize the following chat: " + "Summarize the following chat: " +
formatMessagesForSummary(previousChat?.messages ?? []), formatMessagesForSummary(previousChat?.messages ?? []),
} satisfies CoreMessage, } satisfies ModelMessage,
]; ];
} }
@@ -655,7 +655,7 @@ This conversation includes one or more image attachments. When the user uploads
chatMessages, chatMessages,
modelClient, modelClient,
}: { }: {
chatMessages: CoreMessage[]; chatMessages: ModelMessage[];
modelClient: ModelClient; modelClient: ModelClient;
}) => { }) => {
const dyadRequestId = uuidv4(); const dyadRequestId = uuidv4();
@@ -668,7 +668,7 @@ This conversation includes one or more image attachments. When the user uploads
logger.log("sending AI request"); logger.log("sending AI request");
} }
return streamText({ return streamText({
maxTokens: await getMaxTokens(settings.selectedModel), maxOutputTokens: await getMaxTokens(settings.selectedModel),
temperature: await getTemperature(settings.selectedModel), temperature: await getTemperature(settings.selectedModel),
maxRetries: 2, maxRetries: 2,
model: modelClient.model, model: modelClient.model,
@@ -798,7 +798,7 @@ This conversation includes one or more image attachments. When the user uploads
break; break;
} }
if (part.type !== "text-delta") continue; // ignore reasoning for continuation if (part.type !== "text-delta") continue; // ignore reasoning for continuation
fullResponse += part.textDelta; fullResponse += part.text;
fullResponse = cleanFullResponse(fullResponse); fullResponse = cleanFullResponse(fullResponse);
fullResponse = await processResponseChunkUpdate({ fullResponse = await processResponseChunkUpdate({
fullResponse, fullResponse,
@@ -825,7 +825,7 @@ This conversation includes one or more image attachments. When the user uploads
let autoFixAttempts = 0; let autoFixAttempts = 0;
const originalFullResponse = fullResponse; const originalFullResponse = fullResponse;
const previousAttempts: CoreMessage[] = []; const previousAttempts: ModelMessage[] = [];
while ( while (
problemReport.problems.length > 0 && problemReport.problems.length > 0 &&
autoFixAttempts < 2 && autoFixAttempts < 2 &&
@@ -1161,9 +1161,9 @@ async function replaceTextAttachmentWithContent(
// Helper function to convert traditional message to one with proper image attachments // Helper function to convert traditional message to one with proper image attachments
async function prepareMessageWithAttachments( async function prepareMessageWithAttachments(
message: CoreMessage, message: ModelMessage,
attachmentPaths: string[], attachmentPaths: string[],
): Promise<CoreMessage> { ): Promise<ModelMessage> {
let textContent = message.content; let textContent = message.content;
// Get the original text content // Get the original text content
if (typeof textContent !== "string") { if (typeof textContent !== "string") {
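
Condensed, the stream-part handling in this file after the rename looks roughly like the sketch below; escapeDyadTags is the helper used in the hunk, and the real function also runs the cleanup and chat-update steps omitted here.

// Condensed sketch of processStreamChunks after the v5 rename.
async function collectResponse(
  stream: { fullStream: AsyncIterable<{ type: string; text?: string }> },
  escapeDyadTags: (s: string) => string, // assumed: the tag-escaping helper from this file
): Promise<string> {
  let fullResponse = "";
  let inThinkingBlock = false;
  for await (const part of stream.fullStream) {
    let chunk = "";
    if (part.type === "text-delta") {
      if (inThinkingBlock) {
        chunk = "</think>";
        inThinkingBlock = false;
      }
      chunk += part.text ?? ""; // v4: part.textDelta
    } else if (part.type === "reasoning-delta") { // v4: part.type === "reasoning"
      if (!inThinkingBlock) {
        chunk = "<think>";
        inThinkingBlock = true;
      }
      chunk += escapeDyadTags(part.text ?? "");
    }
    if (!chunk) continue;
    fullResponse += chunk;
  }
  return fullResponse;
}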

View File

@@ -37,7 +37,9 @@ export function parseOllamaHost(host?: string): string {
return `http://${host}:11434`; return `http://${host}:11434`;
} }
const OLLAMA_API_URL = parseOllamaHost(process.env.OLLAMA_HOST); export function getOllamaApiUrl(): string {
return parseOllamaHost(process.env.OLLAMA_HOST);
}
interface OllamaModel { interface OllamaModel {
name: string; name: string;
@@ -55,7 +57,7 @@ interface OllamaModel {
export async function fetchOllamaModels(): Promise<LocalModelListResponse> { export async function fetchOllamaModels(): Promise<LocalModelListResponse> {
try { try {
const response = await fetch(`${OLLAMA_API_URL}/api/tags`); const response = await fetch(`${getOllamaApiUrl()}/api/tags`);
if (!response.ok) { if (!response.ok) {
throw new Error(`Failed to fetch model: ${response.statusText}`); throw new Error(`Failed to fetch model: ${response.statusText}`);
} }

View File

@@ -1,9 +1,7 @@
import { LanguageModelV1 } from "ai";
import { createOpenAI } from "@ai-sdk/openai"; import { createOpenAI } from "@ai-sdk/openai";
import { createGoogleGenerativeAI as createGoogle } from "@ai-sdk/google"; import { createGoogleGenerativeAI as createGoogle } from "@ai-sdk/google";
import { createAnthropic } from "@ai-sdk/anthropic"; import { createAnthropic } from "@ai-sdk/anthropic";
import { createOpenRouter } from "@openrouter/ai-sdk-provider"; import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { createOllama } from "ollama-ai-provider";
import { createOpenAICompatible } from "@ai-sdk/openai-compatible"; import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
import type { LargeLanguageModel, UserSettings } from "../../lib/schemas"; import type { LargeLanguageModel, UserSettings } from "../../lib/schemas";
import { getEnvVar } from "./read_env"; import { getEnvVar } from "./read_env";
@@ -13,6 +11,9 @@ import { LanguageModelProvider } from "../ipc_types";
import { createDyadEngine } from "./llm_engine_provider"; import { createDyadEngine } from "./llm_engine_provider";
import { LM_STUDIO_BASE_URL } from "./lm_studio_utils"; import { LM_STUDIO_BASE_URL } from "./lm_studio_utils";
import { LanguageModel } from "ai";
import { createOllamaProvider } from "./ollama_provider";
import { getOllamaApiUrl } from "../handlers/local_model_ollama_handler";
const dyadEngineUrl = process.env.DYAD_ENGINE_URL; const dyadEngineUrl = process.env.DYAD_ENGINE_URL;
const dyadGatewayUrl = process.env.DYAD_GATEWAY_URL; const dyadGatewayUrl = process.env.DYAD_GATEWAY_URL;
@@ -33,7 +34,7 @@ const AUTO_MODELS = [
]; ];
export interface ModelClient { export interface ModelClient {
model: LanguageModelV1; model: LanguageModel;
builtinProviderId?: string; builtinProviderId?: string;
} }
@@ -168,7 +169,10 @@ function getRegularModelClient(
model: LargeLanguageModel, model: LargeLanguageModel,
settings: UserSettings, settings: UserSettings,
providerConfig: LanguageModelProvider, providerConfig: LanguageModelProvider,
) { ): {
modelClient: ModelClient;
backupModelClients: ModelClient[];
} {
// Get API key for the specific provider // Get API key for the specific provider
const apiKey = const apiKey =
settings.providerSettings?.[model.provider]?.apiKey?.value || settings.providerSettings?.[model.provider]?.apiKey?.value ||
@@ -220,13 +224,11 @@ function getRegularModelClient(
}; };
} }
case "ollama": { case "ollama": {
// Ollama typically runs locally and doesn't require an API key in the same way const provider = createOllamaProvider({ baseURL: getOllamaApiUrl() });
const provider = createOllama({
baseURL: process.env.OLLAMA_HOST,
});
return { return {
modelClient: { modelClient: {
model: provider(model.name), model: provider(model.name),
builtinProviderId: providerId,
}, },
backupModelClients: [], backupModelClients: [],
}; };
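
The net effect of the ollama branch is sketched below; createOllamaProvider is the new helper added in a later file in this diff, and the model name stands in for whatever the user selected.

// Sketch of what the ollama branch now returns; "llama3.2" is an illustrative model name.
const provider = createOllamaProvider({ baseURL: getOllamaApiUrl() });
const modelClient: ModelClient = { model: provider("llama3.2") };
// modelClient.model is an AI SDK v5 LanguageModel, so it can be passed to
// streamText({ model: modelClient.model, ... }) like any other provider's model.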

View File

@@ -1,7 +1,4 @@
import { import { LanguageModel } from "ai";
LanguageModelV1,
LanguageModelV1ObjectGenerationMode,
} from "@ai-sdk/provider";
import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible"; import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible";
import { import {
FetchFunction, FetchFunction,
@@ -9,7 +6,6 @@ import {
withoutTrailingSlash, withoutTrailingSlash,
} from "@ai-sdk/provider-utils"; } from "@ai-sdk/provider-utils";
import { OpenAICompatibleChatSettings } from "@ai-sdk/openai-compatible";
import log from "electron-log"; import log from "electron-log";
import { getExtraProviderOptions } from "./thinking_utils"; import { getExtraProviderOptions } from "./thinking_utils";
import type { UserSettings } from "../../lib/schemas"; import type { UserSettings } from "../../lib/schemas";
@@ -18,7 +14,7 @@ const logger = log.scope("llm_engine_provider");
export type ExampleChatModelId = string & {}; export type ExampleChatModelId = string & {};
export interface ExampleChatSettings extends OpenAICompatibleChatSettings { export interface ExampleChatSettings {
files?: { path: string; content: string }[]; files?: { path: string; content: string }[];
} }
export interface ExampleProviderSettings { export interface ExampleProviderSettings {
@@ -56,10 +52,7 @@ export interface DyadEngineProvider {
/** /**
Creates a model for text generation. Creates a model for text generation.
*/ */
( (modelId: ExampleChatModelId, settings?: ExampleChatSettings): LanguageModel;
modelId: ExampleChatModelId,
settings?: ExampleChatSettings,
): LanguageModelV1;
/** /**
Creates a chat model for text generation. Creates a chat model for text generation.
@@ -67,7 +60,7 @@ Creates a chat model for text generation.
chatModel( chatModel(
modelId: ExampleChatModelId, modelId: ExampleChatModelId,
settings?: ExampleChatSettings, settings?: ExampleChatSettings,
): LanguageModelV1; ): LanguageModel;
} }
export function createDyadEngine( export function createDyadEngine(
@@ -113,13 +106,13 @@ export function createDyadEngine(
settings: ExampleChatSettings = {}, settings: ExampleChatSettings = {},
) => { ) => {
// Extract files from settings to process them appropriately // Extract files from settings to process them appropriately
const { files, ...restSettings } = settings; const { files } = settings;
// Create configuration with file handling // Create configuration with file handling
const config = { const config = {
...getCommonModelConfig(), ...getCommonModelConfig(),
defaultObjectGenerationMode: // defaultObjectGenerationMode:
"tool" as LanguageModelV1ObjectGenerationMode, // "tool" as LanguageModelV1ObjectGenerationMode,
// Custom fetch implementation that adds files to the request // Custom fetch implementation that adds files to the request
fetch: (input: RequestInfo | URL, init?: RequestInit) => { fetch: (input: RequestInfo | URL, init?: RequestInit) => {
// Use default fetch if no init or body // Use default fetch if no init or body
@@ -181,7 +174,7 @@ export function createDyadEngine(
}, },
}; };
return new OpenAICompatibleChatLanguageModel(modelId, restSettings, config); return new OpenAICompatibleChatLanguageModel(modelId, config);
}; };
const provider = ( const provider = (
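
For context on the constructor change above: the v5 OpenAICompatibleChatLanguageModel takes only a model id and a config object, with no separate settings argument and no defaultObjectGenerationMode. A condensed sketch reusing names from the hunk; the fetch name is hypothetical.

// Condensed from the hunk above; getCommonModelConfig() and the files-injecting
// fetch are defined elsewhere in this file, and modelId comes from the caller.
const config = {
  ...getCommonModelConfig(),
  fetch: fetchWithInjectedFiles, // hypothetical name for the custom fetch shown above
};
// v4 took a settings argument: new OpenAICompatibleChatLanguageModel(modelId, restSettings, config)
const chatModel = new OpenAICompatibleChatLanguageModel(modelId, config);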

View File

@@ -0,0 +1,39 @@
import { LanguageModel } from "ai";
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
import type { FetchFunction } from "@ai-sdk/provider-utils";
import { withoutTrailingSlash } from "@ai-sdk/provider-utils";
import type {} from "@ai-sdk/provider";
type OllamaChatModelId = string;
export interface OllamaProviderOptions {
/**
* Base URL of the Ollama server, e.g. http://localhost:11434.
* The provider appends `/v1` (if not already present) and talks to Ollama's
* OpenAI-compatible endpoint, i.e. POST `${baseURL}/v1/chat/completions`.
* If undefined, defaults to http://localhost:11434.
*/
baseURL?: string;
headers?: Record<string, string>;
fetch?: FetchFunction;
}
export interface OllamaChatSettings {}
export interface OllamaProvider {
(modelId: OllamaChatModelId, settings?: OllamaChatSettings): LanguageModel;
}
export function createOllamaProvider(
options?: OllamaProviderOptions,
): OllamaProvider {
const base = withoutTrailingSlash(
options?.baseURL ?? "http://localhost:11434",
)!;
const v1Base = (base.endsWith("/v1") ? base : `${base}/v1`) as string;
const provider = createOpenAICompatible({
name: "ollama",
baseURL: v1Base,
headers: options?.headers,
});
return (modelId: OllamaChatModelId) => provider(modelId);
}
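
A usage sketch for the new provider; the model name and prompt are placeholders, and with no options the requests go to Ollama's OpenAI-compatible endpoint under http://localhost:11434/v1.

// Illustrative usage of createOllamaProvider; "llama3.2" and the prompt are placeholders.
import { streamText } from "ai";
import { createOllamaProvider } from "./ollama_provider";

const ollama = createOllamaProvider(); // resolves to baseURL http://localhost:11434/v1
const result = streamText({
  model: ollama("llama3.2"),
  prompt: "Say hi",
});
for await (const part of result.fullStream) {
  if (part.type === "text-delta") process.stdout.write(part.text);
}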

View File

@@ -94,51 +94,6 @@ app.get("/ollama/api/tags", (req, res) => {
res.json(ollamaModels); res.json(ollamaModels);
}); });
app.post("/ollama/chat", (req, res) => {
// Tell the client we're going to stream NDJSON
res.setHeader("Content-Type", "application/x-ndjson");
res.setHeader("Cache-Control", "no-cache");
// Chunk #1 partial answer
const firstChunk = {
model: "llama3.2",
created_at: "2023-08-04T08:52:19.385406455-07:00",
message: {
role: "assistant",
content: "ollamachunk",
images: null,
},
done: false,
};
// Chunk #2 final answer + metrics
const secondChunk = {
model: "llama3.2",
created_at: "2023-08-04T19:22:45.499127Z",
message: {
role: "assistant",
content: "",
},
done: true,
total_duration: 4883583458,
load_duration: 1334875,
prompt_eval_count: 26,
prompt_eval_duration: 342546000,
eval_count: 282,
eval_duration: 4535599000,
};
// Send the first object right away
res.write(JSON.stringify(firstChunk) + "\n");
res.write(JSON.stringify(firstChunk) + "\n");
// …and the second one a moment later to mimic streaming
setTimeout(() => {
res.write(JSON.stringify(secondChunk) + "\n");
res.end(); // Close the HTTP stream
}, 300); // 300 ms delay tweak as you like
});
// LM Studio specific endpoints // LM Studio specific endpoints
app.get("/lmstudio/api/v0/models", (req, res) => { app.get("/lmstudio/api/v0/models", (req, res) => {
const lmStudioModels = { const lmStudioModels = {
@@ -182,7 +137,7 @@ app.get("/lmstudio/api/v0/models", (req, res) => {
res.json(lmStudioModels); res.json(lmStudioModels);
}); });
["lmstudio", "gateway", "engine"].forEach((provider) => { ["lmstudio", "gateway", "engine", "ollama"].forEach((provider) => {
app.post( app.post(
`/${provider}/v1/chat/completions`, `/${provider}/v1/chat/completions`,
createChatCompletionHandler(provider), createChatCompletionHandler(provider),