Upgrade to AI SDK with codemod (#1000)
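Migrates from AI SDK v4-era types to the current SDK, presumably via the official codemod (npx @ai-sdk/codemod upgrade): LanguageModelV1 becomes LanguageModel from "ai", per-model settings objects are dropped from "@ai-sdk/openai-compatible", and the ollama-ai-provider dependency is replaced with a small in-repo OpenAI-compatible provider.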
src/ipc/utils/get_model_client.ts
@@ -1,9 +1,7 @@
-import { LanguageModelV1 } from "ai";
 import { createOpenAI } from "@ai-sdk/openai";
 import { createGoogleGenerativeAI as createGoogle } from "@ai-sdk/google";
 import { createAnthropic } from "@ai-sdk/anthropic";
 import { createOpenRouter } from "@openrouter/ai-sdk-provider";
-import { createOllama } from "ollama-ai-provider";
 import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
 import type { LargeLanguageModel, UserSettings } from "../../lib/schemas";
 import { getEnvVar } from "./read_env";
@@ -13,6 +11,9 @@ import { LanguageModelProvider } from "../ipc_types";
 import { createDyadEngine } from "./llm_engine_provider";
 
 import { LM_STUDIO_BASE_URL } from "./lm_studio_utils";
+import { LanguageModel } from "ai";
+import { createOllamaProvider } from "./ollama_provider";
+import { getOllamaApiUrl } from "../handlers/local_model_ollama_handler";
 
 const dyadEngineUrl = process.env.DYAD_ENGINE_URL;
 const dyadGatewayUrl = process.env.DYAD_GATEWAY_URL;
@@ -33,7 +34,7 @@ const AUTO_MODELS = [
 ];
 
 export interface ModelClient {
-  model: LanguageModelV1;
+  model: LanguageModel;
   builtinProviderId?: string;
 }
 
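Note: ModelClient.model is now typed as the AI SDK v5 LanguageModel (exported from "ai") instead of LanguageModelV1. A minimal consumption sketch, assuming the ModelClient interface above; the prompt and function name are made up, and streamText is the standard AI SDK entry point:

    import { streamText } from "ai";

    // A ModelClient's model plugs straight into streamText.
    async function demo(modelClient: ModelClient) {
      const result = streamText({
        model: modelClient.model, // LanguageModel (formerly LanguageModelV1)
        prompt: "Say hello",
      });
      for await (const chunk of result.textStream) {
        process.stdout.write(chunk);
      }
    }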
@@ -168,7 +169,10 @@ function getRegularModelClient(
   model: LargeLanguageModel,
   settings: UserSettings,
   providerConfig: LanguageModelProvider,
-) {
+): {
+  modelClient: ModelClient;
+  backupModelClients: ModelClient[];
+} {
   // Get API key for the specific provider
   const apiKey =
     settings.providerSettings?.[model.provider]?.apiKey?.value ||
@@ -220,13 +224,11 @@ function getRegularModelClient(
       };
     }
     case "ollama": {
       // Ollama typically runs locally and doesn't require an API key in the same way
-      const provider = createOllama({
-        baseURL: process.env.OLLAMA_HOST,
-      });
+      const provider = createOllamaProvider({ baseURL: getOllamaApiUrl() });
       return {
         modelClient: {
           model: provider(model.name),
           builtinProviderId: providerId,
         },
         backupModelClients: [],
       };
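Note: the ollama case no longer reads process.env.OLLAMA_HOST through ollama-ai-provider; it resolves the base URL with getOllamaApiUrl() and goes through the in-repo createOllamaProvider (new file below). Roughly, assuming a default local install and a hypothetical model name:

    const provider = createOllamaProvider({ baseURL: "http://localhost:11434" });
    const model = provider("llama3.2"); // requests hit http://localhost:11434/v1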
src/ipc/utils/llm_engine_provider.ts
@@ -1,7 +1,4 @@
-import {
-  LanguageModelV1,
-  LanguageModelV1ObjectGenerationMode,
-} from "@ai-sdk/provider";
+import { LanguageModel } from "ai";
 import { OpenAICompatibleChatLanguageModel } from "@ai-sdk/openai-compatible";
 import {
   FetchFunction,
@@ -9,7 +6,6 @@ import {
   withoutTrailingSlash,
 } from "@ai-sdk/provider-utils";
 
-import { OpenAICompatibleChatSettings } from "@ai-sdk/openai-compatible";
 import log from "electron-log";
 import { getExtraProviderOptions } from "./thinking_utils";
 import type { UserSettings } from "../../lib/schemas";
@@ -18,7 +14,7 @@ const logger = log.scope("llm_engine_provider");
 
 export type ExampleChatModelId = string & {};
 
-export interface ExampleChatSettings extends OpenAICompatibleChatSettings {
+export interface ExampleChatSettings {
   files?: { path: string; content: string }[];
 }
 export interface ExampleProviderSettings {
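Note: the OpenAICompatibleChatSettings export appears to be gone from "@ai-sdk/openai-compatible" in AI SDK v5 (per-model settings moved to providerOptions), so ExampleChatSettings becomes a standalone interface carrying only the Dyad-specific files payload.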
@@ -56,10 +52,7 @@ export interface DyadEngineProvider {
   /**
 Creates a model for text generation.
   */
-  (
-    modelId: ExampleChatModelId,
-    settings?: ExampleChatSettings,
-  ): LanguageModelV1;
+  (modelId: ExampleChatModelId, settings?: ExampleChatSettings): LanguageModel;
 
   /**
 Creates a chat model for text generation.
@@ -67,7 +60,7 @@ Creates a chat model for text generation.
   chatModel(
     modelId: ExampleChatModelId,
     settings?: ExampleChatSettings,
-  ): LanguageModelV1;
+  ): LanguageModel;
 }
 
 export function createDyadEngine(
@@ -113,13 +106,13 @@ export function createDyadEngine(
     settings: ExampleChatSettings = {},
   ) => {
     // Extract files from settings to process them appropriately
-    const { files, ...restSettings } = settings;
+    const { files } = settings;
 
     // Create configuration with file handling
     const config = {
       ...getCommonModelConfig(),
-      defaultObjectGenerationMode:
-        "tool" as LanguageModelV1ObjectGenerationMode,
+      // defaultObjectGenerationMode:
+      //   "tool" as LanguageModelV1ObjectGenerationMode,
       // Custom fetch implementation that adds files to the request
       fetch: (input: RequestInfo | URL, init?: RequestInit) => {
         // Use default fetch if no init or body
@@ -181,7 +174,7 @@ export function createDyadEngine(
       },
     };
 
-    return new OpenAICompatibleChatLanguageModel(modelId, restSettings, config);
+    return new OpenAICompatibleChatLanguageModel(modelId, config);
   };
 
   const provider = (
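Note: the OpenAICompatibleChatLanguageModel constructor takes (modelId, config) in AI SDK v5; the per-model settings argument is gone, so restSettings had nothing left to carry. The files payload instead rides on the custom fetch in config. A minimal sketch of that pattern, with hypothetical names, not the exact Dyad implementation:

    // Wrap fetch so extra fields are merged into the JSON chat request body.
    const makeFetchWithFiles =
      (files: { path: string; content: string }[]): typeof fetch =>
      async (input, init) => {
        if (!init?.body || typeof init.body !== "string") {
          return fetch(input, init);
        }
        const body = JSON.parse(init.body);
        return fetch(input, { ...init, body: JSON.stringify({ ...body, files }) });
      };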
src/ipc/utils/ollama_provider.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
+import { LanguageModel } from "ai";
+import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
+import type { FetchFunction } from "@ai-sdk/provider-utils";
+import { withoutTrailingSlash } from "@ai-sdk/provider-utils";
+import type {} from "@ai-sdk/provider";
+
+type OllamaChatModelId = string;
+
+export interface OllamaProviderOptions {
+  /**
+   * Base URL for the Ollama API. For real Ollama, use e.g. http://localhost:11434/api
+   * The provider will POST to `${baseURL}/chat`.
+   * If undefined, defaults to http://localhost:11434/api
+   */
+  baseURL?: string;
+  headers?: Record<string, string>;
+  fetch?: FetchFunction;
+}
+
+export interface OllamaChatSettings {}
+
+export interface OllamaProvider {
+  (modelId: OllamaChatModelId, settings?: OllamaChatSettings): LanguageModel;
+}
+
+export function createOllamaProvider(
+  options?: OllamaProviderOptions,
+): OllamaProvider {
+  const base = withoutTrailingSlash(
+    options?.baseURL ?? "http://localhost:11434",
+  )!;
+  const v1Base = (base.endsWith("/v1") ? base : `${base}/v1`) as string;
+  const provider = createOpenAICompatible({
+    name: "ollama",
+    baseURL: v1Base,
+    headers: options?.headers,
+  });
+  return (modelId: OllamaChatModelId) => provider(modelId);
+}
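Note: the doc comment on OllamaProviderOptions still describes Ollama's native API (POST to ${baseURL}/chat under /api), but the implementation normalizes the base URL onto Ollama's OpenAI-compatible /v1 route via createOpenAICompatible, so the comment is stale. Usage would look roughly like the sketch below; the model name is a placeholder and an Ollama server on the default port is assumed:

    import { streamText } from "ai";
    import { createOllamaProvider } from "./ollama_provider";

    const ollama = createOllamaProvider({ baseURL: "http://localhost:11434" });
    const result = streamText({
      model: ollama("llama3.2"), // placeholder model name
      prompt: "Write a haiku about code review.",
    });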