LM Studio e2e test (#297)

This commit is contained in:
Will Chen
2025-05-31 23:04:28 -07:00
committed by GitHub
parent af7d6fa9f8
commit 8a743ca4f5
10 changed files with 113 additions and 95 deletions

View File

@@ -1,6 +1,7 @@
import { ipcMain } from "electron";
import log from "electron-log";
import type { LocalModelListResponse, LocalModel } from "../ipc_types";
import { LM_STUDIO_BASE_URL } from "../utils/lm_studio_utils";
const logger = log.scope("lmstudio_handler");
@@ -19,7 +20,7 @@ export interface LMStudioModel {
export async function fetchLMStudioModels(): Promise<LocalModelListResponse> {
const modelsResponse: Response = await fetch(
"http://localhost:1234/api/v0/models",
`${LM_STUDIO_BASE_URL}/api/v0/models`,
);
if (!modelsResponse.ok) {
throw new Error("Failed to fetch models from LM Studio");

View File

@@ -13,6 +13,7 @@ import { LanguageModelProvider } from "../ipc_types";
import { llmErrorStore } from "@/main/llm_error_store";
import { createDyadEngine } from "./llm_engine_provider";
import { findLanguageModel } from "./findLanguageModel";
import { LM_STUDIO_BASE_URL } from "./lm_studio_utils";
const dyadLocalEngine = process.env.DYAD_LOCAL_ENGINE;
const dyadGatewayUrl = process.env.DYAD_GATEWAY_URL;
@@ -257,7 +258,7 @@ function getRegularModelClient(
}
case "lmstudio": {
// LM Studio uses OpenAI compatible API
const baseURL = providerConfig.apiBaseUrl || "http://localhost:1234/v1";
const baseURL = providerConfig.apiBaseUrl || LM_STUDIO_BASE_URL + "/v1";
const provider = createOpenAICompatible({
name: "lmstudio",
baseURL,

View File

@@ -0,0 +1,2 @@
// Base URL for the local LM Studio server.
// E2e tests can point this at a fake server via LM_STUDIO_BASE_URL_FOR_TESTING;
// a truthiness check (not ??) is intentional so an empty env var also falls
// back to the default.
const testingOverride = process.env.LM_STUDIO_BASE_URL_FOR_TESTING;
export const LM_STUDIO_BASE_URL = testingOverride
  ? testingOverride
  : "http://localhost:1234";