fix auto model logic so that dyad pro key doesn't error
@@ -19,11 +19,13 @@ export function getModelClient(
   model: LargeLanguageModel,
   settings: UserSettings
 ) {
+  const dyadApiKey = settings.providerSettings?.auto?.apiKey?.value;
   // Handle 'auto' provider by trying each model in AUTO_MODELS until one works
   if (model.provider === "auto") {
     // Try each model in AUTO_MODELS in order until finding one with an API key
     for (const autoModel of AUTO_MODELS) {
       const apiKey =
+        dyadApiKey ||
         settings.providerSettings?.[autoModel.provider]?.apiKey ||
         getEnvVar(PROVIDER_TO_ENV_VAR[autoModel.provider]);

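Read on its own, the addition turns the per-model key lookup into a three-way fallback: the Dyad Pro key, if configured, now takes precedence for every candidate in AUTO_MODELS, ahead of the key saved for that provider and the provider's environment variable. A minimal sketch of that precedence, with invented names; in the commit this runs inline inside the loop:

// Sketch only: mirrors the fallback order added in this hunk.
function pickAutoApiKey(
  dyadProKey?: string,     // settings.providerSettings?.auto?.apiKey?.value
  perProviderKey?: string, // settings.providerSettings?.[autoModel.provider]?.apiKey
  envVarKey?: string,      // getEnvVar(PROVIDER_TO_ENV_VAR[autoModel.provider])
): string | undefined {
  // Dyad Pro key first, then the per-provider key from settings, then the env var.
  return dyadProKey || perProviderKey || envVarKey;
}
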
@@ -46,7 +48,6 @@ export function getModelClient(
     throw new Error("No API keys available for any model in AUTO_MODELS");
   }

-  const dyadApiKey = settings.providerSettings?.auto?.apiKey?.value;
   if (dyadApiKey && settings.enableDyadPro) {
     const provider = createOpenAI({
       apiKey: dyadApiKey,
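
Hoisting the dyadApiKey declaration above the auto branch is what fixes the error in the commit title: before this change the declaration sat here, after the auto block, so a user with only a Dyad Pro key and no per-provider keys hit the "No API keys available for any model in AUTO_MODELS" throw before the Dyad Pro branch could run. A self-contained before/after sketch of that behavior, using stand-in names rather than the repo's code:

// Stand-in sketch, not the repo's code: the loop over AUTO_MODELS is reduced
// to a loop over candidate per-provider keys.
type Key = string | undefined;

function oldAutoLookup(perProviderKeys: Key[]): string {
  // Old behavior: the loop only consulted per-provider settings and env vars.
  for (const key of perProviderKeys) if (key) return key;
  throw new Error("No API keys available for any model in AUTO_MODELS");
}

function newAutoLookup(dyadApiKey: Key, perProviderKeys: Key[]): string {
  // New behavior: the Dyad Pro key, now read before the loop, is tried first.
  for (const key of perProviderKeys) {
    const candidate = dyadApiKey || key;
    if (candidate) return candidate;
  }
  throw new Error("No API keys available for any model in AUTO_MODELS");
}

// Only a Dyad Pro key configured, no per-provider keys:
// oldAutoLookup([undefined, undefined])                  -> throws
// newAutoLookup("dyad-pro-key", [undefined, undefined])  -> "dyad-pro-key"
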
@@ -82,14 +83,14 @@ export function getModelClient(
     case "ollama": {
       const provider = createOllama();
       return provider(model.name);
     }
     case "lmstudio": {
       // Using LM Studio's OpenAI compatible API
       const baseURL = "http://localhost:1234/v1"; // Default LM Studio OpenAI API URL
       const provider = createOpenAICompatible({ name: "lmstudio", baseURL });
       return provider(model.name);
     }
     default: {
       // Ensure exhaustive check if more providers are added
       const _exhaustiveCheck: never = model.provider;
       throw new Error(`Unsupported model provider: ${model.provider}`);
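
The default branch above uses TypeScript's never-based exhaustiveness check: if the union behind model.provider gains a member without a matching case, the assignment to _exhaustiveCheck stops compiling. A self-contained illustration of the pattern; the Provider union and describe function are invented for the example and are not part of the repo:

type Provider = "ollama" | "lmstudio"; // illustrative union, not the repo's

function describe(p: Provider): string {
  switch (p) {
    case "ollama":
      return "local Ollama daemon";
    case "lmstudio":
      return "LM Studio's OpenAI-compatible server";
    default: {
      // If Provider gains a member that has no case above, p is no longer
      // narrowed to never here and this assignment becomes a compile error.
      const _exhaustiveCheck: never = p;
      throw new Error(`Unsupported provider: ${_exhaustiveCheck}`);
    }
  }
}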