+ If you see a "model not found" error, try a different region. Some
+ partner models (MaaS) are only available in specific locations
+ (e.g., us-central1, us-west2).
+
+
+
+
+
+
+
+
+
+ {saved && !error && (
+
+ Saved
+
+ )}
+
+
+ {!isConfigured && (
+
+
+ Configuration Required
+
+ Provide Project, Location, and a service account JSON key with
+ Vertex AI access.
+
+
+ )}
+
+ {error && (
+
+ Save Error
+ {error}
+
+ )}
+
+ );
+}
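The region note at the top of this form maps directly onto the endpoint host that get_model_client.ts builds further down. A rough sketch (project and location values are placeholders, not defaults from this PR):

```ts
// Illustrative only: how the location setting determines the Vertex endpoint,
// mirroring the baseURL construction in get_model_client.ts below.
const project = "my-gcp-project"; // placeholder
const location: string = "us-central1"; // partner (MaaS) models are often limited to such regions
const regionHost = `${location === "global" ? "" : `${location}-`}aiplatform.googleapis.com`;
const baseURL = `https://${regionHost}/v1/projects/${project}/locations/${location}`;
// => "https://us-central1-aiplatform.googleapis.com/v1/projects/my-gcp-project/locations/us-central1"
```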
diff --git a/src/hooks/useLanguageModelProviders.ts b/src/hooks/useLanguageModelProviders.ts
index 68793a9..d37fd54 100644
--- a/src/hooks/useLanguageModelProviders.ts
+++ b/src/hooks/useLanguageModelProviders.ts
@@ -2,7 +2,7 @@ import { useQuery } from "@tanstack/react-query";
import { IpcClient } from "@/ipc/ipc_client";
import type { LanguageModelProvider } from "@/ipc/ipc_types";
import { useSettings } from "./useSettings";
-import { cloudProviders } from "@/lib/schemas";
+import { cloudProviders, VertexProviderSetting } from "@/lib/schemas";
export function useLanguageModelProviders() {
const ipcClient = IpcClient.getInstance();
@@ -20,6 +20,18 @@ export function useLanguageModelProviders() {
if (queryResult.isLoading) {
return false;
}
+ // Vertex uses service account credentials instead of an API key
+ if (provider === "vertex") {
+ const vertexSettings = providerSettings as VertexProviderSetting;
+ if (
+ vertexSettings?.serviceAccountKey?.value &&
+ vertexSettings?.projectId &&
+ vertexSettings?.location
+ ) {
+ return true;
+ }
+ return false;
+ }
if (providerSettings?.apiKey?.value) {
return true;
}
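For reference, a `vertex` entry in providerSettings that satisfies this configured check would look roughly like the sketch below (values are placeholders and the Secret shape is abbreviated):

```ts
// Illustrative only: a providerSettings entry that passes the Vertex check above.
const providerSettings = {
  vertex: {
    projectId: "my-gcp-project",
    location: "us-central1",
    serviceAccountKey: { value: "<pasted service account JSON>" },
  },
};
// Missing any of the three fields (e.g. no location) makes the check return false.
```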
diff --git a/src/ipc/handlers/chat_stream_handlers.ts b/src/ipc/handlers/chat_stream_handlers.ts
index 25ce038..39628a3 100644
--- a/src/ipc/handlers/chat_stream_handlers.ts
+++ b/src/ipc/handlers/chat_stream_handlers.ts
@@ -667,28 +667,53 @@ This conversation includes one or more image attachments. When the user uploads
} else {
logger.log("sending AI request");
}
+ // Build provider options with correct Google/Vertex thinking config gating
+ const providerOptions: Record<string, any> = {
+ "dyad-engine": {
+ dyadRequestId,
+ },
+ "dyad-gateway": getExtraProviderOptions(
+ modelClient.builtinProviderId,
+ settings,
+ ),
+ openai: {
+ reasoningSummary: "auto",
+ } satisfies OpenAIResponsesProviderOptions,
+ };
+
+ // Conditionally include Google thinking config only for supported models
+ const selectedModelName = settings.selectedModel.name || "";
+ const providerId = modelClient.builtinProviderId;
+ const isVertex = providerId === "vertex";
+ const isGoogle = providerId === "google";
+ const isPartnerModel = selectedModelName.includes("/");
+ const isGeminiModel = selectedModelName.startsWith("gemini");
+ const isFlashLite = selectedModelName.includes("flash-lite");
+
+ // Keep Google provider behavior unchanged: always include includeThoughts
+ if (isGoogle) {
+ providerOptions.google = {
+ thinkingConfig: {
+ includeThoughts: true,
+ },
+ } satisfies GoogleGenerativeAIProviderOptions;
+ }
+
+ // Vertex-specific fix: only enable thinking on supported Gemini models
+ if (isVertex && isGeminiModel && !isFlashLite && !isPartnerModel) {
+ providerOptions.google = {
+ thinkingConfig: {
+ includeThoughts: true,
+ },
+ } satisfies GoogleGenerativeAIProviderOptions;
+ }
+
return streamText({
maxOutputTokens: await getMaxTokens(settings.selectedModel),
temperature: await getTemperature(settings.selectedModel),
maxRetries: 2,
model: modelClient.model,
- providerOptions: {
- "dyad-engine": {
- dyadRequestId,
- },
- "dyad-gateway": getExtraProviderOptions(
- modelClient.builtinProviderId,
- settings,
- ),
- google: {
- thinkingConfig: {
- includeThoughts: true,
- },
- } satisfies GoogleGenerativeAIProviderOptions,
- openai: {
- reasoningSummary: "auto",
- } satisfies OpenAIResponsesProviderOptions,
- },
+ providerOptions,
system: systemPrompt,
messages: chatMessages.filter((m) => m.content),
onError: (error: any) => {
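The gating above can be read as a small predicate. A sketch restating it (the function name is invented for illustration and is not part of this PR):

```ts
// Sketch of the thinking-config gating above as a standalone predicate.
function shouldIncludeThoughts(
  providerId: string | undefined,
  modelName: string,
): boolean {
  if (providerId === "google") return true; // Google provider behavior is unchanged
  if (providerId !== "vertex") return false;
  const isGemini = modelName.startsWith("gemini");
  const isFlashLite = modelName.includes("flash-lite");
  const isPartnerModel = modelName.includes("/"); // MaaS models use full publisher paths
  return isGemini && !isFlashLite && !isPartnerModel;
}

shouldIncludeThoughts("vertex", "gemini-2.5-pro"); // => true
shouldIncludeThoughts("vertex", "gemini-2.5-flash-lite"); // => false (flash-lite)
shouldIncludeThoughts("vertex", "publishers/deepseek-ai/models/..."); // => false (partner model)
```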
diff --git a/src/ipc/shared/language_model_helpers.ts b/src/ipc/shared/language_model_helpers.ts
index cfe112e..625ebf0 100644
--- a/src/ipc/shared/language_model_helpers.ts
+++ b/src/ipc/shared/language_model_helpers.ts
@@ -8,6 +8,7 @@ import { eq } from "drizzle-orm";
export const PROVIDERS_THAT_SUPPORT_THINKING: (keyof typeof MODEL_OPTIONS)[] = [
"google",
+ "vertex",
"auto",
];
@@ -144,6 +145,26 @@ export const MODEL_OPTIONS: Record = {
dollarSigns: 2,
},
],
+ vertex: [
+ // Vertex Gemini 2.5 Pro
+ {
+ name: "gemini-2.5-pro",
+ displayName: "Gemini 2.5 Pro",
+ description: "Vertex Gemini 2.5 Pro",
+ maxOutputTokens: 65_536 - 1,
+ contextWindow: 1_048_576,
+ temperature: 0,
+ },
+ // Vertex Gemini 2.5 Flash
+ {
+ name: "gemini-2.5-flash",
+ displayName: "Gemini 2.5 Flash",
+ description: "Vertex Gemini 2.5 Flash",
+ maxOutputTokens: 65_536 - 1,
+ contextWindow: 1_048_576,
+ temperature: 0,
+ },
+ ],
openrouter: [
{
name: "qwen/qwen3-coder",
@@ -270,6 +291,14 @@ export const CLOUD_PROVIDERS: Record<
websiteUrl: "https://aistudio.google.com/app/apikey",
gatewayPrefix: "gemini/",
},
+ vertex: {
+ displayName: "Google Vertex AI",
+ hasFreeTier: false,
+ websiteUrl: "https://console.cloud.google.com/vertex-ai",
+ // Use the same gateway prefix as Google Gemini for Dyad Pro compatibility.
+ gatewayPrefix: "gemini/",
+ secondary: true,
+ },
openrouter: {
displayName: "OpenRouter",
hasFreeTier: true,
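Partner (MaaS) models would slot into the same `vertex` list, just with the full publisher path as the model name; that path is what get_model_client.ts passes through unchanged and what keeps these models out of the thinking-config gating. A hedged sketch of such an entry (not part of this PR; the model id is a placeholder and the limits are illustrative, not vendor numbers):

```ts
// Hypothetical MODEL_OPTIONS entry for a Vertex partner (MaaS) model.
const examplePartnerModelEntry = {
  name: "publishers/deepseek-ai/models/<model-id>", // full publisher path, used as-is
  displayName: "DeepSeek (Vertex MaaS)",
  description: "Example partner model served through Vertex AI",
  maxOutputTokens: 8_192, // illustrative
  contextWindow: 128_000, // illustrative
  temperature: 0,
};
```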
diff --git a/src/ipc/utils/get_model_client.ts b/src/ipc/utils/get_model_client.ts
index 2ff8a80..5e8c5e7 100644
--- a/src/ipc/utils/get_model_client.ts
+++ b/src/ipc/utils/get_model_client.ts
@@ -1,10 +1,15 @@
import { createOpenAI } from "@ai-sdk/openai";
import { createGoogleGenerativeAI as createGoogle } from "@ai-sdk/google";
import { createAnthropic } from "@ai-sdk/anthropic";
+import { createVertex as createGoogleVertex } from "@ai-sdk/google-vertex";
import { azure } from "@ai-sdk/azure";
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { createOpenAICompatible } from "@ai-sdk/openai-compatible";
-import type { LargeLanguageModel, UserSettings } from "../../lib/schemas";
+import type {
+ LargeLanguageModel,
+ UserSettings,
+ VertexProviderSetting,
+} from "../../lib/schemas";
import { getEnvVar } from "./read_env";
import log from "electron-log";
import { getLanguageModelProviders } from "../shared/language_model_helpers";
@@ -216,6 +221,45 @@ function getRegularModelClient(
backupModelClients: [],
};
}
+ case "vertex": {
+ // Vertex uses Google service account credentials with project/location
+ const vertexSettings = settings.providerSettings?.[
+ model.provider
+ ] as VertexProviderSetting;
+ const project = vertexSettings?.projectId;
+ const location = vertexSettings?.location;
+ const serviceAccountKey = vertexSettings?.serviceAccountKey?.value;
+
+ // Use a baseURL that does NOT pin to publishers/google so that
+ // full publisher model IDs (e.g. publishers/deepseek-ai/models/...) work.
+ const regionHost = `${location === "global" ? "" : `${location}-`}aiplatform.googleapis.com`;
+ const baseURL = `https://${regionHost}/v1/projects/${project}/locations/${location}`;
+ const provider = createGoogleVertex({
+ project,
+ location,
+ baseURL,
+ googleAuthOptions: serviceAccountKey
+ ? {
+ // Expecting the user to paste the full JSON of the service account key
+ credentials: JSON.parse(serviceAccountKey),
+ }
+ : undefined,
+ });
+ return {
+ modelClient: {
+ // For built-in Google models on Vertex, the path must include
+ // publishers/google/models/. For partner MaaS models the
+ // full publisher path is already included.
+ model: provider(
+ model.name.includes("/")
+ ? model.name
+ : `publishers/google/models/${model.name}`,
+ ),
+ builtinProviderId: providerId,
+ },
+ backupModelClients: [],
+ };
+ }
case "openrouter": {
const provider = createOpenRouter({ apiKey });
return {
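Concretely, the model-path handling in the `vertex` case resolves names as follows (a sketch; `resolveVertexModelPath` is an invented name for the inline expression above):

```ts
// Mirrors the inline expression above: built-in Gemini names get the
// publishers/google/models/ prefix, full publisher paths pass through.
function resolveVertexModelPath(modelName: string): string {
  return modelName.includes("/")
    ? modelName
    : `publishers/google/models/${modelName}`;
}

resolveVertexModelPath("gemini-2.5-pro");
// => "publishers/google/models/gemini-2.5-pro"
resolveVertexModelPath("publishers/deepseek-ai/models/<model-id>");
// => returned unchanged
```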
diff --git a/src/lib/schemas.ts b/src/lib/schemas.ts
index 097a283..9dca45c 100644
--- a/src/lib/schemas.ts
+++ b/src/lib/schemas.ts
@@ -30,6 +30,7 @@ const providers = [
"openai",
"anthropic",
"google",
+ "vertex",
"auto",
"openrouter",
"ollama",
@@ -57,15 +58,35 @@ export type LargeLanguageModel = z.infer<typeof LargeLanguageModelSchema>;
/**
* Zod schema for provider settings
+ * Regular providers use only apiKey. Vertex has additional optional fields.
*/
-export const ProviderSettingSchema = z.object({
+export const RegularProviderSettingSchema = z.object({
apiKey: SecretSchema.optional(),
});
+export const VertexProviderSettingSchema = z.object({
+ // apiKey is intentionally undefined so that existing call sites that read apiKey stay simple.
+ apiKey: z.undefined(),
+ projectId: z.string().optional(),
+ location: z.string().optional(),
+ serviceAccountKey: SecretSchema.optional(),
+});
+
+export const ProviderSettingSchema = z.union([
+ // The more specific schema must come first!
+ // Zod returns the first union member that parses successfully.
+ VertexProviderSettingSchema,
+ RegularProviderSettingSchema,
+]);
+
/**
* Type derived from the ProviderSettingSchema
*/
export type ProviderSetting = z.infer<typeof ProviderSettingSchema>;
+export type RegularProviderSetting = z.infer<
+ typeof RegularProviderSettingSchema
+>;
+export type VertexProviderSetting = z.infer<typeof VertexProviderSettingSchema>;
export const RuntimeModeSchema = z.enum(["web-sandbox", "local-node", "unset"]);
export type RuntimeMode = z.infer<typeof RuntimeModeSchema>;
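Why the ordering comment matters: z.union returns the first member that parses, and a plain z.object strips unknown keys by default, so the regular schema would accept a Vertex settings object and drop its extra fields. A sketch (assuming SecretSchema accepts this abbreviated shape):

```ts
// Sketch: why VertexProviderSettingSchema must come before
// RegularProviderSettingSchema in the union above.
const vertexInput = {
  projectId: "my-gcp-project",
  location: "us-central1",
  serviceAccountKey: { value: "<service account JSON>" },
};

// With the Vertex schema first, the Vertex fields survive parsing:
ProviderSettingSchema.parse(vertexInput);

// If RegularProviderSettingSchema were listed first, the same input would
// match it instead (apiKey is optional and unknown keys are stripped),
// silently dropping projectId, location, and serviceAccountKey.
```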
diff --git a/src/main/settings.ts b/src/main/settings.ts
index 4153853..f9c8ca5 100644
--- a/src/main/settings.ts
+++ b/src/main/settings.ts
@@ -1,7 +1,12 @@
import fs from "node:fs";
import path from "node:path";
import { getUserDataPath } from "../paths/paths";
-import { UserSettingsSchema, type UserSettings, Secret } from "../lib/schemas";
+import {
+ UserSettingsSchema,
+ type UserSettings,
+ Secret,
+ VertexProviderSetting,
+} from "../lib/schemas";
import { safeStorage } from "electron";
import { v4 as uuidv4 } from "uuid";
import log from "electron-log";
@@ -114,6 +119,17 @@ export function readSettings(): UserSettings {
encryptionType,
};
}
+ // Decrypt Vertex service account key if present
+ const v = combinedSettings.providerSettings[
+ provider
+ ] as VertexProviderSetting;
+ if (provider === "vertex" && v?.serviceAccountKey) {
+ const encryptionType = v.serviceAccountKey.encryptionType;
+ v.serviceAccountKey = {
+ value: decrypt(v.serviceAccountKey),
+ encryptionType,
+ };
+ }
}
// Validate and merge with defaults
@@ -171,6 +187,11 @@ export function writeSettings(settings: Partial<UserSettings>): void {
newSettings.providerSettings[provider].apiKey.value,
);
}
+ // Encrypt Vertex service account key if present
+ const v = newSettings.providerSettings[provider] as VertexProviderSetting;
+ if (provider === "vertex" && v?.serviceAccountKey) {
+ v.serviceAccountKey = encrypt(v.serviceAccountKey.value);
+ }
}
const validatedSettings = UserSettingsSchema.parse(newSettings);
fs.writeFileSync(filePath, JSON.stringify(validatedSettings, null, 2));
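Net effect of the two branches above, sketched with abbreviated shapes (encrypt/decrypt are the existing helpers in this file; the encryptionType values are placeholders, mirroring the apiKey handling a few lines up):

```ts
// Rough before/after of the serviceAccountKey secret.
const onDisk = {
  serviceAccountKey: { value: "<ciphertext from encrypt()>", encryptionType: "<set by encrypt()>" },
};
const afterReadSettings = {
  serviceAccountKey: { value: "<decrypted service account JSON>", encryptionType: "<unchanged>" },
};
```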