From d571d303eb31a24ec013c6dd2f0f060447eb4fc1 Mon Sep 17 00:00:00 2001
From: Will Chen
Date: Thu, 16 Oct 2025 17:19:30 -0700
Subject: [PATCH] Selected component engine (#1562)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary by cubic

Enable the Dyad Engine to prioritize a user-selected component by flagging its file as focused and sending codebase files with each request. Keeps the full chat context when smart context is enabled; falls back to path-scoped context when it is off.

- **New Features**
  - Mark the selected component's file as focused when smart context is enabled.
  - Send codebase files to the engine via dyadFiles, applied to dyad_options unless disabled.
  - Maintain the full chatContext when smart context is on; restrict it to the selected file path only when smart context is off.

- **Refactors**
  - Removed `files` from getModelClient and the provider APIs; file transport moved into the request payload.
  - Stream handlers now pass files to model calls and include dyadDisableFiles/dyadFiles in the request.
  - Added a `focused` flag to CodebaseFile.

---

> [!NOTE]
> Sends codebase files per request to the Dyad Engine, focuses the selected component when smart context is enabled, and refactors the model client/provider APIs to remove file parameters.
>
> - **Engine integration**:
>   - Send codebase files per request via `dyadFiles` in provider options; propagate through `simpleStreamText` and the `dyad-engine` options.
>   - Add `isSmartContextEnabled` from `get_model_client` and gate context behavior accordingly.
> - **Selected component focus**:
>   - When smart context is on and a component is selected, mark its file as `focused` in `CodebaseFile` and avoid broad smart context includes; allow on-demand reads.
>   - When smart context is off, restrict `chatContext` to the selected file path.
> - **Refactors**:
>   - Remove the `files` parameter from `getModelClient` and the Dyad provider; move file transport into the request body.
>   - Update `llm_engine_provider` to read `dyadFiles` from the request and populate `dyad_options.files` unless `dyadDisableFiles` is set.
>   - Extend `CodebaseFile` with an optional `focused` flag; thread `files` through the `chat_stream_handlers` calls.
>
> Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit 022b26d0197ab5b5d4f5b589f45bc230de36e0e5. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot).
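For reviewers skimming the diff below, here is a minimal TypeScript sketch of the flow the summary describes. It reuses the `CodebaseFile` shape from `src/utils/codebase.ts` and the `dyadFiles`/`dyadDisableFiles` option names from the diff; the `markFocusedFile` helper, the sample data, and the inline provider-options literal are illustrative stand-ins, not code from this patch.

```ts
// Shape from src/utils/codebase.ts after this patch (`focused` is the new flag).
type CodebaseFile = {
  path: string;
  content: string;
  focused?: boolean;
  force?: boolean;
};

// Illustrative helper: when smart context is enabled and a component is selected,
// flag that component's file as focused instead of narrowing the whole chat
// context to its path.
function markFocusedFile(
  files: CodebaseFile[],
  selectedPath: string,
): CodebaseFile[] {
  for (const file of files) {
    if (file.path === selectedPath) {
      file.focused = true;
    }
  }
  return files;
}

// Sample data for illustration only.
const files: CodebaseFile[] = [
  { path: "src/components/Button.tsx", content: "export const Button = () => null;" },
  { path: "src/App.tsx", content: "export const App = () => null;" },
];

// Files now travel with each request in provider options instead of being
// bound to the model client up front.
const providerOptions = {
  "dyad-engine": {
    dyadDisableFiles: false,
    dyadFiles: markFocusedFile(files, "src/components/Button.tsx"),
  },
};

console.log(providerOptions["dyad-engine"].dyadFiles);
```

The engine provider then lifts `dyadFiles` out of the request body into `dyad_options.files` unless `dyadDisableFiles` is set, as shown in the `llm_engine_provider.ts` hunks below.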
---
 src/ipc/handlers/chat_stream_handlers.ts | 64 ++++++++++++++++--------
 src/ipc/utils/get_model_client.ts        | 17 +++----
 src/ipc/utils/llm_engine_provider.ts     | 26 ++++------
 src/utils/codebase.ts                    |  1 +
 4 files changed, 60 insertions(+), 48 deletions(-)

diff --git a/src/ipc/handlers/chat_stream_handlers.ts b/src/ipc/handlers/chat_stream_handlers.ts
index b8e901e..a26a0c8 100644
--- a/src/ipc/handlers/chat_stream_handlers.ts
+++ b/src/ipc/handlers/chat_stream_handlers.ts
@@ -25,7 +25,11 @@ import {
 import { getDyadAppPath } from "../../paths/paths";
 import { readSettings } from "../../main/settings";
 import type { ChatResponseEnd, ChatStreamParams } from "../ipc_types";
-import { extractCodebase, readFileWithCache } from "../../utils/codebase";
+import {
+  CodebaseFile,
+  extractCodebase,
+  readFileWithCache,
+} from "../../utils/codebase";
 import { processFullResponseActions } from "../processors/response_processor";
 import { streamTestResponse } from "./testing_chat_handlers";
 import { getTestResponse } from "./testing_chat_handlers";
@@ -437,21 +441,26 @@ ${componentSnippet}
       );
     } else {
       // Normal AI processing for non-test prompts
+      const { modelClient, isEngineEnabled, isSmartContextEnabled } =
+        await getModelClient(settings.selectedModel, settings);
       const appPath = getDyadAppPath(updatedChat.app.path);

-      const chatContext = req.selectedComponent
-        ? {
-            contextPaths: [
-              {
-                globPath: req.selectedComponent.relativePath,
-              },
-            ],
-            smartContextAutoIncludes: [],
-          }
-        : validateChatContext(updatedChat.app.chatContext);
-
-      // Parse app mentions from the prompt
-      const mentionedAppNames = parseAppMentions(req.prompt);
+      // When we don't have smart context enabled, we
+      // only include the selected component's file for codebase context.
+      //
+      // If we have selected component and smart context is enabled,
+      // we handle this specially below.
+      const chatContext =
+        req.selectedComponent && !isSmartContextEnabled
+          ? {
+              contextPaths: [
+                {
+                  globPath: req.selectedComponent.relativePath,
+                },
+              ],
+              smartContextAutoIncludes: [],
+            }
+          : validateChatContext(updatedChat.app.chatContext);

       // Extract codebase for current app
       const { formattedOutput: codebaseInfo, files } = await extractCodebase({
@@ -459,6 +468,20 @@ ${componentSnippet}
         chatContext,
       });

+      // For smart context and selected component, we will mark the selected component's file as focused.
+      // This means that we don't do the regular smart context handling, but we'll allow fetching
+      // additional files through as needed.
+      if (isSmartContextEnabled && req.selectedComponent) {
+        for (const file of files) {
+          if (file.path === req.selectedComponent.relativePath) {
+            file.focused = true;
+          }
+        }
+      }
+
+      // Parse app mentions from the prompt
+      const mentionedAppNames = parseAppMentions(req.prompt);
+
       // Extract codebases for mentioned apps
       const mentionedAppsCodebases = await extractMentionedAppsCodebases(
         mentionedAppNames,
@@ -489,11 +512,6 @@ ${componentSnippet}
         "estimated tokens",
         codebaseInfo.length / 4,
       );
-      const { modelClient, isEngineEnabled } = await getModelClient(
-        settings.selectedModel,
-        settings,
-        files,
-      );

       // Prepare message history for the AI
       const messageHistory = updatedChat.messages.map((message) => ({
@@ -709,9 +727,11 @@ This conversation includes one or more image attachments. When the user uploads
   tools,
   systemPromptOverride = systemPrompt,
   dyadDisableFiles = false,
+  files,
 }: {
   chatMessages: ModelMessage[];
   modelClient: ModelClient;
+  files: CodebaseFile[];
   tools?: ToolSet;
   systemPromptOverride?: string;
   dyadDisableFiles?: boolean;
@@ -729,6 +749,7 @@ This conversation includes one or more image attachments. When the user uploads
       "dyad-engine": {
         dyadRequestId,
         dyadDisableFiles,
+        dyadFiles: files,
         dyadMentionedApps: mentionedAppsCodebases.map(
           ({ files, appName }) => ({
             appName,
@@ -878,6 +899,7 @@ This conversation includes one or more image attachments. When the user uploads
           aiRules: await readAiRules(getDyadAppPath(updatedChat.app.path)),
           chatMode: "agent",
         }),
+        files: files,
         dyadDisableFiles: true,
       });

@@ -903,6 +925,7 @@ This conversation includes one or more image attachments. When the user uploads
       const { fullStream } = await simpleStreamText({
         chatMessages,
         modelClient,
+        files: files,
       });

       // Process the stream as before
@@ -939,6 +962,7 @@ This conversation includes one or more image attachments. When the user uploads
           { role: "assistant", content: fullResponse },
         ],
         modelClient,
+        files: files,
       });
       for await (const part of contStream) {
         // If the stream was aborted, exit early
@@ -1020,11 +1044,11 @@ ${problemReport.problems
     const { modelClient } = await getModelClient(
       settings.selectedModel,
       settings,
-      files,
     );

     const { fullStream } = await simpleStreamText({
       modelClient,
+      files: files,
       chatMessages: [
         ...chatMessages.map((msg, index) => {
           if (
diff --git a/src/ipc/utils/get_model_client.ts b/src/ipc/utils/get_model_client.ts
index c771be0..182bf12 100644
--- a/src/ipc/utils/get_model_client.ts
+++ b/src/ipc/utils/get_model_client.ts
@@ -52,19 +52,15 @@ export interface ModelClient {
   builtinProviderId?: string;
 }

-interface File {
-  path: string;
-  content: string;
-}
-
 const logger = log.scope("getModelClient");

 export async function getModelClient(
   model: LargeLanguageModel,
   settings: UserSettings,
-  files?: File[],
+  // files?: File[],
 ): Promise<{
   modelClient: ModelClient;
   isEngineEnabled?: boolean;
+  isSmartContextEnabled?: boolean;
 }> {
   const allProviders = await getLanguageModelProviders();
@@ -84,6 +80,7 @@ export async function getModelClient(
     // IMPORTANT: some providers like OpenAI have an empty string gateway prefix,
     // so we do a nullish and not a truthy check here.
     if (providerConfig.gatewayPrefix != null || dyadEngineUrl) {
+      const enableSmartFilesContext = settings.enableProSmartFilesContextMode;
       const provider = createDyadEngine({
         apiKey: dyadApiKey,
         baseURL: dyadEngineUrl ?? "https://engine.dyad.sh/v1",
@@ -93,7 +90,7 @@ export async function getModelClient(
           settings.selectedChatMode === "ask"
             ? false
             : settings.enableProLazyEditsMode,
-        enableSmartFilesContext: settings.enableProSmartFilesContextMode,
+        enableSmartFilesContext,
         // Keep in sync with getCurrentValue in ProModeSelector.tsx
         smartContextMode: settings.proSmartContextOption ?? "balanced",
         enableWebSearch: settings.enableProWebSearch,
@@ -112,15 +109,14 @@ export async function getModelClient(
       // Do not use free variant (for openrouter).
       const modelName = model.name.split(":free")[0];

       const autoModelClient = {
-        model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`, {
-          files,
-        }),
+        model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`),
         builtinProviderId: model.provider,
       };
       return {
         modelClient: autoModelClient,
         isEngineEnabled: true,
+        isSmartContextEnabled: enableSmartFilesContext,
       };
     } else {
       logger.warn(
@@ -176,7 +172,6 @@ export async function getModelClient(
         name: autoModel.name,
       },
       settings,
-      files,
     );
   }
 }
diff --git a/src/ipc/utils/llm_engine_provider.ts b/src/ipc/utils/llm_engine_provider.ts
index 168287e..dcb6d0d 100644
--- a/src/ipc/utils/llm_engine_provider.ts
+++ b/src/ipc/utils/llm_engine_provider.ts
@@ -13,10 +13,7 @@ import { LanguageModelV2 } from "@ai-sdk/provider";
 const logger = log.scope("llm_engine_provider");

 export type ExampleChatModelId = string & {};
-
-export interface ExampleChatSettings {
-  files?: { path: string; content: string }[];
-}
+export interface ExampleChatSettings {}

 export interface ExampleProviderSettings {
   /** Example API key.
@@ -106,13 +103,7 @@ export function createDyadEngine(
     fetch: options.fetch,
   });

-  const createChatModel = (
-    modelId: ExampleChatModelId,
-    settings: ExampleChatSettings = {},
-  ) => {
-    // Extract files from settings to process them appropriately
-    const { files } = settings;
-
+  const createChatModel = (modelId: ExampleChatModelId) => {
     // Create configuration with file handling
     const config = {
       ...getCommonModelConfig(),
@@ -134,6 +125,10 @@ export function createDyadEngine(
           options.settings,
         ),
       };
+      const dyadFiles = parsedBody.dyadFiles;
+      if ("dyadFiles" in parsedBody) {
+        delete parsedBody.dyadFiles;
+      }
       const requestId = parsedBody.dyadRequestId;
       if ("dyadRequestId" in parsedBody) {
         delete parsedBody.dyadRequestId;
@@ -156,9 +151,9 @@ export function createDyadEngine(
       }

       // Add files to the request if they exist
-      if (files?.length && !dyadDisableFiles) {
+      if (dyadFiles?.length && !dyadDisableFiles) {
         parsedBody.dyad_options = {
-          files,
+          files: dyadFiles,
           enable_lazy_edits: options.dyadOptions.enableLazyEdits,
           enable_smart_files_context:
             options.dyadOptions.enableSmartFilesContext,
@@ -195,10 +190,7 @@ export function createDyadEngine(
     return new OpenAICompatibleChatLanguageModel(modelId, config);
   };

-  const provider = (
-    modelId: ExampleChatModelId,
-    settings?: ExampleChatSettings,
-  ) => createChatModel(modelId, settings);
+  const provider = (modelId: ExampleChatModelId) => createChatModel(modelId);

   provider.chatModel = createChatModel;

diff --git a/src/utils/codebase.ts b/src/utils/codebase.ts
index ec76c20..60f7f39 100644
--- a/src/utils/codebase.ts
+++ b/src/utils/codebase.ts
@@ -406,6 +406,7 @@ ${content}
 export type CodebaseFile = {
   path: string;
   content: string;
+  focused?: boolean;
   force?: boolean;
 };
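As a companion to the `llm_engine_provider.ts` hunks above, here is a simplified sketch of the request-body rewrite the provider now performs: `dyadFiles` is read off the parsed body, stripped, and republished under `dyad_options.files` unless `dyadDisableFiles` is set. The `moveFilesIntoDyadOptions` helper, the `EngineRequestBody` type, and the sample values are illustrative assumptions; the real code runs inside the provider's fetch wrapper and also forwards options such as `enable_lazy_edits` and `enable_smart_files_context`.

```ts
// Illustrative body type; the real provider works on a parsed JSON request body.
type EngineRequestBody = {
  dyadFiles?: { path: string; content: string; focused?: boolean }[];
  dyadDisableFiles?: boolean;
  dyad_options?: { files: { path: string; content: string; focused?: boolean }[] };
  [key: string]: unknown;
};

// Hypothetical helper mirroring the hunk above: remove the dyad-prefixed
// transport fields from the outgoing body and, unless files are disabled,
// re-attach them under dyad_options so the engine receives them.
function moveFilesIntoDyadOptions(parsedBody: EngineRequestBody): EngineRequestBody {
  const dyadFiles = parsedBody.dyadFiles;
  delete parsedBody.dyadFiles;

  const dyadDisableFiles = parsedBody.dyadDisableFiles;
  delete parsedBody.dyadDisableFiles;

  if (dyadFiles?.length && !dyadDisableFiles) {
    parsedBody.dyad_options = { files: dyadFiles };
  }
  return parsedBody;
}

// Usage sketch with made-up values.
const body = moveFilesIntoDyadOptions({
  model: "some-model",
  dyadFiles: [{ path: "src/App.tsx", content: "...", focused: true }],
  dyadDisableFiles: false,
});
console.log(body.dyad_options?.files.length); // 1
```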