diff --git a/src/ipc/handlers/chat_stream_handlers.ts b/src/ipc/handlers/chat_stream_handlers.ts
index b8e901e..a26a0c8 100644
--- a/src/ipc/handlers/chat_stream_handlers.ts
+++ b/src/ipc/handlers/chat_stream_handlers.ts
@@ -25,7 +25,11 @@ import {
 import { getDyadAppPath } from "../../paths/paths";
 import { readSettings } from "../../main/settings";
 import type { ChatResponseEnd, ChatStreamParams } from "../ipc_types";
-import { extractCodebase, readFileWithCache } from "../../utils/codebase";
+import {
+  CodebaseFile,
+  extractCodebase,
+  readFileWithCache,
+} from "../../utils/codebase";
 import { processFullResponseActions } from "../processors/response_processor";
 import { streamTestResponse } from "./testing_chat_handlers";
 import { getTestResponse } from "./testing_chat_handlers";
@@ -437,21 +441,26 @@ ${componentSnippet}
       );
     } else {
       // Normal AI processing for non-test prompts
+      const { modelClient, isEngineEnabled, isSmartContextEnabled } =
+        await getModelClient(settings.selectedModel, settings);
       const appPath = getDyadAppPath(updatedChat.app.path);
-      const chatContext = req.selectedComponent
-        ? {
-            contextPaths: [
-              {
-                globPath: req.selectedComponent.relativePath,
-              },
-            ],
-            smartContextAutoIncludes: [],
-          }
-        : validateChatContext(updatedChat.app.chatContext);
-
-      // Parse app mentions from the prompt
-      const mentionedAppNames = parseAppMentions(req.prompt);
+      // When we don't have smart context enabled, we
+      // only include the selected component's file for codebase context.
+      //
+      // If we have selected component and smart context is enabled,
+      // we handle this specially below.
+      const chatContext =
+        req.selectedComponent && !isSmartContextEnabled
+          ? {
+              contextPaths: [
+                {
+                  globPath: req.selectedComponent.relativePath,
+                },
+              ],
+              smartContextAutoIncludes: [],
+            }
+          : validateChatContext(updatedChat.app.chatContext);
 
       // Extract codebase for current app
       const { formattedOutput: codebaseInfo, files } = await extractCodebase({
@@ -459,6 +468,20 @@ ${componentSnippet}
         chatContext,
       });
 
+      // For smart context and selected component, we will mark the selected component's file as focused.
+      // This means that we don't do the regular smart context handling, but we'll allow fetching
+      // additional files through as needed.
+      if (isSmartContextEnabled && req.selectedComponent) {
+        for (const file of files) {
+          if (file.path === req.selectedComponent.relativePath) {
+            file.focused = true;
+          }
+        }
+      }
+
+      // Parse app mentions from the prompt
+      const mentionedAppNames = parseAppMentions(req.prompt);
+
       // Extract codebases for mentioned apps
       const mentionedAppsCodebases = await extractMentionedAppsCodebases(
         mentionedAppNames,
@@ -489,11 +512,6 @@ ${componentSnippet}
         "estimated tokens",
         codebaseInfo.length / 4,
       );
-      const { modelClient, isEngineEnabled } = await getModelClient(
-        settings.selectedModel,
-        settings,
-        files,
-      );
 
       // Prepare message history for the AI
       const messageHistory = updatedChat.messages.map((message) => ({
@@ -709,9 +727,11 @@ This conversation includes one or more image attachments. When the user uploads
   tools,
   systemPromptOverride = systemPrompt,
   dyadDisableFiles = false,
+  files,
 }: {
   chatMessages: ModelMessage[];
   modelClient: ModelClient;
+  files: CodebaseFile[];
   tools?: ToolSet;
   systemPromptOverride?: string;
   dyadDisableFiles?: boolean;
@@ -729,6 +749,7 @@ This conversation includes one or more image attachments. When the user uploads
       "dyad-engine": {
         dyadRequestId,
         dyadDisableFiles,
+        dyadFiles: files,
         dyadMentionedApps: mentionedAppsCodebases.map(
           ({ files, appName }) => ({
             appName,
@@ -878,6 +899,7 @@ This conversation includes one or more image attachments. When the user uploads
           aiRules: await readAiRules(getDyadAppPath(updatedChat.app.path)),
           chatMode: "agent",
         }),
+        files: files,
         dyadDisableFiles: true,
       });
 
@@ -903,6 +925,7 @@ This conversation includes one or more image attachments. When the user uploads
       const { fullStream } = await simpleStreamText({
         chatMessages,
         modelClient,
+        files: files,
       });
 
       // Process the stream as before
@@ -939,6 +962,7 @@ This conversation includes one or more image attachments. When the user uploads
             { role: "assistant", content: fullResponse },
           ],
           modelClient,
+          files: files,
         });
         for await (const part of contStream) {
           // If the stream was aborted, exit early
@@ -1020,11 +1044,11 @@ ${problemReport.problems
     const { modelClient } = await getModelClient(
       settings.selectedModel,
       settings,
-      files,
     );
 
     const { fullStream } = await simpleStreamText({
       modelClient,
+      files: files,
       chatMessages: [
         ...chatMessages.map((msg, index) => {
          if (
diff --git a/src/ipc/utils/get_model_client.ts b/src/ipc/utils/get_model_client.ts
index c771be0..182bf12 100644
--- a/src/ipc/utils/get_model_client.ts
+++ b/src/ipc/utils/get_model_client.ts
@@ -52,19 +52,15 @@ export interface ModelClient {
   builtinProviderId?: string;
 }
 
-interface File {
-  path: string;
-  content: string;
-}
-
 const logger = log.scope("getModelClient");
 
 export async function getModelClient(
   model: LargeLanguageModel,
   settings: UserSettings,
-  files?: File[],
+  // files?: File[],
 ): Promise<{
   modelClient: ModelClient;
   isEngineEnabled?: boolean;
+  isSmartContextEnabled?: boolean;
 }> {
   const allProviders = await getLanguageModelProviders();
@@ -84,6 +80,7 @@ export async function getModelClient(
     // IMPORTANT: some providers like OpenAI have an empty string gateway prefix,
     // so we do a nullish and not a truthy check here.
     if (providerConfig.gatewayPrefix != null || dyadEngineUrl) {
+      const enableSmartFilesContext = settings.enableProSmartFilesContextMode;
      const provider = createDyadEngine({
        apiKey: dyadApiKey,
        baseURL: dyadEngineUrl ?? "https://engine.dyad.sh/v1",
@@ -93,7 +90,7 @@
            settings.selectedChatMode === "ask"
              ? false
              : settings.enableProLazyEditsMode,
-        enableSmartFilesContext: settings.enableProSmartFilesContextMode,
+        enableSmartFilesContext,
         // Keep in sync with getCurrentValue in ProModeSelector.tsx
         smartContextMode: settings.proSmartContextOption ?? "balanced",
         enableWebSearch: settings.enableProWebSearch,
@@ -112,15 +109,14 @@
       // Do not use free variant (for openrouter).
       const modelName = model.name.split(":free")[0];
       const autoModelClient = {
-        model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`, {
-          files,
-        }),
+        model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`),
         builtinProviderId: model.provider,
       };
       return {
         modelClient: autoModelClient,
         isEngineEnabled: true,
+        isSmartContextEnabled: enableSmartFilesContext,
       };
     } else {
       logger.warn(
@@ -176,7 +172,6 @@ export async function getModelClient(
        name: autoModel.name,
      },
      settings,
-      files,
    );
  }
 }
diff --git a/src/ipc/utils/llm_engine_provider.ts b/src/ipc/utils/llm_engine_provider.ts
index 168287e..dcb6d0d 100644
--- a/src/ipc/utils/llm_engine_provider.ts
+++ b/src/ipc/utils/llm_engine_provider.ts
@@ -13,10 +13,7 @@ import { LanguageModelV2 } from "@ai-sdk/provider";
 const logger = log.scope("llm_engine_provider");
 
 export type ExampleChatModelId = string & {};
-
-export interface ExampleChatSettings {
-  files?: { path: string; content: string }[];
-}
+export interface ExampleChatSettings {}
 
 export interface ExampleProviderSettings {
   /** Example API key.
@@ -106,13 +103,7 @@
     fetch: options.fetch,
   });
 
-  const createChatModel = (
-    modelId: ExampleChatModelId,
-    settings: ExampleChatSettings = {},
-  ) => {
-    // Extract files from settings to process them appropriately
-    const { files } = settings;
-
+  const createChatModel = (modelId: ExampleChatModelId) => {
     // Create configuration with file handling
     const config = {
       ...getCommonModelConfig(),
@@ -134,6 +125,10 @@
             options.settings,
           ),
         };
+        const dyadFiles = parsedBody.dyadFiles;
+        if ("dyadFiles" in parsedBody) {
+          delete parsedBody.dyadFiles;
+        }
         const requestId = parsedBody.dyadRequestId;
         if ("dyadRequestId" in parsedBody) {
           delete parsedBody.dyadRequestId;
@@ -156,9 +151,9 @@
         }
 
         // Add files to the request if they exist
-        if (files?.length && !dyadDisableFiles) {
+        if (dyadFiles?.length && !dyadDisableFiles) {
           parsedBody.dyad_options = {
-            files,
+            files: dyadFiles,
             enable_lazy_edits: options.dyadOptions.enableLazyEdits,
             enable_smart_files_context:
               options.dyadOptions.enableSmartFilesContext,
@@ -195,10 +190,7 @@
     return new OpenAICompatibleChatLanguageModel(modelId, config);
   };
 
-  const provider = (
-    modelId: ExampleChatModelId,
-    settings?: ExampleChatSettings,
-  ) => createChatModel(modelId, settings);
+  const provider = (modelId: ExampleChatModelId) => createChatModel(modelId);
 
   provider.chatModel = createChatModel;
 
diff --git a/src/utils/codebase.ts b/src/utils/codebase.ts
index ec76c20..60f7f39 100644
--- a/src/utils/codebase.ts
+++ b/src/utils/codebase.ts
@@ -406,6 +406,7 @@ ${content}
 export type CodebaseFile = {
   path: string;
   content: string;
+  focused?: boolean;
   force?: boolean;
 };
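Reviewer note (not part of the patch): the sketch below illustrates the data flow this change introduces. Codebase files are no longer bound to the model when getModelClient constructs it; instead simpleStreamText forwards them per request as providerOptions["dyad-engine"].dyadFiles, and the engine provider lifts them back out of the serialized body into dyad_options.files. CodebaseFile, focused, dyadFiles, and dyad_options come from the diff above; helper names such as markFocusedFile and liftDyadFiles are hypothetical, and the real provider also copies the lazy-edits and smart-context flags into dyad_options.

// Illustrative TypeScript sketch only; names not in the diff are assumptions.
type CodebaseFile = {
  path: string;
  content: string;
  focused?: boolean;
  force?: boolean;
};

// Caller side (chat_stream_handlers.ts): when smart context is enabled and a
// component is selected, mark that component's file as focused before passing
// the list to simpleStreamText({ chatMessages, modelClient, files }).
function markFocusedFile(files: CodebaseFile[], selectedPath?: string): void {
  if (!selectedPath) return;
  for (const file of files) {
    if (file.path === selectedPath) {
      file.focused = true;
    }
  }
}

// Provider side (llm_engine_provider.ts): lift dyadFiles out of the parsed
// request body and re-attach it under dyad_options.files. The real code also
// sets enable_lazy_edits / enable_smart_files_context alongside the files.
function liftDyadFiles(parsedBody: Record<string, unknown>): void {
  const dyadFiles = parsedBody.dyadFiles as CodebaseFile[] | undefined;
  if ("dyadFiles" in parsedBody) {
    delete parsedBody.dyadFiles;
  }
  if (dyadFiles?.length) {
    parsedBody.dyad_options = { files: dyadFiles };
  }
}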