Selected component engine (#1562)

<!-- This is an auto-generated description by cubic. -->

## Summary by cubic
Enables the Dyad Engine to prioritize a user-selected component by
flagging that component's file as focused and sending codebase files with
each request. Full chat context is kept when smart context is enabled;
otherwise context falls back to the selected component's file path (see
the sketch below).

- **New Features**
  - Mark the selected component's file as `focused` when smart context is
    enabled.
  - Send codebase files to the engine via `dyadFiles`, applied to
    `dyad_options` unless `dyadDisableFiles` is set.
  - Maintain the full `chatContext` when smart context is enabled;
    restrict it to the selected file path when it is off.

- **Refactors**
  - Removed `files` from `getModelClient` and the provider APIs; file
    transport moved into the request payload.
  - Stream handlers now pass `files` to model calls and include
    `dyadDisableFiles`/`dyadFiles` in the request.
  - Added a `focused` flag to `CodebaseFile`.

<!-- End of auto-generated description by cubic. -->
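
A minimal sketch of the two behaviors described above, written against simplified stand-in types. Only the branching mirrors the `chat_stream_handlers` diff further down; the function boundaries and type shapes are illustrative.

```ts
// Stand-in types; the real CodebaseFile lives in utils/codebase.
type CodebaseFile = { path: string; content: string; focused?: boolean };

interface SelectedComponent {
  relativePath: string;
}

// When smart context is off, restrict codebase context to the selected
// component's file; otherwise keep the app's full chat context.
function resolveContextPaths(
  selected: SelectedComponent | undefined,
  isSmartContextEnabled: boolean,
  fullContextPaths: string[],
): string[] {
  return selected && !isSmartContextEnabled
    ? [selected.relativePath]
    : fullContextPaths;
}

// When smart context is on, flag the selected component's file as focused so
// the engine prioritizes it; other files stay available via on-demand reads.
function markSelectedComponentFocused(
  files: CodebaseFile[],
  selected: SelectedComponent | undefined,
  isSmartContextEnabled: boolean,
): void {
  if (!isSmartContextEnabled || !selected) return;
  for (const file of files) {
    if (file.path === selected.relativePath) {
      file.focused = true;
    }
  }
}
```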

<!-- CURSOR_SUMMARY -->
---

> [!NOTE]
> Sends codebase files per request to the Dyad Engine, focuses the
> selected component when smart context is enabled, and refactors model
> client/provider APIs to remove file parameters.
>
> - **Engine integration**:
>   - Send codebase files per request via `dyadFiles` in provider options;
>     propagate through `simpleStreamText` and `dyad-engine` options.
>   - Add `isSmartContextEnabled` from `get_model_client` and gate context
>     behavior accordingly.
> - **Selected component focus**:
>   - When smart context is on and a component is selected, mark its file
>     as `focused` in `CodebaseFile` and avoid broad smart context
>     includes; allow on-demand reads.
>   - When smart context is off, restrict `chatContext` to the selected
>     file path.
> - **Refactors**:
>   - Remove the `files` parameter from `getModelClient` and the Dyad
>     provider; move file transport into the request body.
>   - Update `llm_engine_provider` to read `dyadFiles` from the request and
>     populate `dyad_options.files` unless `dyadDisableFiles`.
>   - Extend `CodebaseFile` with an optional `focused` flag; thread `files`
>     through the `chat_stream_handlers` calls.
>
> <sup>Written by [Cursor Bugbot](https://cursor.com/dashboard?tab=bugbot) for commit 022b26d0197ab5b5d4f5b589f45bc230de36e0e5. This will update automatically on new commits. Configure [here](https://cursor.com/dashboard?tab=bugbot).</sup>
<!-- /CURSOR_SUMMARY -->
Will Chen
2025-10-16 17:19:30 -07:00
committed by GitHub
parent eae22bed90
commit d571d303eb
4 changed files with 60 additions and 48 deletions

View File

@@ -25,7 +25,11 @@ import {
import { getDyadAppPath } from "../../paths/paths";
import { readSettings } from "../../main/settings";
import type { ChatResponseEnd, ChatStreamParams } from "../ipc_types";
import { extractCodebase, readFileWithCache } from "../../utils/codebase";
import {
CodebaseFile,
extractCodebase,
readFileWithCache,
} from "../../utils/codebase";
import { processFullResponseActions } from "../processors/response_processor";
import { streamTestResponse } from "./testing_chat_handlers";
import { getTestResponse } from "./testing_chat_handlers";
@@ -437,9 +441,17 @@ ${componentSnippet}
);
} else {
// Normal AI processing for non-test prompts
const { modelClient, isEngineEnabled, isSmartContextEnabled } =
await getModelClient(settings.selectedModel, settings);
const appPath = getDyadAppPath(updatedChat.app.path);
const chatContext = req.selectedComponent
// When we don't have smart context enabled, we
// only include the selected component's file for codebase context.
//
// If we have selected component and smart context is enabled,
// we handle this specially below.
const chatContext =
req.selectedComponent && !isSmartContextEnabled
? {
contextPaths: [
{
@@ -450,15 +462,26 @@ ${componentSnippet}
}
: validateChatContext(updatedChat.app.chatContext);
// Parse app mentions from the prompt
const mentionedAppNames = parseAppMentions(req.prompt);
// Extract codebase for current app
const { formattedOutput: codebaseInfo, files } = await extractCodebase({
appPath,
chatContext,
});
// For smart context and selected component, we will mark the selected component's file as focused.
// This means that we don't do the regular smart context handling, but we'll allow fetching
// additional files through <dyad-read> as needed.
if (isSmartContextEnabled && req.selectedComponent) {
for (const file of files) {
if (file.path === req.selectedComponent.relativePath) {
file.focused = true;
}
}
}
// Parse app mentions from the prompt
const mentionedAppNames = parseAppMentions(req.prompt);
// Extract codebases for mentioned apps
const mentionedAppsCodebases = await extractMentionedAppsCodebases(
mentionedAppNames,
@@ -489,11 +512,6 @@ ${componentSnippet}
"estimated tokens",
codebaseInfo.length / 4,
);
const { modelClient, isEngineEnabled } = await getModelClient(
settings.selectedModel,
settings,
files,
);
// Prepare message history for the AI
const messageHistory = updatedChat.messages.map((message) => ({
@@ -709,9 +727,11 @@ This conversation includes one or more image attachments. When the user uploads
tools,
systemPromptOverride = systemPrompt,
dyadDisableFiles = false,
files,
}: {
chatMessages: ModelMessage[];
modelClient: ModelClient;
files: CodebaseFile[];
tools?: ToolSet;
systemPromptOverride?: string;
dyadDisableFiles?: boolean;
@@ -729,6 +749,7 @@ This conversation includes one or more image attachments. When the user uploads
"dyad-engine": {
dyadRequestId,
dyadDisableFiles,
dyadFiles: files,
dyadMentionedApps: mentionedAppsCodebases.map(
({ files, appName }) => ({
appName,
@@ -878,6 +899,7 @@ This conversation includes one or more image attachments. When the user uploads
aiRules: await readAiRules(getDyadAppPath(updatedChat.app.path)),
chatMode: "agent",
}),
files: files,
dyadDisableFiles: true,
});
@@ -903,6 +925,7 @@ This conversation includes one or more image attachments. When the user uploads
const { fullStream } = await simpleStreamText({
chatMessages,
modelClient,
files: files,
});
// Process the stream as before
@@ -939,6 +962,7 @@ This conversation includes one or more image attachments. When the user uploads
{ role: "assistant", content: fullResponse },
],
modelClient,
files: files,
});
for await (const part of contStream) {
// If the stream was aborted, exit early
@@ -1020,11 +1044,11 @@ ${problemReport.problems
const { modelClient } = await getModelClient(
settings.selectedModel,
settings,
files,
);
const { fullStream } = await simpleStreamText({
modelClient,
files: files,
chatMessages: [
...chatMessages.map((msg, index) => {
if (

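For orientation, the hunks above make `files` a required argument to `simpleStreamText` and pass them along in the `dyad-engine` provider options as `dyadFiles`. A condensed sketch of that per-request transport; only the option names mirror the diff, and the helper itself is a simplified stand-in.

```ts
// Simplified stand-in showing the per-request file transport used above.
// Only the option names (dyad-engine, dyadRequestId, dyadDisableFiles,
// dyadFiles) come from the diff; the function is illustrative.
type CodebaseFile = { path: string; content: string; focused?: boolean };

function buildDyadEngineOptions(args: {
  files: CodebaseFile[];
  dyadRequestId: string;
  dyadDisableFiles?: boolean;
}) {
  const { files, dyadRequestId, dyadDisableFiles = false } = args;
  return {
    "dyad-engine": {
      dyadRequestId,
      dyadDisableFiles,
      // Codebase files ride along with each request instead of being baked
      // into the model client when it is constructed.
      dyadFiles: files,
    },
  };
}
```
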
View File

@@ -52,19 +52,15 @@ export interface ModelClient {
builtinProviderId?: string;
}
interface File {
path: string;
content: string;
}
const logger = log.scope("getModelClient");
export async function getModelClient(
model: LargeLanguageModel,
settings: UserSettings,
files?: File[],
// files?: File[],
): Promise<{
modelClient: ModelClient;
isEngineEnabled?: boolean;
isSmartContextEnabled?: boolean;
}> {
const allProviders = await getLanguageModelProviders();
@@ -84,6 +80,7 @@ export async function getModelClient(
// IMPORTANT: some providers like OpenAI have an empty string gateway prefix,
// so we do a nullish and not a truthy check here.
if (providerConfig.gatewayPrefix != null || dyadEngineUrl) {
const enableSmartFilesContext = settings.enableProSmartFilesContextMode;
const provider = createDyadEngine({
apiKey: dyadApiKey,
baseURL: dyadEngineUrl ?? "https://engine.dyad.sh/v1",
@@ -93,7 +90,7 @@ export async function getModelClient(
settings.selectedChatMode === "ask"
? false
: settings.enableProLazyEditsMode,
enableSmartFilesContext: settings.enableProSmartFilesContextMode,
enableSmartFilesContext,
// Keep in sync with getCurrentValue in ProModeSelector.tsx
smartContextMode: settings.proSmartContextOption ?? "balanced",
enableWebSearch: settings.enableProWebSearch,
@@ -112,15 +109,14 @@ export async function getModelClient(
// Do not use free variant (for openrouter).
const modelName = model.name.split(":free")[0];
const autoModelClient = {
model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`, {
files,
}),
model: provider(`${providerConfig.gatewayPrefix || ""}${modelName}`),
builtinProviderId: model.provider,
};
return {
modelClient: autoModelClient,
isEngineEnabled: true,
isSmartContextEnabled: enableSmartFilesContext,
};
} else {
logger.warn(
@@ -176,7 +172,6 @@ export async function getModelClient(
name: autoModel.name,
},
settings,
files,
);
}
}

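In short, the engine branch of `getModelClient` no longer receives `files`; it now surfaces the same settings flag that configures smart-files context so callers can gate their behavior. A hedged sketch of the return shape, with simplified stand-in types and the provider construction elided.

```ts
// Sketch only: the provider/model construction is elided; the point is that
// enableProSmartFilesContextMode both configures the engine provider and is
// returned to callers as isSmartContextEnabled.
interface ModelClientResult<TModel> {
  modelClient: { model: TModel; builtinProviderId?: string };
  isEngineEnabled?: boolean;
  isSmartContextEnabled?: boolean;
}

function buildEngineResult<TModel>(
  model: TModel,
  settings: { enableProSmartFilesContextMode: boolean },
  builtinProviderId?: string,
): ModelClientResult<TModel> {
  const enableSmartFilesContext = settings.enableProSmartFilesContextMode;
  return {
    modelClient: { model, builtinProviderId },
    isEngineEnabled: true,
    // The stream handler uses this flag to decide whether to narrow the chat
    // context or mark the selected component's file as focused.
    isSmartContextEnabled: enableSmartFilesContext,
  };
}
```
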
View File

@@ -13,10 +13,7 @@ import { LanguageModelV2 } from "@ai-sdk/provider";
const logger = log.scope("llm_engine_provider");
export type ExampleChatModelId = string & {};
export interface ExampleChatSettings {
files?: { path: string; content: string }[];
}
export interface ExampleChatSettings {}
export interface ExampleProviderSettings {
/**
Example API key.
@@ -106,13 +103,7 @@ export function createDyadEngine(
fetch: options.fetch,
});
const createChatModel = (
modelId: ExampleChatModelId,
settings: ExampleChatSettings = {},
) => {
// Extract files from settings to process them appropriately
const { files } = settings;
const createChatModel = (modelId: ExampleChatModelId) => {
// Create configuration with file handling
const config = {
...getCommonModelConfig(),
@@ -134,6 +125,10 @@ export function createDyadEngine(
options.settings,
),
};
const dyadFiles = parsedBody.dyadFiles;
if ("dyadFiles" in parsedBody) {
delete parsedBody.dyadFiles;
}
const requestId = parsedBody.dyadRequestId;
if ("dyadRequestId" in parsedBody) {
delete parsedBody.dyadRequestId;
@@ -156,9 +151,9 @@ export function createDyadEngine(
}
// Add files to the request if they exist
if (files?.length && !dyadDisableFiles) {
if (dyadFiles?.length && !dyadDisableFiles) {
parsedBody.dyad_options = {
files,
files: dyadFiles,
enable_lazy_edits: options.dyadOptions.enableLazyEdits,
enable_smart_files_context:
options.dyadOptions.enableSmartFilesContext,
@@ -195,10 +190,7 @@ export function createDyadEngine(
return new OpenAICompatibleChatLanguageModel(modelId, config);
};
const provider = (
modelId: ExampleChatModelId,
settings?: ExampleChatSettings,
) => createChatModel(modelId, settings);
const provider = (modelId: ExampleChatModelId) => createChatModel(modelId);
provider.chatModel = createChatModel;

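To summarize the provider-side change: the custom `dyad*` fields arrive in the request body, are stripped before the body goes upstream, and the files are re-attached under `dyad_options` unless sending is disabled. A condensed sketch, with `parsedBody` standing in for the intercepted JSON body; only the field names are taken from the diff.

```ts
// Condensed sketch of the request-body handling added in llm_engine_provider.
type CodebaseFile = { path: string; content: string; focused?: boolean };

interface DyadEngineOptions {
  enableLazyEdits: boolean;
  enableSmartFilesContext: boolean;
}

function applyDyadOptions(
  parsedBody: Record<string, unknown>,
  dyadOptions: DyadEngineOptions,
): void {
  // Pull the custom fields off the body so they never reach the upstream API.
  const dyadFiles = parsedBody.dyadFiles as CodebaseFile[] | undefined;
  delete parsedBody.dyadFiles;

  const dyadDisableFiles = Boolean(parsedBody.dyadDisableFiles);
  delete parsedBody.dyadDisableFiles;

  // Only attach files when the caller sent some and did not opt out.
  if (dyadFiles?.length && !dyadDisableFiles) {
    parsedBody.dyad_options = {
      files: dyadFiles,
      enable_lazy_edits: dyadOptions.enableLazyEdits,
      enable_smart_files_context: dyadOptions.enableSmartFilesContext,
    };
  }
}
```
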
View File

@@ -406,6 +406,7 @@ ${content}
export type CodebaseFile = {
path: string;
content: string;
focused?: boolean;
force?: boolean;
};