Add MCP support (#1028)

This commit is contained in:
Will Chen
2025-09-19 15:43:39 -07:00
committed by GitHub
parent 7b160b7d0b
commit 6d3c397d40
39 changed files with 3865 additions and 650 deletions

50
testing/README.md Normal file
View File

@@ -0,0 +1,50 @@
### Fake stdio MCP server
This directory contains a minimal stdio MCP server for local testing.
- **Tools**:
- **calculator_add**: adds two numbers. Inputs: `a` (number), `b` (number).
- **print_envs**: returns all environment variables visible to the server as pretty JSON.
### Requirements
- **Node 20+** (same as the repo engines)
- Uses the repo dependency `@modelcontextprotocol/sdk` and `zod`
### Launch
- **Via Node**:
```bash
node testing/fake-stdio-mcp-server.mjs
```
- **Via script** (provides a stable entrypoint path):
```bash
testing/run-fake-stdio-mcp-server.sh
```
### Passing environment variables
Environment variables provided when launching (either from your shell or by the app) will be visible to the `print_envs` tool.
```bash
export FOO=bar
export SECRET_TOKEN=example
testing/run-fake-stdio-mcp-server.sh
```
### Integrating with Dyad (stdio MCP)
When adding a stdio MCP server in the app, use:
- **Command**: `testing/run-fake-stdio-mcp-server.sh` (an absolute path is recommended)
- **Transport**: `stdio`
- **Args**: leave empty (not required)
- **Env**: optional key/values (e.g., `FOO=bar`)
Once connected, you should see the two tools listed:
- `calculator_add`
- `print_envs`

View File

@@ -180,9 +180,46 @@ export default Index;
messageContent = `[[STRING_IS_FINISHED]]";</dyad-write>\nFinished writing file.`;
messageContent += "\n\n" + generateDump(req);
}
const isToolCall = !!(
lastMessage &&
lastMessage.content &&
lastMessage.content.includes("[call_tool=calculator_add]")
);
let message = {
role: "assistant",
content: messageContent,
} as any;
// Non-streaming response
if (!stream) {
if (isToolCall) {
const toolCallId = `call_${Date.now()}`;
return res.json({
id: `chatcmpl-${Date.now()}`,
object: "chat.completion",
created: Math.floor(Date.now() / 1000),
model: "fake-model",
choices: [
{
index: 0,
message: {
role: "assistant",
tool_calls: [
{
id: toolCallId,
type: "function",
function: {
name: "calculator_add",
arguments: JSON.stringify({ a: 1, b: 2 }),
},
},
],
},
finish_reason: "tool_calls",
},
],
});
}
return res.json({
id: `chatcmpl-${Date.now()}`,
object: "chat.completion",
@@ -191,10 +228,7 @@ export default Index;
choices: [
{
index: 0,
message: {
role: "assistant",
content: messageContent,
},
message,
finish_reason: "stop",
},
],
@@ -206,9 +240,73 @@ export default Index;
res.setHeader("Cache-Control", "no-cache");
res.setHeader("Connection", "keep-alive");
// Tool call streaming (OpenAI-style)
if (isToolCall) {
const now = Date.now();
const mkChunk = (delta: any, finish: null | string = null) => {
const chunk = {
id: `chatcmpl-${now}`,
object: "chat.completion.chunk",
created: Math.floor(now / 1000),
model: "fake-model",
choices: [
{
index: 0,
delta,
finish_reason: finish,
},
],
};
return `data: ${JSON.stringify(chunk)}\n\n`;
};
// 1) Send role
res.write(mkChunk({ role: "assistant" }));
// 2) Send tool_calls init with id + name + empty args
const toolCallId = `call_${now}`;
res.write(
mkChunk({
tool_calls: [
{
index: 0,
id: toolCallId,
type: "function",
function: {
name: "testing-mcp-server__calculator_add",
arguments: "",
},
},
],
}),
);
// 3) Stream arguments gradually
const args = JSON.stringify({ a: 1, b: 2 });
let i = 0;
const argBatchSize = 6;
const argInterval = setInterval(() => {
if (i < args.length) {
const part = args.slice(i, i + argBatchSize);
i += argBatchSize;
res.write(
mkChunk({
tool_calls: [{ index: 0, function: { arguments: part } }],
}),
);
} else {
// 4) Finalize with finish_reason tool_calls and [DONE]
res.write(mkChunk({}, "tool_calls"));
res.write("data: [DONE]\n\n");
clearInterval(argInterval);
res.end();
}
}, 10);
return;
}
// Split the message into characters to simulate streaming
const message = messageContent;
const messageChars = message.split("");
const messageChars = messageContent.split("");
// Stream each character with a delay
let index = 0;

View File

@@ -0,0 +1,44 @@
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { z } from "zod";
// Minimal stdio MCP server used for local testing.
// Exposes two tools:
//   - calculator_add: returns the sum of two numbers as text.
//   - print_envs: dumps the server's environment variables as pretty JSON.

/** Tool handler for calculator_add: adds `a` and `b`, returns the sum as text. */
const handleCalculatorAdd = async ({ a, b }) => ({
  content: [{ type: "text", text: String(a + b) }],
});

/** Tool handler for print_envs: serializes process.env (undefined values become "") as indented JSON. */
const handlePrintEnvs = async () => {
  const entries = Object.entries(process.env).map(([key, value]) => [
    key,
    value ?? "",
  ]);
  const pretty = JSON.stringify(Object.fromEntries(entries), null, 2);
  return { content: [{ type: "text", text: pretty }] };
};

const mcpServer = new McpServer({
  name: "fake-stdio-mcp",
  version: "0.1.0",
});

mcpServer.registerTool(
  "calculator_add",
  {
    title: "Calculator Add",
    description: "Add two numbers and return the sum",
    inputSchema: { a: z.number(), b: z.number() },
  },
  handleCalculatorAdd,
);

mcpServer.registerTool(
  "print_envs",
  {
    title: "Print Envs",
    description: "Print the environment variables received by the server",
    inputSchema: {},
  },
  handlePrintEnvs,
);

// Wire the server to stdin/stdout and start serving requests.
await mcpServer.connect(new StdioServerTransport());

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -euo pipefail

# Launch the fake stdio MCP server with Node.
#
# Usage: testing/run-fake-stdio-mcp-server.sh
#
# Environment:
#   NODE_BIN - optional path to the Node.js binary (defaults to "node",
#              preserving the previous hard-coded behavior).

# Resolve the directory containing this script so it works from any CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Allow callers to override which Node binary is used (e.g. a pinned version).
NODE_BIN="${NODE_BIN:-node}"

exec "$NODE_BIN" "$SCRIPT_DIR/fake-stdio-mcp-server.mjs"