Emdash source with visual editor image upload fix

Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
2026-05-03 10:44:54 +07:00
parent 78f81bebb6
commit 2d1be52177
2352 changed files with 662964 additions and 0 deletions

View File

@@ -0,0 +1,55 @@
import type { CreateContentInput } from "../../src/database/repositories/types.js";
/**
 * Fixture for creating a post.
 *
 * Returns a complete `CreateContentInput` with sensible defaults; any field
 * may be overridden per-test via `overrides`, which wins over the defaults.
 */
export function createPostFixture(overrides: Partial<CreateContentInput> = {}): CreateContentInput {
  const defaults: CreateContentInput = {
    type: "post",
    slug: "hello-world",
    data: {
      title: "Hello World",
      content: [
        {
          _type: "block",
          style: "normal",
          children: [{ _type: "span", text: "This is a test post" }],
        },
      ],
    },
    status: "draft",
  };
  return { ...defaults, ...overrides };
}
/**
 * Fixture for creating a page.
 *
 * Returns a complete `CreateContentInput` with sensible defaults; any field
 * may be overridden per-test via `overrides`, which wins over the defaults.
 */
export function createPageFixture(overrides: Partial<CreateContentInput> = {}): CreateContentInput {
  const defaults: CreateContentInput = {
    type: "page",
    slug: "about",
    data: {
      title: "About",
      content: [
        {
          _type: "block",
          style: "normal",
          children: [{ _type: "span", text: "About page content" }],
        },
      ],
    },
    status: "draft",
  };
  return { ...defaults, ...overrides };
}

View File

@@ -0,0 +1,324 @@
/**
* MCP integration test harness.
*
* Builds a real `EmDashRuntime` against a pre-migrated test database, wires
* its handlers into a real `McpServer`, and connects a real `Client` over
* `InMemoryTransport`. No mocks. Production code paths run end-to-end —
* MCP tool dispatch, runtime handler logic (incl. draft revision flow),
* `ApiResult` error envelopes, repositories, and SQL.
*
* Use this for any test that asserts behavior of an MCP tool against real
* data. Use the unit-level mocked-handler suite (`tests/unit/mcp`) only
* for pure authorization/scope gating where a real DB adds nothing.
*/
import type { RoleLevel } from "@emdash-cms/auth";
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { InMemoryTransport } from "@modelcontextprotocol/sdk/inMemory.js";
import type { Kysely } from "kysely";
import type { EmDashConfig } from "../../src/astro/integration/runtime.js";
import type { EmDashHandlers } from "../../src/astro/types.js";
import type { Database } from "../../src/database/types.js";
import { EmDashRuntime } from "../../src/emdash-runtime.js";
import { createMcpServer } from "../../src/mcp/server.js";
import { createHookPipeline } from "../../src/plugins/hooks.js";
import type { ResolvedPlugin } from "../../src/plugins/types.js";
import { invalidateUrlPatternCache } from "../../src/query.js";
// ---------------------------------------------------------------------------
// Auth-injecting transport
//
// Mirrors the production HTTP transport: every client send carries authInfo
// with the user's role + scopes + emdash handle. The MCP server pulls these
// out of `extra.authInfo.extra` to authorize the request.
// ---------------------------------------------------------------------------
class AuthInjectingTransport extends InMemoryTransport {
  constructor(private authInfo: Record<string, unknown>) {
    super();
  }

  /**
   * Attach the harness's auth info to every outgoing message, mirroring the
   * production HTTP transport. Caller-supplied `options.authInfo` fields win
   * over the defaults, and caller-supplied `extra` entries win over the
   * harness-level `extra` entries.
   */
  override async send(
    message: Parameters<InMemoryTransport["send"]>[0],
    options?: Parameters<InMemoryTransport["send"]>[1],
  ): Promise<void> {
    const existingExtra =
      options?.authInfo && typeof options.authInfo === "object" && "extra" in options.authInfo
        ? // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- narrowed by typeof + 'in' check
          (options.authInfo.extra as Record<string, unknown>)
        : {};
    return super.send(message, {
      ...options,
      authInfo: {
        token: "",
        clientId: "test",
        scopes: [],
        ...options?.authInfo,
        extra: {
          ...this.authInfo,
          ...existingExtra,
        },
      },
    });
  }
}
/**
 * Build a linked client/server transport pair where the client side injects
 * the given auth info into every message it sends.
 */
function createAuthenticatedPair(authInfo: {
  emdash: EmDashHandlers;
  userId: string;
  userRole: RoleLevel;
  tokenScopes?: string[];
}): [AuthInjectingTransport, InMemoryTransport] {
  const clientSide = new AuthInjectingTransport(authInfo);
  const serverSide = new InMemoryTransport();
  // Pair the two ends manually — InMemoryTransport routes messages through
  // its private `_otherTransport` reference.
  const link = (from: unknown, to: unknown): void => {
    (from as Record<string, unknown>)._otherTransport = to;
  };
  link(clientSide, serverSide);
  link(serverSide, clientSide);
  return [clientSide, serverSide];
}
// ---------------------------------------------------------------------------
// Real runtime construction
//
// Builds a runtime around a pre-migrated DB without spinning up cron,
// marketplace, sandboxed plugins, or a media provider. Every code path the
// MCP tools exercise — content handlers, repositories, schema registry,
// draft revisions, FTS — runs through the real runtime methods.
//
// Note: this constructs `EmDashRuntime` directly via its public constructor.
// The runtime never reads `runtimeDeps` after construction except in
// `rebuildHookPipeline`, which tests do not call. cron/scheduler are null.
// ---------------------------------------------------------------------------
/** Options for {@link createTestRuntime}. */
export interface TestRuntimeOptions {
  /** Optional plugins to participate in the hook pipeline. Default: none. */
  plugins?: ResolvedPlugin[];
  /** Optional partial config override. Default: empty config. */
  config?: Partial<EmDashConfig>;
}
/**
 * Build a real `EmDashRuntime` for a test database.
 *
 * The DB must already have migrations + collections set up (use
 * `setupTestDatabaseWithCollections()` or equivalent).
 *
 * @param db   pre-migrated test database
 * @param opts optional plugins and config overrides
 */
export function createTestRuntime(
  db: Kysely<Database>,
  opts: TestRuntimeOptions = {},
): EmDashRuntime {
  const plugins = opts.plugins ?? [];
  const config: EmDashConfig = { ...opts.config };
  const pipelineFactoryOptions = { db } as const;
  const hooks = createHookPipeline(plugins, pipelineFactoryOptions);
  const pipelineRef = { current: hooks };
  // runtimeDeps is only consumed by `rebuildHookPipeline()`, which is not
  // invoked by any MCP tool path. We pass a minimal stub so the field
  // satisfies the type. If a future test ever touches plugin toggling, this
  // stub will need expanding (and that's a useful failure to hit, not
  // silent dead code).
  const runtimeDeps = {
    config,
    plugins,
    // eslint-disable-next-line @typescript-eslint/no-explicit-any -- match RuntimeDependencies signature
    createDialect: (() => {
      throw new Error("createDialect not available in test runtime");
    }) as any,
    createStorage: null,
    sandboxEnabled: false,
    sandboxedPluginEntries: [],
    createSandboxRunner: null,
  };
  return new EmDashRuntime({
    db,
    storage: null,
    configuredPlugins: plugins,
    sandboxedPlugins: new Map(),
    sandboxedPluginEntries: [],
    hooks,
    enabledPlugins: new Set(plugins.map((p) => p.id)),
    pluginStates: new Map(),
    config,
    mediaProviders: new Map(),
    mediaProviderEntries: [],
    cronExecutor: null,
    cronScheduler: null,
    emailPipeline: null,
    allPipelinePlugins: plugins,
    pipelineFactoryOptions,
    runtimeDeps,
    pipelineRef,
  });
}
/**
 * Build the `EmDashHandlers` shape the MCP server consumes from a runtime.
 *
 * Mirrors the wiring in `astro/middleware.ts` so the same code paths run
 * under test as in production. Every handler is bound to the runtime so the
 * MCP server can call it as a free function.
 *
 * @param runtime - the runtime whose handlers back the MCP tools
 * @returns the fully wired handler map
 */
export function handlersFromRuntime(runtime: EmDashRuntime): EmDashHandlers {
  const handlers: EmDashHandlers = {
    // Content
    handleContentList: runtime.handleContentList.bind(runtime),
    handleContentGet: runtime.handleContentGet.bind(runtime),
    handleContentGetIncludingTrashed: runtime.handleContentGetIncludingTrashed.bind(runtime),
    handleContentCreate: runtime.handleContentCreate.bind(runtime),
    handleContentUpdate: runtime.handleContentUpdate.bind(runtime),
    handleContentDelete: runtime.handleContentDelete.bind(runtime),
    handleContentDuplicate: runtime.handleContentDuplicate.bind(runtime),
    handleContentRestore: runtime.handleContentRestore.bind(runtime),
    handleContentPermanentDelete: runtime.handleContentPermanentDelete.bind(runtime),
    handleContentListTrashed: runtime.handleContentListTrashed.bind(runtime),
    handleContentCountTrashed: runtime.handleContentCountTrashed.bind(runtime),
    handleContentPublish: runtime.handleContentPublish.bind(runtime),
    handleContentUnpublish: runtime.handleContentUnpublish.bind(runtime),
    handleContentSchedule: runtime.handleContentSchedule.bind(runtime),
    handleContentUnschedule: runtime.handleContentUnschedule.bind(runtime),
    handleContentCountScheduled: runtime.handleContentCountScheduled.bind(runtime),
    handleContentDiscardDraft: runtime.handleContentDiscardDraft.bind(runtime),
    handleContentCompare: runtime.handleContentCompare.bind(runtime),
    handleContentTranslations: runtime.handleContentTranslations.bind(runtime),
    // Media
    handleMediaList: runtime.handleMediaList.bind(runtime),
    handleMediaGet: runtime.handleMediaGet.bind(runtime),
    handleMediaCreate: runtime.handleMediaCreate.bind(runtime),
    handleMediaUpdate: runtime.handleMediaUpdate.bind(runtime),
    handleMediaDelete: runtime.handleMediaDelete.bind(runtime),
    // Revisions
    handleRevisionList: runtime.handleRevisionList.bind(runtime),
    handleRevisionGet: runtime.handleRevisionGet.bind(runtime),
    handleRevisionRestore: runtime.handleRevisionRestore.bind(runtime),
    // Direct access (MCP tools use db for schema/menu/taxonomy/search)
    storage: runtime.storage,
    db: runtime.db,
    hooks: runtime.hooks,
    email: runtime.email,
    configuredPlugins: runtime.configuredPlugins,
    config: runtime.config,
    getManifest: runtime.getManifest.bind(runtime),
    invalidateUrlPatternCache,
    // Fields the MCP server doesn't currently call. Stub so the type
    // checks; if a tool ever reaches for one, the test will throw a
    // clear error rather than silently no-op.
    handlePluginApiRoute: () => {
      throw new Error("handlePluginApiRoute not implemented in test runtime");
    },
    getPluginRouteMeta: () => null,
    getMediaProvider: runtime.getMediaProvider.bind(runtime),
    getMediaProviderList: runtime.getMediaProviderList.bind(runtime),
    getSandboxRunner: runtime.getSandboxRunner.bind(runtime),
    syncMarketplacePlugins: () => Promise.resolve(),
    setPluginStatus: runtime.setPluginStatus.bind(runtime),
    collectPageMetadata: runtime.collectPageMetadata.bind(runtime),
    collectPageFragments: runtime.collectPageFragments.bind(runtime),
    ensureSearchHealthy: runtime.ensureSearchHealthy.bind(runtime),
  };
  return handlers;
}
// ---------------------------------------------------------------------------
// MCP client/server pair
// ---------------------------------------------------------------------------
/** Everything a test needs after connecting a client/server pair. */
export interface McpHarness {
  /** The connected MCP client — call `client.callTool({ name, arguments })`. */
  client: Client;
  /** The runtime backing the harness. Use to make direct DB writes/reads in setup. */
  runtime: EmDashRuntime;
  /** The handlers wired into the MCP server. */
  handlers: EmDashHandlers;
  /** Tear down both client and server. */
  cleanup: () => Promise<void>;
}

/** Inputs for {@link connectMcpHarness}: the DB plus the caller's identity. */
export interface ConnectMcpOptions {
  db: Kysely<Database>;
  userId: string;
  userRole: RoleLevel;
  tokenScopes?: string[];
  runtimeOptions?: TestRuntimeOptions;
}
/**
 * Connect a real MCP client/server pair against a real runtime + DB.
 *
 * No mocks. The MCP tool dispatch, the runtime handlers, and the database
 * are all production code. Anything that goes wrong in this harness is
 * something users will hit too.
 */
export async function connectMcpHarness(opts: ConnectMcpOptions): Promise<McpHarness> {
  const runtime = createTestRuntime(opts.db, opts.runtimeOptions);
  const handlers = handlersFromRuntime(runtime);
  const [clientTransport, serverTransport] = createAuthenticatedPair({
    emdash: handlers,
    userId: opts.userId,
    userRole: opts.userRole,
    tokenScopes: opts.tokenScopes,
  });
  const server = createMcpServer();
  const client = new Client({ name: "test", version: "1.0" });
  // Server must be listening before the client dials in.
  await server.connect(serverTransport);
  await client.connect(clientTransport);
  const cleanup = async (): Promise<void> => {
    await client.close();
    await server.close();
  };
  return { client, runtime, handlers, cleanup };
}
// ---------------------------------------------------------------------------
// Result helpers
//
// MCP tool results are an array of `{ type: "text", text: string }` blocks,
// with `isError: true` on failure. Tests almost always need either the
// parsed JSON of the success payload or the raw error text. These helpers
// make both readable.
// ---------------------------------------------------------------------------
/** Loose shape of an MCP tool call result, as the helpers below consume it. */
interface ToolResult {
  content?: Array<{ type: string; text?: string }>;
  isError?: boolean;
  [key: string]: unknown;
}
/** Extract the first text block's content from a tool result ("" when absent). */
export function extractText(result: unknown): string {
  const { content } = result as ToolResult;
  const firstBlock = content?.[0];
  if (typeof firstBlock?.text !== "string") {
    return "";
  }
  return firstBlock.text;
}
/** Parse the JSON success payload of a tool result. Throws if the call errored. */
export function extractJson<T = unknown>(result: unknown): T {
  if ((result as ToolResult).isError) {
    throw new Error(`Expected success but got error: ${extractText(result)}`);
  }
  const payload = extractText(result);
  if (payload.length === 0) {
    throw new Error("Tool result had no text content");
  }
  return JSON.parse(payload) as T;
}
/** Whether the result is an MCP error response (strict `isError === true`). */
export function isErrorResult(result: unknown): boolean {
  const { isError } = result as ToolResult;
  return isError === true;
}

View File

@@ -0,0 +1,347 @@
import Database from "better-sqlite3";
import { Kysely, PostgresDialect, SqliteDialect } from "kysely";
import { Pool } from "pg";
import { describe } from "vitest";
import { runMigrations } from "../../src/database/migrations/runner.js";
import type { Database as DatabaseSchema } from "../../src/database/types.js";
import { SchemaRegistry } from "../../src/schema/registry.js";
// ---------------------------------------------------------------------------
// Environment
// ---------------------------------------------------------------------------
/**
 * PostgreSQL connection string for tests, read from `EMDASH_TEST_PG`.
 * When set, Postgres tests run; when absent (empty string), they're skipped.
 */
export const PG_CONNECTION_STRING = process.env.EMDASH_TEST_PG ?? "";

/**
 * Whether a Postgres test database is available (i.e. the env var is non-empty).
 */
export const hasPgTestDatabase = PG_CONNECTION_STRING.length > 0;
// ---------------------------------------------------------------------------
// SQLite helpers (unchanged)
// ---------------------------------------------------------------------------
/**
 * Create an in-memory SQLite database for testing.
 *
 * @returns a Kysely instance over a fresh `:memory:` SQLite database
 */
export function createTestDatabase(): Kysely<DatabaseSchema> {
  const dialect = new SqliteDialect({ database: new Database(":memory:") });
  return new Kysely<DatabaseSchema>({ dialect });
}
/**
 * Setup a test database with migrations run.
 *
 * @returns a migrated in-memory SQLite database
 */
export async function setupTestDatabase(): Promise<Kysely<DatabaseSchema>> {
  const database = createTestDatabase();
  await runMigrations(database);
  return database;
}
/**
 * Setup a test database with standard test collections (post, page).
 * This creates the ec_post and ec_page tables with title and content fields.
 */
export async function setupTestDatabaseWithCollections(): Promise<Kysely<DatabaseSchema>> {
  const db = await setupTestDatabase();
  const registry = new SchemaRegistry(db);
  const collections = [
    { slug: "post", label: "Posts", labelSingular: "Post" },
    { slug: "page", label: "Pages", labelSingular: "Page" },
  ] as const;
  // Each collection gets the same two fields: a string title and a
  // portable-text content body.
  for (const { slug, label, labelSingular } of collections) {
    await registry.createCollection({ slug, label, labelSingular });
    await registry.createField(slug, {
      slug: "title",
      label: "Title",
      type: "string",
    });
    await registry.createField(slug, {
      slug: "content",
      label: "Content",
      type: "portableText",
    });
  }
  return db;
}
/**
 * Cleanup and destroy a test database.
 *
 * @param db - the Kysely instance to close (releases the underlying connection)
 */
export async function teardownTestDatabase(db: Kysely<DatabaseSchema>): Promise<void> {
  await db.destroy();
}
// ---------------------------------------------------------------------------
// PostgreSQL helpers
// ---------------------------------------------------------------------------
/**
 * Shared pool for Postgres tests. One pool per test process, many schemas.
 * Created lazily on first call to createTestPostgresDatabase().
 */
let sharedPool: Pool | null = null;

/** Lazily create (once) and return the process-wide Postgres pool. */
function getSharedPool(): Pool {
  sharedPool ??= new Pool({
    connectionString: PG_CONNECTION_STRING,
    max: 10,
  });
  return sharedPool;
}
/**
 * Generate a unique schema name for test isolation.
 * Format: test_<timestamp>_<random> — short, valid SQL identifier.
 */
function uniqueSchemaName(): string {
  const stamp = Date.now().toString(36);
  const suffix = Math.random().toString(36).substring(2, 8);
  return ["test", stamp, suffix].join("_");
}
/** Handle to one isolated Postgres test schema plus its Kysely instance. */
export interface PgTestContext {
  /** Kysely instance whose search_path targets {@link PgTestContext.schemaName}. */
  db: Kysely<DatabaseSchema>;
  /** Unique schema name; needed for teardown (DROP SCHEMA). */
  schemaName: string;
}
/**
* Create an isolated Postgres database for a single test.
*
* Each call creates a unique schema and returns a Kysely instance
* whose search_path is set to that schema. Tables are fully isolated.
*
* Call `teardownTestPostgresDatabase()` in afterEach to drop the schema.
*/
export async function createTestPostgresDatabase(): Promise<PgTestContext> {
const pool = getSharedPool();
const schemaName = uniqueSchemaName();
// Create the isolated schema using a raw connection
const client = await pool.connect();
try {
await client.query(`CREATE SCHEMA ${schemaName}`);
} finally {
client.release();
}
// Create a Kysely instance that targets this schema.
// Test schema comes first so CREATE TABLE goes there.
// public is included for Postgres system functions and extensions.
const testPool = new Pool({
connectionString: PG_CONNECTION_STRING,
max: 5,
options: `-c search_path=${schemaName},public`,
});
const db = new Kysely<DatabaseSchema>({
dialect: new PostgresDialect({ pool: testPool }),
});
return { db, schemaName };
}
/**
 * Setup a Postgres test database with migrations run.
 */
export async function setupTestPostgresDatabase(): Promise<PgTestContext> {
  const context = await createTestPostgresDatabase();
  await runMigrations(context.db);
  return context;
}
/**
 * Setup a Postgres test database with standard test collections (post, page).
 */
export async function setupTestPostgresDatabaseWithCollections(): Promise<PgTestContext> {
  const ctx = await setupTestPostgresDatabase();
  const registry = new SchemaRegistry(ctx.db);
  const collections = [
    { slug: "post", label: "Posts", labelSingular: "Post" },
    { slug: "page", label: "Pages", labelSingular: "Page" },
  ] as const;
  // Each collection gets the same two fields: a string title and a
  // portable-text content body.
  for (const { slug, label, labelSingular } of collections) {
    await registry.createCollection({ slug, label, labelSingular });
    await registry.createField(slug, {
      slug: "title",
      label: "Title",
      type: "string",
    });
    await registry.createField(slug, {
      slug: "content",
      label: "Content",
      type: "portableText",
    });
  }
  return ctx;
}
/**
 * Tear down a Postgres test database — drops the schema and closes the pool.
 */
export async function teardownTestPostgresDatabase(ctx: PgTestContext): Promise<void> {
  // The per-test pool must be closed before dropping its schema.
  await ctx.db.destroy();
  // Drop the schema using a raw connection from the shared pool.
  const client = await getSharedPool().connect();
  try {
    await client.query(`DROP SCHEMA IF EXISTS ${ctx.schemaName} CASCADE`);
  } finally {
    client.release();
  }
}
/**
 * Shut down the shared Postgres pool. Call once at the end of the test run.
 * Safe to call when no pool was ever created.
 */
export async function destroySharedPool(): Promise<void> {
  if (sharedPool === null) {
    return;
  }
  await sharedPool.end();
  sharedPool = null;
}
// ---------------------------------------------------------------------------
// Dialect-parametric test helpers
// ---------------------------------------------------------------------------
/** The two dialects the test suite can run against. */
export type DialectName = "sqlite" | "postgres";

/** A test database plus which dialect backs it. */
export interface DialectTestContext {
  db: Kysely<DatabaseSchema>;
  dialect: DialectName;
  /** Only present for Postgres — needed for teardown */
  pgCtx?: PgTestContext;
}
/**
 * Create a bare test database for a given dialect (no migrations).
 */
export async function createForDialect(dialect: DialectName): Promise<DialectTestContext> {
  if (dialect !== "postgres") {
    return { db: createTestDatabase(), dialect };
  }
  const pgCtx = await createTestPostgresDatabase();
  return { db: pgCtx.db, dialect, pgCtx };
}
/**
* Create a test database for a given dialect (with migrations).
*/
export async function setupForDialect(dialect: DialectName): Promise<DialectTestContext> {
if (dialect === "postgres") {
const pgCtx = await setupTestDatabase_pg();
return { db: pgCtx.db, dialect, pgCtx };
}
const db = await setupTestDatabase();
return { db, dialect };
}
/**
 * Create a test database with collections for a given dialect.
 */
export async function setupForDialectWithCollections(
  dialect: DialectName,
): Promise<DialectTestContext> {
  if (dialect !== "postgres") {
    const db = await setupTestDatabaseWithCollections();
    return { db, dialect };
  }
  const pgCtx = await setupTestPostgresDatabaseWithCollections();
  return { db: pgCtx.db, dialect, pgCtx };
}
/**
 * Tear down a test database for any dialect. Presence of `pgCtx`
 * distinguishes Postgres contexts from SQLite ones.
 */
export async function teardownForDialect(ctx: DialectTestContext): Promise<void> {
  if (!ctx.pgCtx) {
    await teardownTestDatabase(ctx.db);
    return;
  }
  await teardownTestPostgresDatabase(ctx.pgCtx);
}
// Private alias used by setupForDialect(). NOTE(review): the stated "name
// collision" does not exist — setupTestPostgresDatabase is a distinct name —
// so this indirection looks removable; confirm no other caller depends on it.
const setupTestDatabase_pg = setupTestPostgresDatabase;
/**
 * Run a describe block once per available dialect.
 *
 * When EMDASH_TEST_PG is not set, only SQLite runs.
 * When set, the suite runs for both SQLite and Postgres.
 *
 * @example
 * ```ts
 * describeEachDialect("Migrations", (dialectName) => {
 *   let ctx: DialectTestContext;
 *   beforeEach(async () => { ctx = await setupForDialect(dialectName); });
 *   afterEach(async () => { await teardownForDialect(ctx); });
 *
 *   it("creates tables", async () => {
 *     // ctx.db works with either dialect
 *   });
 * });
 * ```
 */
export function describeEachDialect(name: string, fn: (dialect: DialectName) => void): void {
  const dialects: DialectName[] = hasPgTestDatabase ? ["sqlite", "postgres"] : ["sqlite"];
  for (const dialect of dialects) {
    describe(`${name} [${dialect}]`, () => {
      fn(dialect);
    });
  }
}