Emdash source with visual editor image upload fix

Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
2026-05-03 10:44:54 +07:00
parent 78f81bebb6
commit 2d1be52177
2352 changed files with 662964 additions and 0 deletions

View File

@@ -0,0 +1,140 @@
/**
* Rate-limit behaviour on POST /_emdash/api/comments/:collection/:contentId.
*
* Specifically covers the removal of the user-agent-hash fallback. Before,
* a submitter with no trusted IP could rotate their User-Agent string to
* get a fresh rate-limit bucket each time; the route now buckets all
* trusted-IP-less requests together into the shared "unknown" bucket.
*
* Operators behind a reverse proxy they control should set
* `trustedProxyHeaders` (or EMDASH_TRUSTED_PROXY_HEADERS) so this path
* isn't hit for legitimate traffic. Those tests live alongside the
* extractRequestMeta unit tests.
*/
import type { APIContext } from "astro";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { POST as postComment } from "../../../src/astro/routes/api/comments/[collection]/[contentId]/index.js";
import { _resetTrustedProxyHeadersCache } from "../../../src/auth/trusted-proxy.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Keep the env-derived trusted-header cache from leaking into this file
// (a stale EMDASH_TRUSTED_PROXY_HEADERS would route every UA to its own
// bucket and make the test pass for the wrong reason). Snapshot is taken
// once at module load and restored in afterEach.
const ORIGINAL_TRUSTED_ENV = process.env.EMDASH_TRUSTED_PROXY_HEADERS;
/**
 * Builds a JSON POST against the comments endpoint for post/post-1.
 * The User-Agent header is attached only when `opts.userAgent` is set;
 * otherwise the request carries no UA header at all.
 */
function buildRequest(opts: { userAgent?: string; body: unknown }): Request {
  const headers: Record<string, string> = { "content-type": "application/json" };
  if (opts.userAgent) {
    headers["user-agent"] = opts.userAgent;
  }
  return new Request("http://localhost/_emdash/api/comments/post/post-1", {
    method: "POST",
    headers,
    body: JSON.stringify(opts.body),
  });
}
/**
 * Minimal APIContext stub targeting post/post-1. Only the fields the
 * comment route reads are populated; the hooks are pass-through no-ops.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse as a rule reference — the disable was a
 * no-op. Correct form is `@typescript-eslint/rule-name`.
 */
function buildContext(opts: { db: Kysely<Database>; request: Request }): APIContext {
  return {
    params: { collection: "post", contentId: "post-1" },
    request: opts.request,
    locals: {
      emdash: {
        db: opts.db,
        config: {},
        hooks: {
          // Pass-through beforeCreate (returns the event unchanged).
          runCommentBeforeCreate: async (event: unknown) => event,
          // No moderator configured — returns null (route coerces to pending).
          invokeExclusiveHook: async () => null,
          runCommentAfterCreate: async () => undefined,
        },
      },
      user: null,
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub for tests
  } as unknown as APIContext;
}
describe("POST /comments — UA-hash rate-limit removal", () => {
let db: Kysely<Database>;
beforeEach(async () => {
delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
_resetTrustedProxyHeadersCache();
db = await setupTestDatabase();
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "post",
label: "Posts",
labelSingular: "Post",
commentsEnabled: true,
});
await registry.createField("post", { slug: "title", label: "Title", type: "string" });
// Create a published content row so the comment route can target it.
await db
.insertInto("ec_post" as never)
.values({
id: "post-1",
slug: "post-1",
status: "published",
published_at: new Date().toISOString(),
title: "Test post",
} as never)
.execute();
});
afterEach(async () => {
await teardownTestDatabase(db);
if (ORIGINAL_TRUSTED_ENV === undefined) {
delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
} else {
process.env.EMDASH_TRUSTED_PROXY_HEADERS = ORIGINAL_TRUSTED_ENV;
}
_resetTrustedProxyHeadersCache();
});
it("buckets no-trusted-IP requests together regardless of User-Agent", async () => {
// Submit 20 comments from different UA strings but without any
// trusted IP header. The limit for the "unknown" bucket is 20/10min.
// Before the fix, rotating UAs would give each request its own
// bucket; with the fix, they share the "unknown" bucket.
for (let i = 0; i < 20; i++) {
const res = await postComment(
buildContext({
db,
request: buildRequest({
userAgent: `Bot/${i}`,
body: {
authorName: "Spam",
authorEmail: "s@example.com",
body: `message ${i}`,
},
}),
}),
);
expect([200, 201]).toContain(res.status);
}
// 21st call with a fresh UA must still hit the shared bucket and
// get rate-limited.
const limitedRes = await postComment(
buildContext({
db,
request: buildRequest({
userAgent: "Bot/fresh",
body: {
authorName: "Spam",
authorEmail: "s@example.com",
body: "one more",
},
}),
}),
);
expect(limitedRes.status).toBe(429);
});
});

View File

@@ -0,0 +1,188 @@
/**
* Success-path coverage for the setup nonce cookie.
*
* The sibling file `setup-admin-nonce.test.ts` covers the negative
* paths (missing cookie, mismatched cookie, rotation) by driving
* /setup/admin/verify with a bogus credential that fails at the
* WebAuthn step. That harness can't exercise the *successful* verify
* path — real WebAuthn verification requires a live authenticator.
*
* This file stubs `verifyRegistrationResponse` with a fake that
* returns synthetic credential material so we can reach the code
* after the nonce gate: user creation, passkey registration, setup
* completion, and — the property we actually care about — deletion
* of the nonce cookie.
*
* `registerPasskey` is left real; it only talks to the Kysely
* adapter against the in-memory test DB.
*/
import type { APIContext, AstroCookies } from "astro";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
vi.mock("@emdash-cms/auth/passkey", async (importOriginal) => {
const actual = await importOriginal<typeof import("@emdash-cms/auth/passkey")>();
return {
...actual,
verifyRegistrationResponse: vi.fn(async () => ({
credentialId: "fake-credential-id",
publicKey: new Uint8Array([1, 2, 3, 4]),
counter: 0,
deviceType: "singleDevice" as const,
backedUp: false,
transports: [],
})),
};
});
// Deferred so vi.mock applies before the route modules evaluate.
type AdminRoute = typeof import("../../../src/astro/routes/api/setup/admin.js");
type AdminVerifyRoute = typeof import("../../../src/astro/routes/api/setup/admin-verify.js");
// Assigned in beforeEach via dynamic import, after the mock is in place.
let postAdmin: AdminRoute["POST"];
let postAdminVerify: AdminVerifyRoute["POST"];
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
interface CookieRecord {
  value: string;
  options: Record<string, unknown>;
  // Tombstone flag: delete() marks rather than removes, so tests can
  // still inspect the deletion options (e.g. path) afterwards.
  deleted?: boolean;
}

interface CookieJar {
  jar: Map<string, CookieRecord>;
  cookies: AstroCookies;
}

/**
 * In-memory AstroCookies stub whose delete() leaves a tombstone record
 * behind, so assertions can check both that a cookie was deleted and
 * with which options it was deleted.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse — the disable was a no-op. Correct form
 * is `@typescript-eslint/rule-name`.
 */
function createCookieJar(initial: Record<string, string> = {}): CookieJar {
  const jar = new Map<string, CookieRecord>();
  for (const [name, value] of Object.entries(initial)) {
    jar.set(name, { value, options: {} });
  }
  const cookies = {
    get(name: string) {
      const record = jar.get(name);
      // Deleted cookies behave as absent for readers.
      if (!record || record.deleted) return undefined;
      return { value: record.value };
    },
    set(name: string, value: string, options: Record<string, unknown> = {}) {
      jar.set(name, { value, options });
    },
    delete(name: string, options: Record<string, unknown> = {}) {
      const existing = jar.get(name);
      // Merge deletion options over any previous set() options so the
      // test can assert on the path used for deletion.
      jar.set(name, {
        value: existing?.value ?? "",
        options: { ...existing?.options, ...options },
        deleted: true,
      });
    },
    has(name: string) {
      const record = jar.get(name);
      return !!record && !record.deleted;
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
  } as unknown as AstroCookies;
  return { jar, cookies };
}
/** JSON POST request to the /setup/admin endpoint with the given payload. */
function buildAdminRequest(body: unknown): Request {
  const init = {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(body),
  };
  return new Request("http://localhost/_emdash/api/setup/admin", init);
}

/** JSON POST request to the /setup/admin/verify endpoint with the given payload. */
function buildVerifyRequest(body: unknown): Request {
  const init = {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(body),
  };
  return new Request("http://localhost/_emdash/api/setup/admin/verify", init);
}
/**
 * Minimal APIContext stub for driving the setup routes. Only the fields
 * the handlers read are populated; `storage` is left undefined.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse — the disable was a no-op. Correct form
 * is `@typescript-eslint/rule-name`.
 */
function buildContext(db: Kysely<Database>, request: Request, cookies: AstroCookies): APIContext {
  return {
    params: {},
    url: new URL(request.url),
    request,
    cookies,
    locals: {
      emdash: {
        db,
        config: {},
        storage: undefined,
      },
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
  } as unknown as APIContext;
}
// Setup payload for the legitimate admin account.
const adminBody = { email: "real@admin.example", name: "Real Admin" };
// Any object that passes setupAdminVerifyBody — the actual WebAuthn
// verification is mocked out, so the fields don't need to parse as
// valid authenticator data.
const fakeCredential = {
  credential: {
    id: "fake-credential-id",
    rawId: "fake-credential-id",
    type: "public-key" as const,
    response: {
      clientDataJSON: "AA",
      attestationObject: "AA",
    },
  },
};
describe("POST /setup/admin/verify — success path clears nonce cookie", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
({ POST: postAdmin } = await import("../../../src/astro/routes/api/setup/admin.js"));
({ POST: postAdminVerify } =
await import("../../../src/astro/routes/api/setup/admin-verify.js"));
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("deletes the setup nonce cookie and marks setup complete when verify succeeds", async () => {
// 1. Start admin setup — mints the nonce and drops the cookie.
const { jar, cookies } = createCookieJar();
const adminRes = await postAdmin(buildContext(db, buildAdminRequest(adminBody), cookies));
expect(adminRes.status).toBe(200);
const setCookie = jar.get("emdash_setup_nonce");
expect(setCookie).toBeDefined();
expect(setCookie!.deleted).toBeFalsy();
// 2. Verify with the mocked-out WebAuthn check. The nonce gate
// runs first (real code path), then the stub returns a
// synthetic credential and the route creates the user.
const verifyRes = await postAdminVerify(
buildContext(db, buildVerifyRequest(fakeCredential), cookies),
);
expect(verifyRes.status).toBe(200);
// 3. Cookie should now be deleted. The deletion must be
// scoped to /_emdash/ so it actually supersedes the cookie
// the browser holds.
const afterVerify = jar.get("emdash_setup_nonce");
expect(afterVerify?.deleted).toBe(true);
expect(afterVerify?.options.path).toBe("/_emdash/");
// 4. Setup state is cleared and setup_complete is set.
const options = new OptionsRepository(db);
const setupState = await options.get("emdash:setup_state");
expect(setupState).toBeNull();
const setupComplete = await options.get("emdash:setup_complete");
expect(setupComplete).toBe(true);
});
});

View File

@@ -0,0 +1,265 @@
/**
* POST /_emdash/api/setup/admin mints a per-session nonce, sets it as an
* HttpOnly cookie scoped to /_emdash/, and stores it inside
* `emdash:setup_state`. POST /_emdash/api/setup/admin/verify must then
* present the same cookie value.
*
* Without this binding, an unauthenticated attacker could call
* /setup/admin during the setup window and overwrite the legitimate
* admin's email; when the admin then completes passkey verification,
* the user account would be created with the attacker's address.
*/
import type { APIContext, AstroCookies } from "astro";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { POST as postAdminVerify } from "../../../src/astro/routes/api/setup/admin-verify.js";
import { POST as postAdmin } from "../../../src/astro/routes/api/setup/admin.js";
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
interface CookieRecord {
  value: string;
  options: Record<string, unknown>;
}

interface CookieJar {
  jar: Map<string, CookieRecord>;
  cookies: AstroCookies;
}

/**
 * Minimal in-memory implementation of Astro's `AstroCookies`. Tests
 * compose two contexts (admin, verify) and carry cookies between them.
 * delete() removes the record outright — this suite never asserts on
 * deletion options.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse — the disable was a no-op. Correct form
 * is `@typescript-eslint/rule-name`.
 */
function createCookieJar(initial: Record<string, string> = {}): CookieJar {
  const jar = new Map<string, CookieRecord>();
  for (const [name, value] of Object.entries(initial)) {
    jar.set(name, { value, options: {} });
  }
  const cookies = {
    get(name: string) {
      const record = jar.get(name);
      if (!record) return undefined;
      return { value: record.value };
    },
    set(name: string, value: string, options: Record<string, unknown> = {}) {
      jar.set(name, { value, options });
    },
    delete(name: string) {
      jar.delete(name);
    },
    has(name: string) {
      return jar.has(name);
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
  } as unknown as AstroCookies;
  return { jar, cookies };
}
/** Build a JSON POST aimed at the /setup/admin endpoint. */
function buildAdminRequest(body: unknown): Request {
  return new Request("http://localhost/_emdash/api/setup/admin", {
    body: JSON.stringify(body),
    headers: { "content-type": "application/json" },
    method: "POST",
  });
}

/** Build a JSON POST aimed at the /setup/admin/verify endpoint. */
function buildVerifyRequest(body: unknown): Request {
  return new Request("http://localhost/_emdash/api/setup/admin/verify", {
    body: JSON.stringify(body),
    headers: { "content-type": "application/json" },
    method: "POST",
  });
}
/**
 * Minimal APIContext stub for driving the setup routes. Only the fields
 * the handlers read are populated; `storage` is left undefined.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse — the disable was a no-op. Correct form
 * is `@typescript-eslint/rule-name`.
 */
function buildContext(db: Kysely<Database>, request: Request, cookies: AstroCookies): APIContext {
  return {
    params: {},
    url: new URL(request.url),
    request,
    cookies,
    locals: {
      emdash: {
        db,
        config: {},
        storage: undefined,
      },
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
  } as unknown as APIContext;
}
// Setup payloads: the legitimate admin and the attacker trying to
// overwrite the pending email.
const adminBody = { email: "real@admin.example", name: "Real Admin" };
const attackerBody = { email: "attacker@evil.example", name: "Attacker" };
// A bogus passkey credential — verify will fail at the WebAuthn step,
// but only AFTER the nonce check. We're asserting on the nonce gate, not
// the eventual passkey result.
const bogusCredential = {
  credential: {
    id: "AA",
    rawId: "AA",
    type: "public-key" as const,
    response: {
      clientDataJSON: "AA",
      attestationObject: "AA",
    },
  },
};
describe("POST /setup/admin — session nonce binding", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("sets a HttpOnly nonce cookie on the response and stores it with setup state", async () => {
const { jar, cookies } = createCookieJar();
const res = await postAdmin(buildContext(db, buildAdminRequest(adminBody), cookies));
expect(res.status).toBe(200);
const cookie = jar.get("emdash_setup_nonce");
expect(cookie).toBeDefined();
// 32 bytes base64url-encoded with no padding = 43 chars. Lock the
// shape so accidental entropy changes trip this test.
expect(cookie!.value).toMatch(/^[A-Za-z0-9_-]{43}$/);
expect(cookie!.options.httpOnly).toBe(true);
// The route sets sameSite: "strict" deliberately — this is the
// property that prevents cross-site submission of the cookie.
// Allowing "lax" here would silently accept a regression.
expect(cookie!.options.sameSite).toBe("strict");
expect(cookie!.options.path).toBe("/_emdash/");
const options = new OptionsRepository(db);
const setupState = await options.get<{ email: string; nonce: string }>("emdash:setup_state");
expect(setupState).toBeDefined();
expect(setupState?.email).toBe("real@admin.example");
expect(setupState?.nonce).toBe(cookie!.value);
});
it("sets Secure on the nonce cookie when the public origin is HTTPS, even if the internal request URL is HTTP", async () => {
// Simulates a TLS-terminating reverse proxy: browser speaks
// https:// to the proxy, proxy speaks http:// to the app. The
// cookie must still be marked Secure so it's never sent over a
// plain-text channel on the public side.
const { jar, cookies } = createCookieJar();
const request = buildAdminRequest(adminBody);
const ctx = buildContext(db, request, cookies);
// Force the "internal" view to be HTTP…
(ctx as { url: URL }).url = new URL("http://internal.localhost/_emdash/api/setup/admin");
// …while config.siteUrl declares the public HTTPS origin.
(ctx.locals as { emdash: { config: { siteUrl: string } } }).emdash.config = {
siteUrl: "https://public.example.com",
};
const res = await postAdmin(ctx);
expect(res.status).toBe(200);
const cookie = jar.get("emdash_setup_nonce");
expect(cookie).toBeDefined();
expect(cookie!.options.secure).toBe(true);
});
it("omits Secure on the nonce cookie when the public origin is HTTP (local dev)", async () => {
// Mirror of the test above: a plain http://localhost deployment
// must not set Secure (Chromium would drop the cookie entirely).
const { jar, cookies } = createCookieJar();
const res = await postAdmin(buildContext(db, buildAdminRequest(adminBody), cookies));
expect(res.status).toBe(200);
const cookie = jar.get("emdash_setup_nonce");
expect(cookie).toBeDefined();
expect(cookie!.options.secure).toBe(false);
});
it("rejects /admin/verify when no nonce cookie is present", async () => {
// Legitimate admin call mints the nonce.
const { cookies: adminCookies } = createCookieJar();
const adminRes = await postAdmin(buildContext(db, buildAdminRequest(adminBody), adminCookies));
expect(adminRes.status).toBe(200);
// Attacker calls verify without the cookie.
const { cookies: noCookies } = createCookieJar();
const verifyRes = await postAdminVerify(
buildContext(db, buildVerifyRequest(bogusCredential), noCookies),
);
expect(verifyRes.status).toBe(400);
const body = (await verifyRes.json()) as { error?: { code?: string } };
expect(body.error?.code).toBe("INVALID_STATE");
});
it("rejects /admin/verify when the nonce cookie does not match the stored nonce", async () => {
const { cookies: adminCookies } = createCookieJar();
const adminRes = await postAdmin(buildContext(db, buildAdminRequest(adminBody), adminCookies));
expect(adminRes.status).toBe(200);
// Attacker presents a forged cookie with a guessed value.
const { cookies: attackerCookies } = createCookieJar({
emdash_setup_nonce: "obviously-wrong-value",
});
const verifyRes = await postAdminVerify(
buildContext(db, buildVerifyRequest(bogusCredential), attackerCookies),
);
expect(verifyRes.status).toBe(400);
const body = (await verifyRes.json()) as { error?: { code?: string } };
expect(body.error?.code).toBe("INVALID_STATE");
});
it("blocks the email-hijack attack: attacker overwrites setup_state but cannot complete verify", async () => {
// 1. Legitimate admin starts setup.
const { jar: adminJar, cookies: adminCookies } = createCookieJar();
const firstRes = await postAdmin(buildContext(db, buildAdminRequest(adminBody), adminCookies));
expect(firstRes.status).toBe(200);
const adminNonce = adminJar.get("emdash_setup_nonce")!.value;
// 2. Attacker (different browser, no cookie) calls /setup/admin to
// overwrite the email. With the fix this also rotates the nonce,
// invalidating the legitimate admin's session.
const { jar: attackerJar, cookies: attackerCookies } = createCookieJar();
const attackerRes = await postAdmin(
buildContext(db, buildAdminRequest(attackerBody), attackerCookies),
);
expect(attackerRes.status).toBe(200);
const attackerNonce = attackerJar.get("emdash_setup_nonce")!.value;
expect(attackerNonce).not.toBe(adminNonce);
// 3. Legitimate admin completes verify with their original cookie.
// This must NOT succeed, because the stored nonce has rotated.
const verifyRes = await postAdminVerify(
buildContext(db, buildVerifyRequest(bogusCredential), adminCookies),
);
expect(verifyRes.status).toBe(400);
const body = (await verifyRes.json()) as { error?: { code?: string } };
expect(body.error?.code).toBe("INVALID_STATE");
});
it("allows a legitimate admin to retry /setup/admin and reuse the new cookie", async () => {
// First call mints nonce A.
const { jar, cookies } = createCookieJar();
const first = await postAdmin(buildContext(db, buildAdminRequest(adminBody), cookies));
expect(first.status).toBe(200);
const nonceA = jar.get("emdash_setup_nonce")!.value;
// Same admin retries (e.g. corrected typo). Nonce rotates, cookie
// updates in the same jar — they continue with the new value.
const second = await postAdmin(buildContext(db, buildAdminRequest(adminBody), cookies));
expect(second.status).toBe(200);
const nonceB = jar.get("emdash_setup_nonce")!.value;
expect(nonceB).not.toBe(nonceA);
const options = new OptionsRepository(db);
const setupState = await options.get<{ nonce: string }>("emdash:setup_state");
expect(setupState?.nonce).toBe(nonceB);
});
});

View File

@@ -0,0 +1,161 @@
/**
* POST /_emdash/api/setup writes `emdash:site_url` once. Subsequent calls
* to the setup endpoint (during the multi-step wizard, before
* `emdash:setup_complete` is true) must not overwrite it.
*
* Without this, a spoofed Host header on any follow-up POST during the
* setup window could poison the site URL used in auth emails.
*
* The primary defence (config.siteUrl / EMDASH_SITE_URL env) was added
* earlier; this is the last-line lock for deployments that rely on the
* request-origin fallback.
*/
import type { APIContext } from "astro";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
// Stub the seed virtual module that loadSeed() imports at runtime. Without
// this the setup route errors out before reaching the site_url write.
vi.mock("virtual:emdash/seed", () => ({
seed: {
version: "1",
settings: {},
collections: [],
},
userSeed: null,
}));
import { POST as postSetup } from "../../../src/astro/routes/api/setup/index.js";
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
/**
 * JSON POST to /_emdash/api/setup with the given Host — used both in
 * the URL authority and as an explicit `host` header, so the route's
 * request-origin fallback sees the value under test.
 */
function buildRequest(host: string, body: unknown): Request {
  const target = `http://${host}/_emdash/api/setup`;
  const headers = {
    "content-type": "application/json",
    host,
  };
  return new Request(target, {
    method: "POST",
    headers,
    body: JSON.stringify(body),
  });
}
/**
 * Minimal APIContext stub for the setup route. Only the fields the
 * handler reads are populated; `storage` is left undefined.
 *
 * Fix: the eslint-disable directive used `typescript-eslint(rule)`,
 * which ESLint does not parse — the disable was a no-op. Correct form
 * is `@typescript-eslint/rule-name`.
 */
function buildContext(db: Kysely<Database>, request: Request): APIContext {
  return {
    params: {},
    url: new URL(request.url),
    request,
    locals: {
      emdash: {
        db,
        config: {},
        storage: undefined,
      },
    },
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
  } as unknown as APIContext;
}
describe("POST /setup — site_url write-once lock", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("stores site_url from the first request", async () => {
const res = await postSetup(
buildContext(
db,
buildRequest("real-site.example", { title: "My Site", includeContent: false }),
),
);
expect(res.status).toBe(200);
const options = new OptionsRepository(db);
expect(await options.get("emdash:site_url")).toBe("http://real-site.example");
});
it("does not overwrite site_url when a later setup call arrives with a spoofed Host", async () => {
// First call — legitimate admin on the real host.
const first = await postSetup(
buildContext(
db,
buildRequest("real-site.example", { title: "My Site", includeContent: false }),
),
);
expect(first.status).toBe(200);
// Attacker sends a second setup call with a spoofed Host header
// before the admin has completed the final step. Without the lock,
// the stored site_url would be overwritten.
const second = await postSetup(
buildContext(
db,
buildRequest("attacker.example", { title: "My Site", includeContent: false }),
),
);
expect(second.status).toBe(200);
const options = new OptionsRepository(db);
expect(await options.get("emdash:site_url")).toBe("http://real-site.example");
});
it("is atomic under concurrent setup POSTs with different Hosts", async () => {
// Two concurrent callers observe an empty site_url and race to
// write. Without DB-level write-once semantics, the last writer
// wins and the legitimate host can still be replaced.
const [a, b] = await Promise.all([
postSetup(
buildContext(
db,
buildRequest("real-site.example", { title: "My Site", includeContent: false }),
),
),
postSetup(
buildContext(
db,
buildRequest("attacker.example", { title: "My Site", includeContent: false }),
),
),
]);
expect(a.status).toBe(200);
expect(b.status).toBe(200);
const options = new OptionsRepository(db);
const stored = await options.get("emdash:site_url");
// Whichever call won the race must now stick — a third caller must
// not be able to overwrite it.
expect(["http://real-site.example", "http://attacker.example"]).toContain(stored);
const third = await postSetup(
buildContext(db, buildRequest("other.example", { title: "My Site", includeContent: false })),
);
expect(third.status).toBe(200);
expect(await options.get("emdash:site_url")).toBe(stored);
});
it("does not overwrite a legitimately-stored empty string", async () => {
// Defence-in-depth: if site_url was somehow stored as "" (e.g.
// manual DB edit, legacy data, test fixture), the guard must treat
// it as present, not missing.
const options = new OptionsRepository(db);
await options.set("emdash:site_url", "");
const res = await postSetup(
buildContext(
db,
buildRequest("attacker.example", { title: "My Site", includeContent: false }),
),
);
expect(res.status).toBe(200);
expect(await options.get("emdash:site_url")).toBe("");
});
});

View File

@@ -0,0 +1,309 @@
/**
* Integration tests for API token handlers.
*
* Tests token CRUD and resolution against a real in-memory SQLite database.
*/
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleApiTokenCreate,
handleApiTokenList,
handleApiTokenRevoke,
resolveApiToken,
resolveOAuthToken,
} from "../../../src/api/handlers/api-tokens.js";
import { generatePrefixedToken, TOKEN_PREFIXES } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Regex patterns for token validation: personal access tokens carry the
// ec_pat_ prefix.
const PAT_PREFIX_REGEX = /^ec_pat_/;

let db: Kysely<Database>;

beforeEach(async () => {
  db = await setupTestDatabase();
  // Create a test user that owns the tokens created in these suites.
  await db
    .insertInto("users")
    .values({
      id: "user_1",
      email: "admin@test.com",
      name: "Admin",
      role: 50, // ADMIN
      email_verified: 1,
    })
    .execute();
});

afterEach(async () => {
  // Dispose the Kysely connection; the database is in-memory, so this
  // drops all state between tests.
  await db.destroy();
});
describe("handleApiTokenCreate", () => {
it("creates a token and returns the raw value", async () => {
const result = await handleApiTokenCreate(db, "user_1", {
name: "Test Token",
scopes: ["content:read", "content:write"],
});
expect(result.success).toBe(true);
expect(result.data).toBeDefined();
expect(result.data!.token).toMatch(PAT_PREFIX_REGEX);
expect(result.data!.info.name).toBe("Test Token");
expect(result.data!.info.scopes).toEqual(["content:read", "content:write"]);
expect(result.data!.info.userId).toBe("user_1");
expect(result.data!.info.prefix).toMatch(PAT_PREFIX_REGEX);
});
it("creates tokens with different hashes", async () => {
const result1 = await handleApiTokenCreate(db, "user_1", {
name: "Token 1",
scopes: ["content:read"],
});
const result2 = await handleApiTokenCreate(db, "user_1", {
name: "Token 2",
scopes: ["content:read"],
});
expect(result1.data!.token).not.toBe(result2.data!.token);
});
it("stores expiry date when provided", async () => {
const expiresAt = new Date(Date.now() + 86400000).toISOString();
const result = await handleApiTokenCreate(db, "user_1", {
name: "Expiring Token",
scopes: ["content:read"],
expiresAt,
});
expect(result.data!.info.expiresAt).toBe(expiresAt);
});
});
describe("handleApiTokenList", () => {
it("lists tokens for a user", async () => {
await handleApiTokenCreate(db, "user_1", {
name: "Token A",
scopes: ["content:read"],
});
await handleApiTokenCreate(db, "user_1", {
name: "Token B",
scopes: ["admin"],
});
const result = await handleApiTokenList(db, "user_1");
expect(result.success).toBe(true);
expect(result.data!.items).toHaveLength(2);
const names = result.data!.items.map((t) => t.name).toSorted();
expect(names).toEqual(["Token A", "Token B"]);
});
it("does not return tokens for other users", async () => {
await db
.insertInto("users")
.values({
id: "user_2",
email: "other@test.com",
name: "Other",
role: 50,
email_verified: 1,
})
.execute();
await handleApiTokenCreate(db, "user_1", {
name: "User 1 Token",
scopes: ["content:read"],
});
await handleApiTokenCreate(db, "user_2", {
name: "User 2 Token",
scopes: ["content:read"],
});
const result = await handleApiTokenList(db, "user_1");
expect(result.data!.items).toHaveLength(1);
expect(result.data!.items[0].name).toBe("User 1 Token");
});
it("never returns the token hash", async () => {
await handleApiTokenCreate(db, "user_1", {
name: "Test",
scopes: ["content:read"],
});
const result = await handleApiTokenList(db, "user_1");
const item = result.data!.items[0];
// Ensure no hash or raw token is exposed
expect(item).not.toHaveProperty("token_hash");
expect(item).not.toHaveProperty("tokenHash");
expect(item).not.toHaveProperty("token");
});
});
describe("handleApiTokenRevoke", () => {
it("revokes a token", async () => {
const createResult = await handleApiTokenCreate(db, "user_1", {
name: "To Revoke",
scopes: ["content:read"],
});
const tokenId = createResult.data!.info.id;
const result = await handleApiTokenRevoke(db, tokenId, "user_1");
expect(result.success).toBe(true);
// Should be gone from the list
const list = await handleApiTokenList(db, "user_1");
expect(list.data!.items).toHaveLength(0);
});
it("returns error for non-existent token", async () => {
const result = await handleApiTokenRevoke(db, "nonexistent", "user_1");
expect(result.success).toBe(false);
expect(result.error!.code).toBe("NOT_FOUND");
});
it("cannot revoke another user's token", async () => {
await db
.insertInto("users")
.values({
id: "user_2",
email: "other@test.com",
name: "Other",
role: 50,
email_verified: 1,
})
.execute();
const createResult = await handleApiTokenCreate(db, "user_1", {
name: "User 1 Token",
scopes: ["content:read"],
});
const tokenId = createResult.data!.info.id;
// User 2 tries to revoke user 1's token
const result = await handleApiTokenRevoke(db, tokenId, "user_2");
expect(result.success).toBe(false);
expect(result.error!.code).toBe("NOT_FOUND");
// Token should still exist
const list = await handleApiTokenList(db, "user_1");
expect(list.data!.items).toHaveLength(1);
});
});
describe("resolveApiToken", () => {
it("resolves a valid token to user and scopes", async () => {
const createResult = await handleApiTokenCreate(db, "user_1", {
name: "Test",
scopes: ["content:read", "media:write"],
});
const rawToken = createResult.data!.token;
const resolved = await resolveApiToken(db, rawToken);
expect(resolved).not.toBeNull();
expect(resolved!.userId).toBe("user_1");
expect(resolved!.scopes).toEqual(["content:read", "media:write"]);
});
it("returns null for invalid token", async () => {
const resolved = await resolveApiToken(db, "ec_pat_invalidtoken123");
expect(resolved).toBeNull();
});
it("returns null for expired token", async () => {
const pastDate = new Date(Date.now() - 86400000).toISOString(); // Yesterday
const createResult = await handleApiTokenCreate(db, "user_1", {
name: "Expired",
scopes: ["content:read"],
expiresAt: pastDate,
});
const rawToken = createResult.data!.token;
const resolved = await resolveApiToken(db, rawToken);
expect(resolved).toBeNull();
});
it("resolves non-expired token", async () => {
const futureDate = new Date(Date.now() + 86400000).toISOString(); // Tomorrow
const createResult = await handleApiTokenCreate(db, "user_1", {
name: "Future",
scopes: ["admin"],
expiresAt: futureDate,
});
const rawToken = createResult.data!.token;
const resolved = await resolveApiToken(db, rawToken);
expect(resolved).not.toBeNull();
expect(resolved!.scopes).toEqual(["admin"]);
});
});
describe("resolveOAuthToken", () => {
  /**
   * Seed an OAuth token row directly (there is no Device Flow handler in
   * scope here) and return the raw token string for resolution.
   */
  async function seedToken(
    prefix: string,
    tokenType: "access" | "refresh",
    expiresAt: string,
  ): Promise<string> {
    const { raw, hash } = generatePrefixedToken(prefix);
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: hash,
        token_type: tokenType,
        user_id: "user_1",
        scopes: JSON.stringify(["content:read"]),
        client_type: "cli",
        expires_at: expiresAt,
      })
      .execute();
    return raw;
  }

  it("resolves a valid OAuth access token", async () => {
    const inAnHour = new Date(Date.now() + 3600000).toISOString();
    const raw = await seedToken(TOKEN_PREFIXES.OAUTH_ACCESS, "access", inAnHour);

    const resolved = await resolveOAuthToken(db, raw);

    expect(resolved).not.toBeNull();
    expect(resolved!.userId).toBe("user_1");
    expect(resolved!.scopes).toEqual(["content:read"]);
  });

  it("returns null for expired OAuth token", async () => {
    const anHourAgo = new Date(Date.now() - 3600000).toISOString();
    const raw = await seedToken(TOKEN_PREFIXES.OAUTH_ACCESS, "access", anHourAgo);

    expect(await resolveOAuthToken(db, raw)).toBeNull();
  });

  it("does not resolve refresh tokens", async () => {
    // A refresh token must never act as an access token, even when unexpired.
    const inAnHour = new Date(Date.now() + 3600000).toISOString();
    const raw = await seedToken(TOKEN_PREFIXES.OAUTH_REFRESH, "refresh", inAnHour);

    expect(await resolveOAuthToken(db, raw)).toBeNull();
  });
});

View File

@@ -0,0 +1,475 @@
/**
* Integration tests for OAuth 2.1 Authorization Code + PKCE handlers.
*
* Tests the full authorization code flow lifecycle against a real
* in-memory SQLite database.
*/
import { computeS256Challenge, Role } from "@emdash-cms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
buildDeniedRedirect,
cleanupExpiredAuthorizationCodes,
handleAuthorizationApproval,
handleAuthorizationCodeExchange,
} from "../../../src/api/handlers/oauth-authorization.js";
import { handleOAuthClientCreate } from "../../../src/api/handlers/oauth-clients.js";
import { hashApiToken } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
const ACCESS_TOKEN_PREFIX_REGEX = /^ec_oat_/;
const REFRESH_TOKEN_PREFIX_REGEX = /^ec_ort_/;
// Fresh in-memory database per test.
let db: Kysely<Database>;

beforeEach(async () => {
  db = await setupTestDatabase();

  // Seed the user every test authorizes as.
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();

  // Register the OAuth clients the tests below approve/exchange against.
  const clients = [
    {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["http://127.0.0.1:8080/callback", "https://myapp.example.com/callback"],
    },
    {
      id: "test",
      name: "Test",
      redirectUris: ["http://127.0.0.1:8080/callback"],
    },
  ];
  for (const client of clients) {
    await handleOAuthClientCreate(db, client);
  }
});

afterEach(async () => {
  await db.destroy();
});
describe("Authorization Approval", () => {
  /** Shorthand: run an authorization approval as user-1 with the ADMIN role. */
  const approve = (params: Parameters<typeof handleAuthorizationApproval>[3]) =>
    handleAuthorizationApproval(db, "user-1", Role.ADMIN, params);

  it("should create an authorization code with valid params", async () => {
    const verifier = generateCodeVerifier();
    const result = await approve({
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write",
      state: "random-state-value",
      code_challenge: computeS256Challenge(verifier),
      code_challenge_method: "S256",
    });

    expect(result.success).toBe(true);
    if (!result.success) return;

    // Redirect must land on the registered URI and carry code + state.
    const redirect = new URL(result.data.redirect_url);
    expect(redirect.origin).toBe("http://127.0.0.1:8080");
    expect(redirect.pathname).toBe("/callback");
    expect(redirect.searchParams.get("code")).toBeTruthy();
    expect(redirect.searchParams.get("state")).toBe("random-state-value");
  });

  it("should reject unsupported response_type", async () => {
    // Only the authorization-code response type is supported.
    const result = await approve({
      response_type: "token",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "S256",
    });

    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_RESPONSE_TYPE");
  });

  it("should reject plain HTTP redirect to non-localhost", async () => {
    const result = await approve({
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://evil.com/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "S256",
    });

    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REDIRECT_URI");
  });

  it("should allow HTTPS redirects", async () => {
    const verifier = generateCodeVerifier();
    const result = await approve({
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://myapp.example.com/callback",
      scope: "content:read",
      code_challenge: computeS256Challenge(verifier),
      code_challenge_method: "S256",
    });

    expect(result.success).toBe(true);
  });

  it("should reject plain code challenge method", async () => {
    // Only S256 PKCE is accepted; "plain" is rejected outright.
    const result = await approve({
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "plain",
    });

    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REQUEST");
  });

  it("should reject invalid scopes", async () => {
    const result = await approve({
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "invalid:scope",
      code_challenge: "test",
      code_challenge_method: "S256",
    });

    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_SCOPE");
  });
});
// End-to-end exercises of the PKCE code exchange: each test first runs the
// approval step to obtain a real authorization code, then attacks or
// completes the exchange. Step order matters — the DB-state assertions
// depend on exactly one prior exchange attempt.
describe("Authorization Code Exchange: Full Flow", () => {
  it("should exchange code for tokens with valid PKCE", async () => {
    // Step 1: Generate PKCE pair
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    // Step 2: Get authorization code
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write media:read",
      state: "state123",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    // The code is delivered as a query parameter on the redirect URL.
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // Step 3: Exchange code for tokens
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(true);
    if (!exchangeResult.success) return;
    expect(exchangeResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(exchangeResult.data.refresh_token).toMatch(REFRESH_TOKEN_PREFIX_REGEX);
    expect(exchangeResult.data.token_type).toBe("Bearer");
    expect(exchangeResult.data.expires_in).toBe(3600);
    expect(exchangeResult.data.scope).toBe("content:read content:write media:read");
    // Step 4: Verify tokens are stored
    // (only hashes are persisted; look the row up by the token's hash)
    const accessHash = hashApiToken(exchangeResult.data.access_token);
    const accessRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", accessHash)
      .executeTakeFirst();
    expect(accessRow).toBeTruthy();
    expect(accessRow!.token_type).toBe("access");
    expect(accessRow!.user_id).toBe("user-1");
    expect(accessRow!.client_id).toBe("test-client");
    // Step 5: Authorization code is consumed (single-use)
    const codeHash = hashApiToken(code);
    const codeRow = await db
      .selectFrom("_emdash_authorization_codes")
      .selectAll()
      .where("code_hash", "=", codeHash)
      .executeTakeFirst();
    expect(codeRow).toBeUndefined();
  });
  it("should reject wrong code verifier (PKCE failure)", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // Use a DIFFERENT code verifier
    const wrongVerifier = generateCodeVerifier();
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: wrongVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("PKCE");
    // Code should be deleted after failed PKCE verification
    // (burning the code prevents further brute-force attempts on it)
    const codeHash = hashApiToken(code);
    const codeRow = await db
      .selectFrom("_emdash_authorization_codes")
      .selectAll()
      .where("code_hash", "=", codeHash)
      .executeTakeFirst();
    expect(codeRow).toBeUndefined();
  });
  it("should reject mismatched redirect_uri", async () => {
    // The redirect_uri at exchange time must byte-match the one approved.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:9999/different",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("redirect_uri");
  });
  it("should reject mismatched client_id", async () => {
    // A code issued to one client cannot be redeemed by another.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "different-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("client_id");
  });
  it("should reject expired authorization code", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    // Insert an expired code directly
    // (bypasses the approval handler so we control expires_at exactly;
    //  generateCodeVerifier is reused here just as a random-string source)
    const code = generateCodeVerifier();
    const codeHash = hashApiToken(code);
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: "test-client",
        redirect_uri: "http://127.0.0.1:8080/callback",
        user_id: "user-1",
        scopes: JSON.stringify(["content:read"]),
        code_challenge: codeChallenge,
        code_challenge_method: "S256",
        resource: null,
        expires_at: new Date(Date.now() - 1000).toISOString(), // Already expired
      })
      .execute();
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("expired");
  });
  it("should reject code reuse (single-use enforcement)", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // First exchange succeeds
    const first = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(first.success).toBe(true);
    // Second exchange with same code fails
    const second = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(second.success).toBe(false);
    if (second.success) return;
    expect(second.error.code).toBe("invalid_grant");
  });
});
describe("buildDeniedRedirect", () => {
  it("should include error and state params", () => {
    const parsed = new URL(buildDeniedRedirect("http://127.0.0.1:8080/callback", "state123"));

    expect(parsed.searchParams.get("error")).toBe("access_denied");
    expect(parsed.searchParams.get("error_description")).toBeTruthy();
    expect(parsed.searchParams.get("state")).toBe("state123");
  });

  it("should omit state when not provided", () => {
    const parsed = new URL(buildDeniedRedirect("http://127.0.0.1:8080/callback"));

    expect(parsed.searchParams.get("error")).toBe("access_denied");
    // No state param at all — not even an empty one.
    expect(parsed.searchParams.has("state")).toBe(false);
  });
});
describe("cleanupExpiredAuthorizationCodes", () => {
  /** Insert an authorization-code row with the given hash and expiry. */
  async function seedCode(codeHash: string, expiresAt: string): Promise<void> {
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: "test",
        redirect_uri: "http://127.0.0.1:8080/callback",
        user_id: "user-1",
        scopes: JSON.stringify(["content:read"]),
        code_challenge: computeS256Challenge(generateCodeVerifier()),
        code_challenge_method: "S256",
        resource: null,
        expires_at: expiresAt,
      })
      .execute();
  }

  it("should delete expired codes", async () => {
    // One row already past its expiry, one still valid for ten minutes.
    await seedCode("expired-hash", new Date(Date.now() - 1000).toISOString());
    await seedCode("valid-hash", new Date(Date.now() + 600000).toISOString());

    const deleted = await cleanupExpiredAuthorizationCodes(db);
    expect(deleted).toBe(1);

    // Only the unexpired row survives.
    const remaining = await db.selectFrom("_emdash_authorization_codes").selectAll().execute();
    expect(remaining).toHaveLength(1);
    expect(remaining[0]!.code_hash).toBe("valid-hash");
  });
});

View File

@@ -0,0 +1,594 @@
/**
* Integration tests for OAuth Device Flow handlers.
*
* Tests the full device flow lifecycle against a real in-memory SQLite database.
*/
import { Role } from "@emdash-cms/auth";
import type { RoleLevel } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
handleDeviceAuthorize,
handleDeviceCodeRequest,
handleDeviceTokenExchange,
handleTokenRefresh,
handleTokenRevoke,
} from "../../../src/api/handlers/device-flow.js";
import { hashApiToken } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
const USER_CODE_FORMAT_REGEX = /^[A-Z0-9]{4}-[A-Z0-9]{4}$/;
const ACCESS_TOKEN_PREFIX_REGEX = /^ec_oat_/;
const REFRESH_TOKEN_PREFIX_REGEX = /^ec_ort_/;
const HYPHEN_REGEX = /-/g;
// Shared database handle, recreated fresh for every test.
let db: Kysely<Database>;

beforeEach(async () => {
  db = await setupTestDatabase();
  // Create a test user
  // (role 50 — presumably corresponds to Role.ADMIN; the tests pass the
  //  role they want to exercise to the handlers explicitly, so this row
  //  mainly has to exist — TODO confirm against @emdash-cms/auth)
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();
});

afterEach(async () => {
  // Dispose the Kysely instance so the per-test database is torn down.
  await db.destroy();
});
describe("Device Code Request", () => {
  it("should create a device code with default scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );

    expect(result.success).toBe(true);
    if (!result.success) return;

    const { device_code, user_code, verification_uri, expires_in, interval } = result.data;
    expect(device_code).toBeTruthy();
    expect(user_code).toMatch(USER_CODE_FORMAT_REGEX);
    expect(verification_uri).toBe("https://example.com/_emdash/device");
    expect(expires_in).toBe(900); // 15 minutes
    expect(interval).toBe(5);
  });

  it("should create a device code with custom scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { scope: "content:read media:read" },
      "https://example.com/_emdash/device",
    );

    expect(result.success).toBe(true);
    if (!result.success) return;

    // The requested scopes must be persisted on the device-code row.
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", result.data.device_code)
      .executeTakeFirst();
    expect(row).toBeTruthy();
    expect(JSON.parse(row!.scopes)).toEqual(["content:read", "media:read"]);
  });

  it("should reject invalid scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { scope: "invalid:scope" },
      "https://example.com/_emdash/device",
    );

    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_SCOPE");
  });
});
// Full RFC 8628-style lifecycle tests. Step order is load-bearing: polling
// before authorization must report pending, and the DB-state checks assume
// exactly one successful exchange.
describe("Device Flow: Full Lifecycle", () => {
  it("should complete the full device flow: code → authorize → exchange", async () => {
    // Step 1: Request device code
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const { device_code, user_code } = codeResult.data;
    // Step 2: Poll before authorization → pending
    const pendingResult = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(pendingResult.success).toBe(false);
    expect(pendingResult.deviceFlowError).toBe("authorization_pending");
    // Step 3: User authorizes (admin role = 50)
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code,
    });
    expect(authResult.success).toBe(true);
    if (!authResult.success) return;
    expect(authResult.data.authorized).toBe(true);
    // Step 4: Exchange for tokens
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    expect(tokenResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(tokenResult.data.refresh_token).toMatch(REFRESH_TOKEN_PREFIX_REGEX);
    expect(tokenResult.data.token_type).toBe("Bearer");
    expect(tokenResult.data.expires_in).toBe(3600);
    expect(tokenResult.data.scope).toBeTruthy();
    // Step 5: Device code should be consumed
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    expect(row).toBeUndefined();
    // Step 6: Tokens should be stored
    // (only hashes are persisted; look rows up by each token's hash)
    const accessHash = hashApiToken(tokenResult.data.access_token);
    const accessRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", accessHash)
      .executeTakeFirst();
    expect(accessRow).toBeTruthy();
    expect(accessRow!.token_type).toBe("access");
    expect(accessRow!.user_id).toBe("user-1");
    const refreshHash = hashApiToken(tokenResult.data.refresh_token);
    const refreshRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", refreshHash)
      .executeTakeFirst();
    expect(refreshRow).toBeTruthy();
    expect(refreshRow!.token_type).toBe("refresh");
  });
  it("should handle denied authorization", async () => {
    const codeResult = await handleDeviceCodeRequest(db, {}, "https://example.com/_emdash/device");
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    // User denies
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
      action: "deny",
    });
    expect(authResult.success).toBe(true);
    if (!authResult.success) return;
    expect(authResult.data.authorized).toBe(false);
    // Exchange should return access_denied
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(false);
    expect(tokenResult.deviceFlowError).toBe("access_denied");
  });
  it("should normalize user codes (strip hyphens, case-insensitive)", async () => {
    const codeResult = await handleDeviceCodeRequest(db, {}, "https://example.com/_emdash/device");
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    // Submit lowercase without hyphen
    // (what a user typing "abcd1234" instead of "ABCD-1234" would send)
    const code = codeResult.data.user_code.replace(HYPHEN_REGEX, "").toLowerCase();
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: code,
    });
    expect(authResult.success).toBe(true);
  });
});
// Failure paths of the device-code → token exchange.
describe("Device Token Exchange: Error Cases", () => {
  it("should reject invalid grant_type", async () => {
    const result = await handleDeviceTokenExchange(db, {
      device_code: "whatever",
      grant_type: "invalid",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_GRANT_TYPE");
  });
  it("should reject unknown device codes", async () => {
    const result = await handleDeviceTokenExchange(db, {
      device_code: "nonexistent",
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
  it("should reject a second exchange for an already-consumed device code", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
    });
    // First exchange succeeds
    const first = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(first.success).toBe(true);
    // Second exchange fails — device code was consumed atomically
    const second = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(second.success).toBe(false);
    if (second.success) return;
    expect(second.error.code).toBe("INVALID_GRANT");
  });
  it("should report expired device codes", async () => {
    // Create a device code that's already expired
    // (inserted directly so expires_at can be set in the past)
    await db
      .insertInto("_emdash_device_codes")
      .values({
        device_code: "expired-code",
        user_code: "AAAA-BBBB",
        scopes: JSON.stringify(["content:read"]),
        status: "pending",
        expires_at: new Date(Date.now() - 1000).toISOString(),
        interval: 5,
      })
      .execute();
    const result = await handleDeviceTokenExchange(db, {
      device_code: "expired-code",
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(result.success).toBe(false);
    expect(result.deviceFlowError).toBe("expired_token");
  });
});
// refresh_token grant: a valid refresh token yields a NEW access token
// while the refresh token itself is kept.
describe("Token Refresh", () => {
  it("should exchange a refresh token for a new access token", async () => {
    // Complete a device flow first to get tokens
    const codeResult = await handleDeviceCodeRequest(db, {}, "https://example.com/_emdash/device");
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
    });
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    // Refresh
    const refreshResult = await handleTokenRefresh(db, {
      refresh_token: tokenResult.data.refresh_token,
      grant_type: "refresh_token",
    });
    expect(refreshResult.success).toBe(true);
    if (!refreshResult.success) return;
    // A fresh access token is minted; the refresh token is NOT rotated.
    expect(refreshResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(refreshResult.data.access_token).not.toBe(tokenResult.data.access_token);
    expect(refreshResult.data.refresh_token).toBe(tokenResult.data.refresh_token);
    expect(refreshResult.data.expires_in).toBe(3600);
  });
  it("should reject invalid refresh tokens", async () => {
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_ort_invalid",
      grant_type: "refresh_token",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
  it("should reject wrong grant_type", async () => {
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_ort_whatever",
      grant_type: "authorization_code",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_GRANT_TYPE");
  });
  it("should reject wrong token prefix", async () => {
    // A PAT-prefixed (ec_pat_) string must not be accepted as a refresh token.
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_pat_notarefresh",
      grant_type: "refresh_token",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
});
// Revocation tests (RFC 7009 semantics).
//
// Fix: the first two tests previously bailed out with bare
// `if (!result.success) return;` guards and NO preceding assertion, so a
// broken device-flow setup made them pass vacuously. Every other test in
// this file asserts success before the narrowing guard; do the same here
// so setup failures fail the test loudly.
describe("Token Revoke", () => {
  it("should revoke an access token", async () => {
    // Get tokens via device flow
    const codeResult = await handleDeviceCodeRequest(db, {}, "https://example.com/_emdash/device");
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
    });
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    // Revoke the access token
    const revokeResult = await handleTokenRevoke(db, {
      token: tokenResult.data.access_token,
    });
    expect(revokeResult.success).toBe(true);
    // Access token should be gone (rows are keyed by the token's hash)
    const accessHash = hashApiToken(tokenResult.data.access_token);
    const row = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", accessHash)
      .executeTakeFirst();
    expect(row).toBeUndefined();
  });
  it("should revoke a refresh token and its access tokens", async () => {
    // Get tokens via device flow
    const codeResult = await handleDeviceCodeRequest(db, {}, "https://example.com/_emdash/device");
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
    });
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    // Revoke the refresh token — this must cascade to its access tokens
    const revokeResult = await handleTokenRevoke(db, {
      token: tokenResult.data.refresh_token,
    });
    expect(revokeResult.success).toBe(true);
    // Both tokens should be gone
    const tokenCount = await db
      .selectFrom("_emdash_oauth_tokens")
      .select(db.fn.count("token_hash").as("count"))
      .executeTakeFirst();
    expect(Number(tokenCount?.count ?? 0)).toBe(0);
  });
  it("should return success for unknown tokens (RFC 7009)", async () => {
    // RFC 7009 §2.2: revoking an unknown/already-revoked token is not an error.
    const result = await handleTokenRevoke(db, {
      token: "ec_oat_nonexistent",
    });
    expect(result.success).toBe(true);
  });
});
// Failure paths of the user-facing authorize step.
describe("Device Authorize: Error Cases", () => {
  it("should reject invalid user codes", async () => {
    const result = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: "INVALID-CODE",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_CODE");
  });
  it("should reject expired device codes", async () => {
    // Seed a pending device code whose expiry is already in the past.
    await db
      .insertInto("_emdash_device_codes")
      .values({
        device_code: "expired-dc",
        user_code: "CCCC-DDDD",
        scopes: JSON.stringify(["content:read"]),
        status: "pending",
        expires_at: new Date(Date.now() - 1000).toISOString(),
        interval: 5,
      })
      .execute();
    const result = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: "CCCC-DDDD",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("EXPIRED_CODE");
  });
});
// ---------------------------------------------------------------------------
// Scope escalation prevention (SEC: CWE-269)
// ---------------------------------------------------------------------------
describe("Scope Clamping: Role-based scope restriction", () => {
/** Helper: run a full device flow with given requested scopes and user role */
async function completeDeviceFlow(
requestedScopes: string,
userRole: RoleLevel,
): Promise<{ scopes: string; success: boolean }> {
const codeResult = await handleDeviceCodeRequest(
db,
{ scope: requestedScopes },
"https://example.com/_emdash/device",
);
if (!codeResult.success) return { scopes: "", success: false };
const authResult = await handleDeviceAuthorize(db, "user-1", userRole, {
user_code: codeResult.data.user_code,
});
if (!authResult.success) return { scopes: "", success: false };
const tokenResult = await handleDeviceTokenExchange(db, {
device_code: codeResult.data.device_code,
grant_type: "urn:ietf:params:oauth:grant-type:device_code",
});
if (!tokenResult.success) return { scopes: "", success: false };
return { scopes: tokenResult.data.scope, success: true };
}
it("should strip admin scope from non-admin user tokens", async () => {
// CONTRIBUTOR requests admin scope — this is the core attack scenario
const result = await completeDeviceFlow("content:read content:write admin", Role.CONTRIBUTOR);
expect(result.success).toBe(true);
const scopes = result.scopes.split(" ");
expect(scopes).toContain("content:read");
expect(scopes).toContain("content:write");
expect(scopes).not.toContain("admin");
});
it("should strip schema:write from non-admin user tokens", async () => {
// EDITOR requests schema:write — only ADMIN gets schema:write
const result = await completeDeviceFlow("content:read schema:read schema:write", Role.EDITOR);
expect(result.success).toBe(true);
const scopes = result.scopes.split(" ");
expect(scopes).toContain("content:read");
expect(scopes).toContain("schema:read");
expect(scopes).not.toContain("schema:write");
});
it("should strip schema:read from contributor tokens", async () => {
// CONTRIBUTOR requests schema:read — only EDITOR+ gets schema:read
const result = await completeDeviceFlow("content:read schema:read", Role.CONTRIBUTOR);
expect(result.success).toBe(true);
const scopes = result.scopes.split(" ");
expect(scopes).toContain("content:read");
expect(scopes).not.toContain("schema:read");
});
it("should allow admin user to get all scopes", async () => {
  // An ADMIN requesting the full scope set must not be clamped at all.
  const flow = await completeDeviceFlow(
    "content:read content:write media:read media:write schema:read schema:write admin",
    Role.ADMIN,
  );
  expect(flow.success).toBe(true);
  const granted = flow.scopes.split(" ");
  for (const scope of ["admin", "schema:write", "content:write"]) {
    expect(granted).toContain(scope);
  }
});
it("should return INSUFFICIENT_ROLE when no scopes survive clamping", async () => {
  // SUBSCRIBER requests only privileged scopes ("admin schema:write") — after
  // role-based clamping nothing is left, so authorization must fail outright
  // rather than issue an empty-scope grant.
  const codeResult = await handleDeviceCodeRequest(
    db,
    { scope: "admin schema:write" },
    "https://example.com/_emdash/device",
  );
  expect(codeResult.success).toBe(true);
  if (!codeResult.success) return; // narrows the result union for .data access
  const authResult = await handleDeviceAuthorize(db, "user-1", Role.SUBSCRIBER, {
    user_code: codeResult.data.user_code,
  });
  expect(authResult.success).toBe(false);
  if (authResult.success) return; // narrows for .error access
  expect(authResult.error.code).toBe("INSUFFICIENT_ROLE");
});
it("should clamp scopes in stored device code at authorize time", async () => {
  // Verify that the stored scopes are clamped in the database row itself,
  // not just in the response — the later token exchange reads this row.
  const codeResult = await handleDeviceCodeRequest(
    db,
    { scope: "content:read content:write schema:write admin" },
    "https://example.com/_emdash/device",
  );
  expect(codeResult.success).toBe(true);
  if (!codeResult.success) return; // narrows the result union for .data access
  // Before authorize: the stored row still carries the raw requested scopes,
  // including admin and schema:write.
  const beforeRow = await db
    .selectFrom("_emdash_device_codes")
    .selectAll()
    .where("device_code", "=", codeResult.data.device_code)
    .executeTakeFirst();
  expect(JSON.parse(beforeRow!.scopes)).toContain("admin");
  expect(JSON.parse(beforeRow!.scopes)).toContain("schema:write");
  // Authorize as CONTRIBUTOR — admin and schema:write must be stripped
  await handleDeviceAuthorize(db, "user-1", Role.CONTRIBUTOR, {
    user_code: codeResult.data.user_code,
  });
  // After authorize: scopes should be clamped in DB
  const afterRow = await db
    .selectFrom("_emdash_device_codes")
    .selectAll()
    .where("device_code", "=", codeResult.data.device_code)
    .executeTakeFirst();
  const storedScopes = JSON.parse(afterRow!.scopes) as string[];
  expect(storedScopes).toContain("content:read");
  expect(storedScopes).toContain("content:write");
  expect(storedScopes).not.toContain("admin");
  expect(storedScopes).not.toContain("schema:write");
});
it("should allow editor to get content + media + schema:read scopes", async () => {
  // EDITOR is entitled to every scope requested here, so all five survive.
  const flow = await completeDeviceFlow(
    "content:read content:write media:read media:write schema:read",
    Role.EDITOR,
  );
  expect(flow.success).toBe(true);
  const granted = flow.scopes.split(" ");
  for (const scope of [
    "content:read",
    "content:write",
    "media:read",
    "media:write",
    "schema:read",
  ]) {
    expect(granted).toContain(scope);
  }
});
});

View File

@@ -0,0 +1,372 @@
/**
* Integration tests for OAuth client management and redirect URI allowlist.
*
* Tests that the authorization endpoint rejects unregistered clients and
* redirect URIs not in the client's registered set.
*/
import { computeS256Challenge, Role } from "@emdash-cms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { handleAuthorizationApproval } from "../../../src/api/handlers/oauth-authorization.js";
import {
handleOAuthClientCreate,
handleOAuthClientDelete,
handleOAuthClientGet,
handleOAuthClientList,
handleOAuthClientUpdate,
lookupOAuthClient,
validateClientRedirectUri,
} from "../../../src/api/handlers/oauth-clients.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Shared database handle; a fresh in-memory DB is created for every test.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
  // Seed the single user the authorization tests act as. role: 50 is an
  // elevated numeric role — TODO confirm which Role enum value it maps to.
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();
});
afterEach(async () => {
  // Close the DB connection so test runs don't leak handles.
  await db.destroy();
});
// ---------------------------------------------------------------------------
// validateClientRedirectUri (unit-level)
// ---------------------------------------------------------------------------
describe("validateClientRedirectUri", () => {
  // The validator returns null on success and a truthy error otherwise;
  // matching against the registered set must be byte-exact.
  it("should return null for a registered redirect URI", () => {
    const allowlist = ["https://myapp.example.com/callback", "http://127.0.0.1:8080/callback"];
    expect(validateClientRedirectUri("https://myapp.example.com/callback", allowlist)).toBeNull();
  });
  it("should return error for an unregistered redirect URI", () => {
    const allowlist = ["https://myapp.example.com/callback"];
    expect(validateClientRedirectUri("https://evil.com/callback", allowlist)).toBeTruthy();
  });
  it("should require exact match (no prefix matching)", () => {
    const allowlist = ["https://myapp.example.com/callback"];
    expect(
      validateClientRedirectUri("https://myapp.example.com/callback/extra", allowlist),
    ).toBeTruthy();
  });
  it("should require exact match (no query string tolerance)", () => {
    const allowlist = ["https://myapp.example.com/callback"];
    expect(
      validateClientRedirectUri("https://myapp.example.com/callback?foo=bar", allowlist),
    ).toBeTruthy();
  });
});
// ---------------------------------------------------------------------------
// OAuth Client CRUD
// ---------------------------------------------------------------------------
// CRUD handlers are exercised directly against the DB — no HTTP layer here.
describe("OAuth Client CRUD", () => {
  it("should create a client", async () => {
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    expect(result.success).toBe(true);
    if (!result.success) return; // narrows the result union for .data access
    expect(result.data.id).toBe("test-client");
    expect(result.data.name).toBe("Test Client");
    expect(result.data.redirectUris).toEqual(["https://myapp.example.com/callback"]);
  });
  it("should reject duplicate client IDs", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    // Second create with the same id must fail with CONFLICT, not overwrite.
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Duplicate Client",
      redirectUris: ["https://other.example.com/callback"],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("CONFLICT");
  });
  it("should reject clients with empty redirect URIs", async () => {
    // A client without any redirect URI could never complete a flow safely.
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: [],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
  });
  it("should reject clients with invalid redirect URIs", async () => {
    // Plain-HTTP redirect URIs are only accepted for loopback hosts.
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["http://example.com/callback"],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
    expect(result.error.message).toContain("HTTP redirect URIs are only allowed for localhost");
  });
  it("should list clients", async () => {
    await handleOAuthClientCreate(db, {
      id: "client-1",
      name: "Client 1",
      redirectUris: ["https://one.example.com/callback"],
    });
    await handleOAuthClientCreate(db, {
      id: "client-2",
      name: "Client 2",
      redirectUris: ["https://two.example.com/callback"],
    });
    const result = await handleOAuthClientList(db);
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.items).toHaveLength(2);
  });
  it("should get a client by ID", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
      scopes: ["content:read"],
    });
    const result = await handleOAuthClientGet(db, "test-client");
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.id).toBe("test-client");
    // Scopes set at creation are round-tripped on get.
    expect(result.data.scopes).toEqual(["content:read"]);
  });
  it("should return NOT_FOUND for unknown client", async () => {
    const result = await handleOAuthClientGet(db, "unknown");
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("NOT_FOUND");
  });
  it("should update a client", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    const result = await handleOAuthClientUpdate(db, "test-client", {
      name: "Updated Client",
      redirectUris: ["https://myapp.example.com/callback", "https://myapp.example.com/callback2"],
    });
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.name).toBe("Updated Client");
    expect(result.data.redirectUris).toHaveLength(2);
  });
  it("should reject update with empty redirect URIs", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    // An update must not be able to clear the allowlist entirely.
    const result = await handleOAuthClientUpdate(db, "test-client", {
      redirectUris: [],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
  });
  it("should reject update with invalid redirect URIs", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    // Custom app schemes (myapp://) are rejected by the validator.
    const result = await handleOAuthClientUpdate(db, "test-client", {
      redirectUris: ["myapp://callback"],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
    expect(result.error.message).toContain("Unsupported redirect URI scheme");
  });
  it("should delete a client", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
    const result = await handleOAuthClientDelete(db, "test-client");
    expect(result.success).toBe(true);
    // Subsequent lookup must miss.
    const getResult = await handleOAuthClientGet(db, "test-client");
    expect(getResult.success).toBe(false);
  });
  it("should return NOT_FOUND when deleting unknown client", async () => {
    const result = await handleOAuthClientDelete(db, "unknown");
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("NOT_FOUND");
  });
});
// ---------------------------------------------------------------------------
// lookupOAuthClient
// ---------------------------------------------------------------------------
describe("lookupOAuthClient", () => {
  it("should return redirect URIs for a registered client", async () => {
    // Register a client, then look it up and confirm the URI set round-trips.
    const registeredUris = [
      "https://myapp.example.com/callback",
      "http://127.0.0.1:8080/callback",
    ];
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: registeredUris,
    });
    const client = await lookupOAuthClient(db, "test-client");
    expect(client).toBeTruthy();
    expect(client!.redirectUris).toEqual(registeredUris);
  });
  it("should return null for an unregistered client", async () => {
    const missing = await lookupOAuthClient(db, "unknown-client");
    expect(missing).toBeNull();
  });
});
// ---------------------------------------------------------------------------
// Authorization with client redirect URI validation
// ---------------------------------------------------------------------------
describe("Authorization with redirect URI allowlist", () => {
  beforeEach(async () => {
    // Register a client with specific redirect URIs; every test below drives
    // handleAuthorizationApproval against this allowlist.
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["http://127.0.0.1:8080/callback", "https://myapp.example.com/callback"],
    });
  });
  it("should approve authorization with a registered redirect URI", async () => {
    // Real PKCE pair: challenge is the S256 transform of a fresh verifier.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write",
      state: "random-state-value",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
    if (!result.success) return; // narrows for .data access
    // Approval produces a redirect back to the registered URI carrying a code.
    const redirectUrl = new URL(result.data.redirect_url);
    expect(redirectUrl.origin).toBe("http://127.0.0.1:8080");
    expect(redirectUrl.searchParams.get("code")).toBeTruthy();
  });
  it("should reject authorization with unregistered redirect URI", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://evil.example.com/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REDIRECT_URI");
    expect(result.error.message).toContain("not registered");
  });
  it("should reject authorization with unknown client_id", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "unknown-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_CLIENT");
  });
  it("should accept HTTPS redirect URI in allowlist", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://myapp.example.com/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
  });
});

View File

@@ -0,0 +1,430 @@
/**
* Integration tests for database-backed rate limiting.
*
* Tests the rate limiter utility and slow_down enforcement
* against a real in-memory SQLite database.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
handleDeviceCodeRequest,
handleDeviceTokenExchange,
} from "../../../src/api/handlers/device-flow.js";
import {
checkRateLimit,
cleanupExpiredRateLimits,
getClientIp,
} from "../../../src/auth/rate-limit.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Fresh in-memory database per test; destroyed afterwards to avoid leaks.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
});
afterEach(async () => {
  await db.destroy();
});
// ---------------------------------------------------------------------------
// Rate Limiter
// ---------------------------------------------------------------------------
describe("checkRateLimit", () => {
  it("should allow requests within the limit", async () => {
    // Limit of 3 per 60s window: counts 1..3 are all admitted.
    const result1 = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    expect(result1.allowed).toBe(true);
    expect(result1.count).toBe(1);
    const result2 = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    expect(result2.allowed).toBe(true);
    expect(result2.count).toBe(2);
    const result3 = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    expect(result3.allowed).toBe(true);
    expect(result3.count).toBe(3);
  });
  it("should reject requests exceeding the limit", async () => {
    // Use up the limit
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    // 4th request should be rejected; the count keeps incrementing past the limit.
    const result = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    expect(result.allowed).toBe(false);
    expect(result.count).toBe(4);
    expect(result.limit).toBe(3);
  });
  it("should track limits per IP independently", async () => {
    // IP A uses its limit
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    const resultA = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    expect(resultA.allowed).toBe(false);
    // IP B should still be allowed
    const resultB = await checkRateLimit(db, "5.6.7.8", "test/endpoint", 2, 60);
    expect(resultB.allowed).toBe(true);
    expect(resultB.count).toBe(1);
  });
  it("should track limits per endpoint independently", async () => {
    // Use up limit on endpoint A
    await checkRateLimit(db, "1.2.3.4", "endpoint-a", 1, 60);
    const resultA = await checkRateLimit(db, "1.2.3.4", "endpoint-a", 1, 60);
    expect(resultA.allowed).toBe(false);
    // Endpoint B should still be allowed
    const resultB = await checkRateLimit(db, "1.2.3.4", "endpoint-b", 1, 60);
    expect(resultB.allowed).toBe(true);
  });
  it("should skip rate limiting when IP is null", async () => {
    // Even after many calls, null IP is always allowed; count stays 0 because
    // nothing is recorded for an unidentifiable client.
    for (let i = 0; i < 10; i++) {
      const result = await checkRateLimit(db, null, "test/endpoint", 1, 60);
      expect(result.allowed).toBe(true);
      expect(result.count).toBe(0);
    }
  });
  it("should reset after window expires", async () => {
    // Use a 1-second window
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    const blocked = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    expect(blocked.allowed).toBe(false);
    // Wait for the window to expire (advance past the 1-second boundary)
    await new Promise((resolve) => setTimeout(resolve, 1100));
    const allowed = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    expect(allowed.allowed).toBe(true);
    expect(allowed.count).toBe(1);
  });
});
// ---------------------------------------------------------------------------
// IP Extraction
// ---------------------------------------------------------------------------
describe("getClientIp", () => {
  /** Create a request with a fake `cf` object to simulate Cloudflare. */
  function cfRequest(url: string, init?: RequestInit): Request {
    const req = new Request(url, init);
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- test helper
    (req as unknown as { cf: Record<string, unknown> }).cf = { country: "US" };
    return req;
  }
  it("should extract IP from CF-Connecting-IP on Cloudflare", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "cf-connecting-ip": "198.51.100.1" },
    });
    expect(getClientIp(request)).toBe("198.51.100.1");
  });
  it("should extract IP from X-Forwarded-For on Cloudflare", () => {
    // First XFF entry is treated as the original client.
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "203.0.113.50, 70.41.3.18, 150.172.238.178" },
    });
    expect(getClientIp(request)).toBe("203.0.113.50");
  });
  it("should return null when not on Cloudflare (no cf object)", () => {
    const request = new Request("http://localhost/test");
    expect(getClientIp(request)).toBeNull();
  });
  it("should return null when not on Cloudflare even with XFF header", () => {
    // XFF is client-spoofable off-Cloudflare, so it must be ignored there.
    const request = new Request("http://localhost/test", {
      headers: { "x-forwarded-for": "203.0.113.50" },
    });
    expect(getClientIp(request)).toBeNull();
  });
  it("should reject non-IP values in X-Forwarded-For", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "<script>alert(1)</script>" },
    });
    expect(getClientIp(request)).toBeNull();
  });
  it("should handle IPv6 addresses on Cloudflare", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "2001:db8::1" },
    });
    expect(getClientIp(request)).toBe("2001:db8::1");
  });
});
describe("getClientIp with trusted proxy headers", () => {
  // On non-CF deployments behind an operator-controlled reverse proxy,
  // the operator declares which header to trust. Without this they get
  // null (which disables rate limiting) — a real operational foot-gun.
  /** Create a request with a fake `cf` object to simulate Cloudflare. */
  function cfRequest(url: string, init?: RequestInit): Request {
    const req = new Request(url, init);
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- test helper
    (req as unknown as { cf: Record<string, unknown> }).cf = { country: "US" };
    return req;
  }
  it("reads the IP from a declared trusted header off-Cloudflare", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-real-ip": "203.0.113.50" },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBe("203.0.113.50");
  });
  it("tries trusted headers in declared order", () => {
    const request = new Request("http://localhost/test", {
      headers: {
        "x-real-ip": "203.0.113.50",
        "fly-client-ip": "198.51.100.7",
      },
    });
    // fly-client-ip is listed first, so it wins even though x-real-ip is set.
    expect(getClientIp(request, ["fly-client-ip", "x-real-ip"])).toBe("198.51.100.7");
  });
  it("falls through when earlier trusted header is missing", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-real-ip": "203.0.113.50" },
    });
    expect(getClientIp(request, ["fly-client-ip", "x-real-ip"])).toBe("203.0.113.50");
  });
  it("takes the first entry when a trusted header is XFF-style", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-forwarded-for": "203.0.113.50, 10.0.0.1" },
    });
    expect(getClientIp(request, ["x-forwarded-for"])).toBe("203.0.113.50");
  });
  it("rejects non-IP-shaped values from trusted headers", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-real-ip": "<script>alert(1)</script>" },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBeNull();
  });
  it("does not read from headers that are not on the trusted list", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-client-ip": "203.0.113.50" },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBeNull();
  });
  it("without cf, returns null when no trusted header is set", () => {
    const request = new Request("http://localhost/test", {
      headers: { "x-real-ip": "203.0.113.50" },
    });
    // Empty list — operator did not opt in. Current null-IP behaviour preserved.
    expect(getClientIp(request, [])).toBeNull();
  });
  it("matches header names case-insensitively", () => {
    // Header casing must not matter to the trusted-header lookup.
    const request = new Request("http://localhost/test", {
      headers: { "X-Real-IP": "203.0.113.50" },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBe("203.0.113.50");
  });
  it("CF-Connecting-IP wins over trusted headers on Cloudflare", () => {
    // Operator on CF misconfigures trustedProxyHeaders — CF-Connecting-IP
    // is set by the CF edge itself and must not be overridden.
    const request = cfRequest("http://localhost/test", {
      headers: {
        "cf-connecting-ip": "1.1.1.1",
        "x-real-ip": "203.0.113.50",
      },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBe("1.1.1.1");
  });
  it("trusted headers fill in when the CF path produces no IP", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "x-real-ip": "203.0.113.50" },
    });
    expect(getClientIp(request, ["x-real-ip"])).toBe("203.0.113.50");
  });
});
// ---------------------------------------------------------------------------
// Cleanup
// ---------------------------------------------------------------------------
describe("cleanupExpiredRateLimits", () => {
  it("should delete expired entries", async () => {
    // Seed one entry two hours old and one in the current minute-aligned window.
    const staleWindow = new Date(Date.now() - 7200 * 1000).toISOString();
    const freshWindow = new Date(Math.floor(Date.now() / (60 * 1000)) * 60 * 1000).toISOString();
    await db
      .insertInto("_emdash_rate_limits")
      .values([
        { key: "old:entry", window: staleWindow, count: 5 },
        { key: "current:entry", window: freshWindow, count: 2 },
      ])
      .execute();
    // A one-hour retention horizon should sweep only the stale row.
    const deleted = await cleanupExpiredRateLimits(db, 3600);
    expect(deleted).toBe(1);
    const remaining = await db.selectFrom("_emdash_rate_limits").selectAll().execute();
    expect(remaining).toHaveLength(1);
    expect(remaining[0]?.key).toBe("current:entry");
  });
});
// ---------------------------------------------------------------------------
// RFC 8628 slow_down
// ---------------------------------------------------------------------------
describe("Device Token Exchange: slow_down enforcement", () => {
  // RFC 8628 device-code grant type string used by every poll below.
  const GRANT_TYPE = "urn:ietf:params:oauth:grant-type:device_code";
  it("should return slow_down when polling faster than interval", async () => {
    // Create a device code
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return; // narrows union for .data access
    const { device_code } = codeResult.data;
    // First poll — sets last_polled_at, returns authorization_pending
    const poll1 = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: GRANT_TYPE,
    });
    expect(poll1.success).toBe(false);
    expect(poll1.deviceFlowError).toBe("authorization_pending");
    // Second poll immediately — should get slow_down with new interval
    const poll2 = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: GRANT_TYPE,
    });
    expect(poll2.success).toBe(false);
    expect(poll2.deviceFlowError).toBe("slow_down");
    // Default interval (5) + SLOW_DOWN_INCREMENT (5) = 10
    expect(poll2.deviceFlowInterval).toBe(10);
  });
  it("should increase interval by 5s on each slow_down", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const { device_code } = codeResult.data;
    // First poll — sets baseline
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    // Rapid polls — each should trigger slow_down and increase interval
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    // Check the interval was increased (read the stored row, not the response)
    const row = await db
      .selectFrom("_emdash_device_codes")
      .select("interval")
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    // Default interval is 5, after one slow_down it should be 10
    expect(row?.interval).toBe(10);
    // Another rapid poll — interval should increase again to 15
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    const row2 = await db
      .selectFrom("_emdash_device_codes")
      .select("interval")
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    expect(row2?.interval).toBe(15);
  });
  it("should cap slow_down interval at 60 seconds", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const { device_code } = codeResult.data;
    // First poll — sets baseline
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    // Set interval to just below the cap so the next slow_down would exceed it
    await db
      .updateTable("_emdash_device_codes")
      .set({ interval: 58 })
      .where("device_code", "=", device_code)
      .execute();
    // Rapid poll — triggers slow_down, interval should cap at 60 not 63
    const poll = await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(poll.deviceFlowInterval).toBe(60);
    const row = await db
      .selectFrom("_emdash_device_codes")
      .select("interval")
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    expect(row?.interval).toBe(60);
  });
  it("should not return slow_down when polling at or above the interval", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const { device_code } = codeResult.data;
    // First poll — sets last_polled_at
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    // Manually set last_polled_at to far enough in the past
    await db
      .updateTable("_emdash_device_codes")
      .set({
        last_polled_at: new Date(Date.now() - 10_000).toISOString(),
      })
      .where("device_code", "=", device_code)
      .execute();
    // This poll should NOT get slow_down (10s > 5s interval)
    const poll = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: GRANT_TYPE,
    });
    expect(poll.success).toBe(false);
    // Should be authorization_pending, not slow_down
    expect(poll.deviceFlowError).toBe("authorization_pending");
  });
});

View File

@@ -0,0 +1,352 @@
/**
* E2E tests for CLI commands against a real Astro dev server.
*
* Shells out to the actual `emdash` binary with --url and --token
* flags, verifying real command output and exit codes.
*
* Runs by default. Requires built artifacts (auto-builds if missing).
*/
import { execFile } from "node:child_process";
import { resolve } from "node:path";
import { promisify } from "node:util";
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
// Promise-returning child_process.execFile for async/await use.
const exec = promisify(execFile);
const PORT = 4398; // Different port from client integration tests
// Path to the built CLI binary
const CLI_BIN = resolve(import.meta.dirname, "../../../dist/cli/index.mjs");
describe("CLI Integration", () => {
// Server context shared across all tests in this suite; started once.
let ctx: TestServerContext;
beforeAll(async () => {
  assertNodeVersion();
  ctx = await createTestServer({ port: PORT });
});
afterAll(async () => {
  // Optional-chained: cleanup must not throw if server startup failed.
  await ctx?.cleanup();
});
/** Invoke the emdash CLI with server/auth/--json flags appended; resolves to stdout. */
async function cli(...args: string[]): Promise<string> {
  const fullArgs = [CLI_BIN, ...args, "--url", ctx.baseUrl, "--token", ctx.token, "--json"];
  const result = await exec("node", fullArgs, { timeout: 15_000 });
  return result.stdout;
}
/** Invoke the CLI and parse its stdout as JSON. */
async function cliJson<T = unknown>(...args: string[]): Promise<T> {
  const raw = await cli(...args);
  return JSON.parse(raw) as T;
}
// -----------------------------------------------------------------------
// Schema commands
// -----------------------------------------------------------------------
describe("schema", () => {
  it("lists collections", async () => {
    const result = await cliJson<{ slug: string }[]>("schema", "list");
    expect(Array.isArray(result)).toBe(true);
    // "posts" and "pages" are expected seed collections — presumably created
    // by createTestServer; verify against the server fixture if this fails.
    const slugs = result.map((c) => c.slug);
    expect(slugs).toContain("posts");
    expect(slugs).toContain("pages");
  });
  it("gets a single collection", async () => {
    const result = await cliJson<{ slug: string; label: string }>("schema", "get", "posts");
    expect(result.slug).toBe("posts");
    expect(result.label).toBe("Posts");
  });
  it("creates and deletes a collection", async () => {
    const created = await cliJson<{ slug: string }>(
      "schema",
      "create",
      "cli_temp",
      "--label",
      "CLI Temp",
    );
    expect(created.slug).toBe("cli_temp");
    // Verify it exists
    const list = await cliJson<{ slug: string }[]>("schema", "list");
    expect(list.map((c) => c.slug)).toContain("cli_temp");
    // Delete
    await cli("schema", "delete", "cli_temp", "--force");
    // Verify it's gone
    const listAfter = await cliJson<{ slug: string }[]>("schema", "list");
    expect(listAfter.map((c) => c.slug)).not.toContain("cli_temp");
  });
  it("adds and removes fields", async () => {
    // Create a temp collection
    await cli("schema", "create", "cli_fields", "--label", "Fields Test");
    // Add a field
    const field = await cliJson<{ slug: string; type: string }>(
      "schema",
      "add-field",
      "cli_fields",
      "name",
      "--type",
      "string",
      "--label",
      "Name",
    );
    expect(field.slug).toBe("name");
    expect(field.type).toBe("string");
    // Remove the field
    await cli("schema", "remove-field", "cli_fields", "name");
    // Clean up
    await cli("schema", "delete", "cli_fields", "--force");
  });
});
// -----------------------------------------------------------------------
// Content commands
// -----------------------------------------------------------------------
describe("content", () => {
  it("lists content", async () => {
    const result = await cliJson<{ items: { data: Record<string, unknown> }[] }>(
      "content",
      "list",
      "posts",
    );
    // At least two seeded posts are assumed — TODO confirm against createTestServer.
    expect(result.items.length).toBeGreaterThanOrEqual(2);
  });
  it("gets content by id", async () => {
    const postId = ctx.contentIds["posts"]![0]!;
    const result = await cliJson<{ data: { title: string } }>("content", "get", "posts", postId);
    expect(result.data.title).toBe("First Post");
  });
  it("creates, updates, and deletes content", async () => {
    // Create
    const created = await cliJson<{ id: string; slug: string }>(
      "content",
      "create",
      "posts",
      "--data",
      JSON.stringify({ title: "CLI Post", excerpt: "From CLI" }),
      "--slug",
      "cli-post",
    );
    expect(created.id).toBeDefined();
    expect(created.slug).toBe("cli-post");
    // Update (get first to obtain _rev, then update with it — optimistic locking)
    const fetched = await cliJson<{ _rev: string }>("content", "get", "posts", created.id);
    const updated = await cliJson<{ data: { title: string } }>(
      "content",
      "update",
      "posts",
      created.id,
      "--rev",
      fetched._rev,
      "--data",
      JSON.stringify({ title: "Updated CLI Post" }),
    );
    expect(updated.data.title).toBe("Updated CLI Post");
    // Delete
    await cli("content", "delete", "posts", created.id);
  });
  it("publishes and unpublishes content", async () => {
    const item = await cliJson<{ id: string }>(
      "content",
      "create",
      "posts",
      "--data",
      JSON.stringify({ title: "Pub Test" }),
    );
    // Both commands are success-message only; a non-zero exit would reject.
    await cli("content", "publish", "posts", item.id);
    await cli("content", "unpublish", "posts", item.id);
    // Clean up
    await cli("content", "delete", "posts", item.id);
  });
});
// -----------------------------------------------------------------------
// Content lifecycle: schedule and restore
// -----------------------------------------------------------------------
// Scheduling and trash/restore flows. Each test creates its own item and
// cleans it up so the suite leaves the seeded fixture untouched.
describe("content lifecycle", () => {
  it("schedules content for publishing", async () => {
    const item = await cliJson<{ id: string }>(
      "content",
      "create",
      "posts",
      "--data",
      JSON.stringify({ title: "CLI Schedule Test" }),
    );
    // Schedule does not produce JSON output, just a success message
    await cli("content", "schedule", "posts", item.id, "--at", "2027-06-01T09:00:00Z");
    // Verify via get
    const fetched = await cliJson<{ scheduledAt: string }>("content", "get", "posts", item.id);
    expect(fetched.scheduledAt).toBe("2027-06-01T09:00:00Z");
    // Clean up
    await cli("content", "delete", "posts", item.id);
  });
  it("restores a trashed item", async () => {
    const item = await cliJson<{ id: string }>(
      "content",
      "create",
      "posts",
      "--data",
      JSON.stringify({ title: "CLI Restore Test" }),
    );
    // Delete (soft trash)
    await cli("content", "delete", "posts", item.id);
    // Restore
    await cli("content", "restore", "posts", item.id);
    // Should be accessible again (auto-published before deletion, so restored as published)
    const fetched = await cliJson<{ status: string }>("content", "get", "posts", item.id);
    expect(fetched.status).toBe("published");
    // Final cleanup
    await cli("content", "delete", "posts", item.id);
  });
});
// -----------------------------------------------------------------------
// Media commands
// -----------------------------------------------------------------------
// Full media lifecycle via the CLI: upload a real file, list, get, delete.
describe("media", () => {
  it("uploads, lists, gets, and deletes media", async () => {
    // Create a temp file to upload. Destructure both fs helpers from a
    // single dynamic import (the original re-imported node:fs for unlink).
    const { writeFileSync, unlinkSync } = await import("node:fs");
    const { join } = await import("node:path");
    const { tmpdir } = await import("node:os");
    // 1x1 PNG pixel
    const pngBytes = Buffer.from([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44,
      0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90,
      0x77, 0x53, 0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8,
      0xcf, 0xc0, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xe2, 0x21, 0xbc, 0x33, 0x00, 0x00, 0x00,
      0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
    ]);
    const tmpFile = join(tmpdir(), "emdash-cli-test.png");
    writeFileSync(tmpFile, pngBytes);
    try {
      // Upload
      const uploaded = await cliJson<{ id: string; filename: string }>(
        "media",
        "upload",
        tmpFile,
        "--alt",
        "CLI test image",
      );
      expect(uploaded.id).toBeDefined();
      expect(uploaded.filename).toBe("emdash-cli-test.png");
      // List
      const list = await cliJson<{ items: { id: string }[] }>("media", "list");
      const ids = list.items.map((m) => m.id);
      expect(ids).toContain(uploaded.id);
      // Get
      const fetched = await cliJson<{ id: string; filename: string }>("media", "get", uploaded.id);
      expect(fetched.id).toBe(uploaded.id);
      // Delete
      await cli("media", "delete", uploaded.id);
    } finally {
      // Always remove the temp file, even if an assertion above fails.
      unlinkSync(tmpFile);
    }
  });
});
// -----------------------------------------------------------------------
// Search command
// -----------------------------------------------------------------------
describe("search", () => {
  it("searches content", async () => {
    // The command must succeed and emit a JSON array even with no hits.
    const hits = await cliJson<unknown[]>("search", "First Post");
    expect(Array.isArray(hits)).toBe(true);
  });
});
// -----------------------------------------------------------------------
// Auth commands
// -----------------------------------------------------------------------
describe("auth", () => {
  it("whoami returns user info with token auth", async () => {
    // The PAT used by the test harness belongs to the seeded admin user.
    const me = await cliJson<{ email: string; role: string }>("whoami");
    expect(me.role).toBe("admin");
    expect(me.email).toBe("dev@emdash.local");
  });
});
// -----------------------------------------------------------------------
// Taxonomy commands
// -----------------------------------------------------------------------
describe("taxonomy", () => {
  it("taxonomy list returns valid JSON array", async () => {
    const taxonomies = await cliJson<{ name: string }[]>("taxonomy", "list");
    expect(Array.isArray(taxonomies)).toBe(true);
    expect(taxonomies.length).toBeGreaterThanOrEqual(1);
    // The fixture seeds a "categories" taxonomy.
    expect(taxonomies.map((tax) => tax.name)).toContain("categories");
  });
  it("taxonomy terms returns terms for a taxonomy", async () => {
    const res = await cliJson<{ terms: { slug: string }[] }>(
      "taxonomy",
      "terms",
      "categories",
    );
    expect(res.terms).toBeDefined();
    expect(Array.isArray(res.terms)).toBe(true);
    // "news" is one of the seeded terms.
    expect(res.terms.map((term) => term.slug)).toContain("news");
  });
});
// -----------------------------------------------------------------------
// Menu commands
// -----------------------------------------------------------------------
describe("menu", () => {
  it("menu list returns valid JSON array", async () => {
    // No menus are seeded; an empty array is still a valid result.
    const menus = await cliJson<unknown[]>("menu", "list");
    expect(Array.isArray(menus)).toBe(true);
  });
});
});

View File

@@ -0,0 +1,498 @@
/**
* Integration tests for EmDashClient.
*
* Tests full CRUD lifecycles against a mock HTTP backend that simulates
* the real API behavior including _rev tokens, schema caching, and
* content state transitions.
*/
import { describe, it, expect } from "vitest";
import { EmDashClient, EmDashApiError } from "../../../src/client/index.js";
import type { Interceptor } from "../../../src/client/transport.js";
// ---------------------------------------------------------------------------
// Simulated backend
// ---------------------------------------------------------------------------
// Route matchers for the simulated REST API. The interceptor strips the
// "/_emdash/api" prefix before matching, so these are bare API paths.
// Single collection: /schema/collections/:slug
const COLLECTION_MATCH_REGEX = /^\/schema\/collections\/([^/]+)$/;
// List/create: /content/:collection
const CONTENT_LIST_REGEX = /^\/content\/([^/]+)$/;
// Get/update/delete: /content/:collection/:id
const CONTENT_ITEM_REGEX = /^\/content\/([^/]+)\/([^/]+)$/;
// Lifecycle actions: /content/:collection/:id/:action
const CONTENT_ACTION_REGEX = /^\/content\/([^/]+)\/([^/]+)\/(publish|unpublish|schedule|restore)$/;
/** Shape of a content entry held in the mock backend's in-memory store. */
interface StoredItem {
  id: string;
  // Collection slug the item belongs to (e.g. "posts").
  type: string;
  slug: string | null;
  // Lifecycle state; the mock sets "draft", "published", or "trashed".
  status: string;
  data: Record<string, unknown>;
  authorId: string | null;
  createdAt: string;
  updatedAt: string;
  publishedAt: string | null;
  scheduledAt: string | null;
  liveRevisionId: string | null;
  draftRevisionId: string | null;
  // Monotonic counter bumped on every write; feeds the _rev token.
  version: number;
}
/** Derive an opaque _rev token from an item's version and last-write time. */
function encodeRev(item: StoredItem): string {
  const token = [item.version, item.updatedAt].join(":");
  return btoa(token);
}
/** Wraps body in `{ data: body }` to match the standard API response envelope. */
function jsonRes(body: unknown, status = 200): Response {
  // 4xx/5xx responses carry the error object unwrapped, mirroring the real API.
  const isError = status >= 400;
  const payload = isError ? body : { data: body };
  const init = { status, headers: { "Content-Type": "application/json" } };
  return new Response(JSON.stringify(payload), init);
}
/**
 * A stateful mock backend that simulates EmDash's REST API.
 * Supports schema, content CRUD, _rev tokens, and conflict detection.
 *
 * Returns the fetch-style interceptor plus direct handles to the in-memory
 * stores so tests can seed or inspect state out-of-band.
 */
function createStatefulBackend() {
  const collections = new Map<
    string,
    {
      slug: string;
      label: string;
      labelSingular: string;
      fields: Array<{ slug: string; type: string; label: string; required?: boolean }>;
    }
  >();
  const content = new Map<string, StoredItem>();
  // Monotonic counter used to mint unique content IDs ("item_1", "item_2", ...).
  let idCounter = 0;
  // Seed a collection
  collections.set("posts", {
    slug: "posts",
    label: "Posts",
    labelSingular: "Post",
    fields: [
      { slug: "title", type: "string", label: "Title", required: true },
      { slug: "body", type: "portableText", label: "Body" },
      { slug: "excerpt", type: "text", label: "Excerpt" },
    ],
  });
  const interceptor: Interceptor = async (req) => {
    const url = new URL(req.url);
    // Strip the API prefix so the module-level route regexes can match.
    const path = url.pathname.replace("/_emdash/api", "");
    // --- Schema routes ---
    if (req.method === "GET" && path === "/schema/collections") {
      return jsonRes({
        items: Array.from(collections.values(), ({ slug, label, labelSingular }) => ({
          slug,
          label,
          labelSingular,
          supports: [],
        })),
      });
    }
    const colMatch = path.match(COLLECTION_MATCH_REGEX);
    if (req.method === "GET" && colMatch) {
      const col = collections.get(colMatch[1]);
      if (!col) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
      return jsonRes({ item: { ...col, supports: [] } });
    }
    // --- Manifest ---
    if (req.method === "GET" && path === "/manifest") {
      const cols: Record<string, unknown> = {};
      for (const [slug, col] of collections) {
        const fields: Record<string, unknown> = {};
        for (const f of col.fields) {
          fields[f.slug] = { kind: f.type, label: f.label, required: f.required };
        }
        cols[slug] = {
          label: col.label,
          labelSingular: col.labelSingular,
          supports: [],
          fields,
        };
      }
      return jsonRes({ version: "0.1.0", hash: "abc", collections: cols, plugins: {} });
    }
    // --- Content list ---
    const listMatch = path.match(CONTENT_LIST_REGEX);
    if (req.method === "GET" && listMatch) {
      const collectionSlug = listMatch[1];
      const status = url.searchParams.get("status");
      const items = [...content.values()]
        .filter((i) => i.type === collectionSlug)
        .filter((i) => !status || i.status === status);
      // Single page only — pagination is not simulated here.
      return jsonRes({ items, nextCursor: undefined });
    }
    // --- Content create ---
    if (req.method === "POST" && listMatch) {
      const collectionSlug = listMatch[1];
      const body = (await req.json()) as {
        data: Record<string, unknown>;
        slug?: string;
        status?: string;
      };
      const id = `item_${++idCounter}`;
      const now = new Date().toISOString();
      const item: StoredItem = {
        id,
        type: collectionSlug,
        slug: body.slug ?? null,
        status: body.status ?? "draft",
        data: body.data,
        authorId: null,
        createdAt: now,
        updatedAt: now,
        publishedAt: null,
        scheduledAt: null,
        liveRevisionId: null,
        draftRevisionId: null,
        version: 1,
      };
      content.set(id, item);
      return jsonRes({ item, _rev: encodeRev(item) });
    }
    // --- Content get/update/delete ---
    const itemMatch = path.match(CONTENT_ITEM_REGEX);
    if (itemMatch) {
      const itemId = itemMatch[2];
      const item = content.get(itemId);
      if (req.method === "GET") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        return jsonRes({ item, _rev: encodeRev(item) });
      }
      if (req.method === "PUT") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        const body = (await req.json()) as {
          data?: Record<string, unknown>;
          slug?: string;
          status?: string;
          _rev?: string;
        };
        // Check _rev for conflict. Omitting _rev entirely means a blind
        // write: no conflict check is performed.
        if (body._rev) {
          const expected = encodeRev(item);
          if (body._rev !== expected) {
            return jsonRes(
              {
                error: {
                  code: "CONFLICT",
                  message: "Entry has been modified since last read",
                },
              },
              409,
            );
          }
        }
        // Apply updates (data is merged, not replaced)
        if (body.data) item.data = { ...item.data, ...body.data };
        if (body.slug !== undefined) item.slug = body.slug;
        if (body.status) item.status = body.status;
        item.updatedAt = new Date().toISOString();
        item.version++;
        return jsonRes({ item, _rev: encodeRev(item) });
      }
      if (req.method === "DELETE") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        // Soft delete: item stays in the store with status "trashed".
        item.status = "trashed";
        item.updatedAt = new Date().toISOString();
        return jsonRes({});
      }
    }
    // --- Content actions ---
    const actionMatch = path.match(CONTENT_ACTION_REGEX);
    if (req.method === "POST" && actionMatch) {
      const itemId = actionMatch[2];
      const action = actionMatch[3];
      const item = content.get(itemId);
      if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
      switch (action) {
        case "publish":
          item.status = "published";
          item.publishedAt = new Date().toISOString();
          break;
        case "unpublish":
          item.status = "draft";
          item.publishedAt = null;
          break;
        case "schedule": {
          const body = (await req.json()) as { scheduledAt: string };
          item.scheduledAt = body.scheduledAt;
          break;
        }
        case "restore":
          item.status = "draft";
          break;
      }
      item.updatedAt = new Date().toISOString();
      return jsonRes({});
    }
    // --- Search ---
    if (req.method === "GET" && path === "/search") {
      const q = url.searchParams.get("q") ?? "";
      // Naive full-text match over the serialized data blob, case-insensitive.
      const items = [...content.values()]
        .filter((i) => JSON.stringify(i.data).toLowerCase().includes(q.toLowerCase()))
        .map((i) => ({
          id: i.id,
          collection: i.type,
          title: typeof i.data.title === "string" ? i.data.title : "",
          score: 1,
        }));
      return jsonRes({ items });
    }
    // Fallback: any unmatched route is a 404 with the method/path in the message.
    return jsonRes(
      { error: { code: "NOT_FOUND", message: `No route: ${req.method} ${path}` } },
      404,
    );
  };
  return { interceptor, collections, content };
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
// Exercises EmDashClient end-to-end against the stateful mock backend above.
// Each test builds a fresh backend + client pair, so tests are isolated.
describe("EmDashClient lifecycle (integration)", () => {
  // Fresh backend and client per test; `content` exposes the raw store.
  function createClient() {
    const { interceptor, content } = createStatefulBackend();
    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [interceptor],
    });
    return { client, content };
  }
  it("full content CRUD lifecycle", async () => {
    const { client } = createClient();
    // Create
    const created = await client.create("posts", {
      data: { title: "My Post", body: "Hello **world**" },
      slug: "my-post",
      status: "draft",
    });
    expect(created.id).toBeDefined();
    expect(created.slug).toBe("my-post");
    expect(created.status).toBe("draft");
    // body was converted from markdown to PT
    expect(Array.isArray(created.data.body)).toBe(true);
    // List
    const list = await client.list("posts");
    expect(list.items).toHaveLength(1);
    expect(list.items[0].id).toBe(created.id);
    // Get — returns _rev for optimistic concurrency
    const fetched = await client.get("posts", created.id);
    expect(fetched.id).toBe(created.id);
    expect(typeof fetched.data.body).toBe("string"); // PT -> markdown
    expect(fetched.data.body).toContain("world");
    expect(fetched._rev).toBeDefined();
    // Update with explicit _rev
    const updated = await client.update("posts", created.id, {
      data: { title: "Updated Title" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Updated Title");
    // Publish
    await client.publish("posts", created.id);
    // List published
    const published = await client.list("posts", { status: "published" });
    expect(published.items).toHaveLength(1);
    // Unpublish
    await client.unpublish("posts", created.id);
    // Delete (soft)
    await client.delete("posts", created.id);
  });
  it("blind update succeeds without _rev", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "Test" },
    });
    // Update without reading — blind write (no _rev) should succeed
    const updated = await client.update("posts", item.id, {
      data: { title: "Blind Write OK" },
    });
    expect(updated.data.title).toBe("Blind Write OK");
  });
  it("get() returns _rev and update() accepts it for conflict detection", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "Test" },
    });
    // Read — should return _rev on the item
    const fetched = await client.get("posts", item.id);
    expect(fetched._rev).toBeDefined();
    // Update with explicit _rev
    const updated = await client.update("posts", item.id, {
      data: { title: "Safe Update" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Safe Update");
  });
  it("multiple sequential updates work with explicit _rev", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "V1" },
    });
    // First read
    const v1 = await client.get("posts", item.id);
    // First update with _rev
    await client.update("posts", item.id, {
      data: { title: "V2" },
      _rev: v1._rev,
    });
    // Re-read for fresh _rev (previous rev is now stale)
    const v2 = await client.get("posts", item.id);
    // Second update with new _rev
    const v3 = await client.update("posts", item.id, {
      data: { title: "V3" },
      _rev: v2._rev,
    });
    expect(v3.data.title).toBe("V3");
  });
  it("listAll() iterates through all items", async () => {
    const { client } = createClient();
    // Create multiple items
    await client.create("posts", { data: { title: "A" } });
    await client.create("posts", { data: { title: "B" } });
    await client.create("posts", { data: { title: "C" } });
    const all = [];
    for await (const item of client.listAll("posts")) {
      all.push(item);
    }
    expect(all).toHaveLength(3);
  });
  it("schedule() sets scheduling metadata", async () => {
    const { client } = createClient();
    const item = await client.create("posts", { data: { title: "Scheduled" } });
    await client.schedule("posts", item.id, { at: "2026-06-01T09:00:00Z" });
    // Verify via get
    const fetched = await client.get("posts", item.id);
    expect(fetched.scheduledAt).toBe("2026-06-01T09:00:00Z");
  });
  it("search() finds matching content", async () => {
    const { client } = createClient();
    await client.create("posts", { data: { title: "Deployment Guide" } });
    await client.create("posts", { data: { title: "Getting Started" } });
    // Mock search is a case-insensitive substring match over data.
    const results = await client.search("deployment");
    expect(results).toHaveLength(1);
    expect(results[0].title).toBe("Deployment Guide");
  });
  it("schema operations work", async () => {
    const { client } = createClient();
    const cols = await client.collections();
    expect(cols.length).toBeGreaterThan(0);
    expect(cols[0].slug).toBe("posts");
    const col = await client.collection("posts");
    expect(col.fields).toHaveLength(3);
    expect(col.fields[0].slug).toBe("title");
  });
  it("manifest() returns full schema", async () => {
    const { client } = createClient();
    const manifest = await client.manifest();
    expect(manifest.version).toBe("0.1.0");
    expect(manifest.collections.posts).toBeDefined();
    expect(manifest.collections.posts.fields.title).toBeDefined();
  });
  it("API errors are typed correctly", async () => {
    const { client } = createClient();
    try {
      await client.get("posts", "nonexistent");
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(404);
      expect(apiErr.code).toBe("NOT_FOUND");
    }
  });
  it("PT conversion round-trips through create and get", async () => {
    const { client } = createClient();
    // Create with markdown
    const item = await client.create("posts", {
      data: {
        title: "Markdown Post",
        body: "# Hello\n\nSome **bold** text\n\n- Item 1\n- Item 2",
      },
    });
    // Data stored as PT
    expect(Array.isArray(item.data.body)).toBe(true);
    // Get returns markdown
    const fetched = await client.get("posts", item.id);
    expect(typeof fetched.data.body).toBe("string");
    const body = fetched.data.body as string;
    expect(body).toContain("# Hello");
    expect(body).toContain("**bold**");
    expect(body).toContain("- Item 1");
  });
});

View File

@@ -0,0 +1,394 @@
/**
* E2E tests for EmDashClient against a real Astro dev server.
*
* Uses an isolated fixture (not the demo site). The test helper
* creates a temp directory, starts a fresh dev server, runs setup,
* and seeds collections with test data.
*
* Runs by default. Requires built artifacts (auto-builds if missing).
*/
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { EmDashClient, EmDashApiError } from "../../../src/client/index.js";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
const PORT = 4399;
// End-to-end suite against a real dev server (see file header). Tests share
// one server, so most create their own items and clean up after themselves.
describe("EmDashClient Integration", () => {
  let ctx: TestServerContext;
  beforeAll(async () => {
    assertNodeVersion();
    ctx = await createTestServer({ port: PORT });
  });
  afterAll(async () => {
    // Optional chaining: beforeAll may have thrown before ctx was assigned.
    await ctx?.cleanup();
  });
  it("fetches the manifest", async () => {
    const manifest = await ctx.client.manifest();
    expect(manifest.version).toBeDefined();
    expect(typeof manifest.collections).toBe("object");
  });
  it("lists collections", async () => {
    const collections = await ctx.client.collections();
    expect(Array.isArray(collections)).toBe(true);
    // Seeded collections should be present
    const slugs = collections.map((c: { slug: string }) => c.slug);
    expect(slugs).toContain("posts");
    expect(slugs).toContain("pages");
  });
  it("lists seeded content", async () => {
    const posts = await ctx.client.list("posts");
    expect(posts.items.length).toBeGreaterThanOrEqual(2);
    // Check published posts are returned
    const titles = posts.items.map((p: { data: Record<string, unknown> }) => p.data.title);
    expect(titles).toContain("First Post");
    expect(titles).toContain("Second Post");
  });
  it("creates, reads, updates, and deletes content", async () => {
    // Create
    const item = await ctx.client.create("posts", {
      data: { title: "E2E Article", body: "Hello **e2e**", excerpt: "Testing" },
      slug: "e2e-article",
    });
    expect(item.id).toBeDefined();
    expect(item.slug).toBe("e2e-article");
    // Read — returns _rev for optimistic concurrency
    const fetched = await ctx.client.get("posts", item.id);
    expect(fetched.data.title).toBe("E2E Article");
    expect(typeof fetched.data.body).toBe("string"); // PT→Markdown
    expect(fetched._rev).toBeDefined();
    // Update — pass _rev explicitly
    const updated = await ctx.client.update("posts", item.id, {
      data: { title: "Updated E2E Article" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Updated E2E Article");
    // Publish / unpublish
    await ctx.client.publish("posts", item.id);
    await ctx.client.unpublish("posts", item.id);
    // Delete
    await ctx.client.delete("posts", item.id);
  });
  it("blind update succeeds without _rev", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Blind Update Test" },
    });
    // Fresh client — no prior get(), no _rev — blind write should succeed
    const freshClient = new EmDashClient({
      baseUrl: ctx.baseUrl,
      devBypass: true,
    });
    const updated = await freshClient.update("posts", item.id, {
      data: { title: "Blind Write OK" },
    });
    expect(updated.data.title).toBe("Blind Write OK");
    await ctx.client.delete("posts", item.id);
  });
  it("returns Portable Text arrays in raw mode", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Raw Test", body: "Some **bold** text" },
    });
    // Normal get — body as markdown string
    const normal = await ctx.client.get("posts", item.id);
    expect(typeof normal.data.body).toBe("string");
    // Raw get — body as PT array
    const raw = await ctx.client.get("posts", item.id, { raw: true });
    expect(Array.isArray(raw.data.body)).toBe(true);
    await ctx.client.delete("posts", item.id);
  });
  it("authenticates with PAT token", async () => {
    // Use the PAT token directly via fetch (not the devBypass client)
    const res = await fetch(`${ctx.baseUrl}/_emdash/api/content/posts`, {
      headers: { Authorization: `Bearer ${ctx.token}` },
    });
    expect(res.ok).toBe(true);
    const json = (await res.json()) as { data: { items: unknown[] } };
    expect(Array.isArray(json.data.items)).toBe(true);
  });
  // -----------------------------------------------------------------------
  // Rendered output tests
  // -----------------------------------------------------------------------
  /** Fetch a page and return the HTML body text */
  async function fetchHtml(path: string): Promise<string> {
    const res = await fetch(`${ctx.baseUrl}${path}`);
    return res.text();
  }
  it("renders seeded posts on the index page", async () => {
    const html = await fetchHtml("/");
    // Published posts should appear
    expect(html).toContain("First Post");
    expect(html).toContain("Second Post");
    // Draft post should NOT appear on the public page
    expect(html).not.toContain("Draft Post");
  });
  it("renders a single post by slug", async () => {
    const html = await fetchHtml("/posts/first-post");
    expect(html).toContain('<h1 id="title">First Post</h1>');
    expect(html).toContain("The very first post"); // excerpt
  });
  it("returns 404 for a nonexistent slug", async () => {
    const res = await fetch(`${ctx.baseUrl}/posts/does-not-exist`);
    expect(res.status).toBe(404);
  });
  it("reflects API edits in rendered output", async () => {
    // Create and publish a new post
    const item = await ctx.client.create("posts", {
      data: { title: "Render Test Post", excerpt: "Check the HTML" },
      slug: "render-test",
    });
    await ctx.client.publish("posts", item.id);
    // Index page should include the new post
    const indexHtml = await fetchHtml("/");
    expect(indexHtml).toContain("Render Test Post");
    // Single page should render it
    const postHtml = await fetchHtml("/posts/render-test");
    expect(postHtml).toContain("Render Test Post");
    expect(postHtml).toContain("Check the HTML");
    // Update the title via API — pass _rev from get()
    const current = await ctx.client.get("posts", item.id);
    await ctx.client.update("posts", item.id, {
      data: { title: "Edited Render Test" },
      _rev: current._rev,
    });
    // Rendered page should reflect the edit
    const updatedHtml = await fetchHtml("/posts/render-test");
    expect(updatedHtml).toContain("Edited Render Test");
    expect(updatedHtml).not.toContain("Render Test Post");
    // Unpublish — should disappear from index
    await ctx.client.unpublish("posts", item.id);
    const afterUnpublish = await fetchHtml("/");
    expect(afterUnpublish).not.toContain("Edited Render Test");
    // Clean up
    await ctx.client.delete("posts", item.id);
  });
  it("creates and deletes collections", async () => {
    const col = await ctx.client.createCollection({
      slug: "e2e_temp",
      label: "Temp",
    });
    expect(col.slug).toBe("e2e_temp");
    const titleField = await ctx.client.createField("e2e_temp", {
      slug: "title",
      type: "string",
      label: "Title",
    });
    expect(titleField.slug).toBe("title");
    await ctx.client.deleteCollection("e2e_temp");
    // Collection should be gone
    const collections = await ctx.client.collections();
    const slugs = collections.map((c: { slug: string }) => c.slug);
    expect(slugs).not.toContain("e2e_temp");
  });
  // -----------------------------------------------------------------------
  // Media tests
  // -----------------------------------------------------------------------
  it("uploads, gets, lists, and deletes media", async () => {
    // Create a small PNG file (1x1 pixel)
    const pngBytes = new Uint8Array([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44,
      0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90,
      0x77, 0x53, 0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8,
      0xcf, 0xc0, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xe2, 0x21, 0xbc, 0x33, 0x00, 0x00, 0x00,
      0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
    ]);
    // Upload
    const uploaded = await ctx.client.mediaUpload(pngBytes, "test-pixel.png", {
      alt: "A test pixel",
    });
    expect(uploaded.id).toBeDefined();
    expect(uploaded.filename).toBe("test-pixel.png");
    expect(uploaded.mimeType).toBe("image/png");
    // Get by ID
    const fetched = await ctx.client.mediaGet(uploaded.id);
    expect(fetched.id).toBe(uploaded.id);
    expect(fetched.filename).toBe("test-pixel.png");
    // List — should include the uploaded item
    const list = await ctx.client.mediaList();
    expect(list.items.length).toBeGreaterThanOrEqual(1);
    const ids = list.items.map((m: { id: string }) => m.id);
    expect(ids).toContain(uploaded.id);
    // Delete
    await ctx.client.mediaDelete(uploaded.id);
    // Should be gone
    await expect(ctx.client.mediaGet(uploaded.id)).rejects.toThrow();
  });
  // -----------------------------------------------------------------------
  // Conflict detection
  // -----------------------------------------------------------------------
  it("returns 409 on _rev conflict", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Conflict Test" },
    });
    // Two clients both read the same version
    const clientA = new EmDashClient({ baseUrl: ctx.baseUrl, token: ctx.token });
    const clientB = new EmDashClient({ baseUrl: ctx.baseUrl, token: ctx.token });
    const fetchedA = await clientA.get("posts", item.id);
    const fetchedB = await clientB.get("posts", item.id);
    // A updates first — succeeds (passes _rev explicitly)
    await clientA.update("posts", item.id, {
      data: { title: "A wins" },
      _rev: fetchedA._rev,
    });
    // B's _rev is now stale — should get 409
    try {
      await clientB.update("posts", item.id, {
        data: { title: "B loses" },
        _rev: fetchedB._rev,
      });
      expect.fail("Should have thrown a conflict error");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(409);
      expect(apiErr.code).toBe("CONFLICT");
    }
    // Clean up
    await ctx.client.delete("posts", item.id);
  });
  // -----------------------------------------------------------------------
  // Schedule and restore
  // -----------------------------------------------------------------------
  it("schedules and restores content", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Schedule Test" },
    });
    // Schedule for a future date
    await ctx.client.schedule("posts", item.id, { at: "2027-06-01T09:00:00Z" });
    // Verify via get
    const fetched = await ctx.client.get("posts", item.id);
    expect(fetched.scheduledAt).toBe("2027-06-01T09:00:00Z");
    // Trash and restore
    await ctx.client.delete("posts", item.id);
    await ctx.client.restore("posts", item.id);
    // Should be accessible again (restore preserves the previous status)
    const restored = await ctx.client.get("posts", item.id);
    expect(restored.status).toBe("scheduled");
    // Final cleanup
    await ctx.client.delete("posts", item.id);
  });
  // -----------------------------------------------------------------------
  // listAll cursor pagination
  // -----------------------------------------------------------------------
  it("listAll iterates through paginated results", async () => {
    // Create enough items to potentially page (use limit=2 to force pagination)
    const ids: string[] = [];
    for (let i = 0; i < 5; i++) {
      const item = await ctx.client.create("posts", {
        data: { title: `Paginate ${i}` },
      });
      ids.push(item.id);
    }
    // listAll with small limit should still get all items
    const all: { id: string }[] = [];
    for await (const item of ctx.client.listAll("posts", { limit: 2 })) {
      all.push(item);
    }
    // Should have at least our 5 + the seeded posts
    expect(all.length).toBeGreaterThanOrEqual(5);
    // All our created IDs should be in the results
    const resultIds = all.map((a) => a.id);
    for (const id of ids) {
      expect(resultIds).toContain(id);
    }
    // Clean up
    for (const id of ids) {
      await ctx.client.delete("posts", id);
    }
  });
  // -----------------------------------------------------------------------
  // Error paths
  // -----------------------------------------------------------------------
  it("throws EmDashApiError on 404", async () => {
    try {
      await ctx.client.get("posts", "nonexistent-id-12345");
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(404);
      expect(apiErr.code).toBe("NOT_FOUND");
    }
  });
  it("throws on unauthorized request (no token)", async () => {
    const noAuthClient = new EmDashClient({
      baseUrl: ctx.baseUrl,
      // No token, no devBypass
    });
    try {
      await noAuthClient.collections();
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      expect((error as EmDashApiError).status).toBe(401);
    }
  });
});

View File

@@ -0,0 +1,349 @@
/**
* E2E tests for comment frontend components and API.
*
* Tests the full flow: rendering comments on pages, submitting via the
* public API, approving via admin API, and verifying display.
*
* Note: the public comment API has a rate limit (5 per 10 min per IP).
* Tests are ordered to stay within the limit — avoid adding submissions
* without accounting for the budget.
*/
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
const PORT = 4396;
/**
 * Helper: raw fetch against the admin API with auth headers attached.
 * Caller-supplied headers in `init` take precedence over the defaults.
 */
async function adminFetch(
  ctx: TestServerContext,
  path: string,
  init?: RequestInit,
): Promise<Response> {
  const headers: Record<string, string> = {
    Authorization: `Bearer ${ctx.token}`,
    "X-EmDash-Request": "1",
    "Content-Type": "application/json",
    ...(init?.headers as Record<string, string>),
  };
  return fetch(`${ctx.baseUrl}${path}`, { ...init, headers });
}
/** Helper: fetch a page unauthenticated and return its raw HTML body. */
async function fetchHtml(ctx: TestServerContext, path: string): Promise<string> {
  const response = await fetch(`${ctx.baseUrl}${path}`);
  return await response.text();
}
/**
 * Helper: submit a comment via the public (unauthenticated) API.
 * `website_url` is the honeypot field — filling it marks the submission
 * as bot traffic.
 */
async function submitComment(
  ctx: TestServerContext,
  collection: string,
  contentId: string,
  data: {
    authorName: string;
    authorEmail: string;
    body: string;
    parentId?: string;
    website_url?: string;
  },
): Promise<Response> {
  // Encode both path segments so arbitrary ids are URL-safe.
  const endpoint =
    `${ctx.baseUrl}/_emdash/api/comments/` +
    `${encodeURIComponent(collection)}/${encodeURIComponent(contentId)}`;
  return fetch(endpoint, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Origin: ctx.baseUrl,
    },
    body: JSON.stringify(data),
  });
}
const COMMENT_COUNT_RE = /\d+ Comments/;
describe("Comments Integration", () => {
let ctx: TestServerContext;
beforeAll(async () => {
assertNodeVersion();
ctx = await createTestServer({ port: PORT });
// Enable comments on the posts collection with "none" moderation
// so comments are auto-approved for most tests
const res = await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({
commentsEnabled: true,
commentsModeration: "none",
}),
});
if (!res.ok) {
const body = await res.text().catch(() => "");
throw new Error(`Failed to enable comments on posts (${res.status}): ${body}`);
}
});
afterAll(async () => {
await ctx?.cleanup();
});
// -----------------------------------------------------------------------
// Server-rendered component (no submissions)
// -----------------------------------------------------------------------
it("renders 'No comments yet' for a post with no comments", async () => {
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain("No comments yet");
expect(html).toContain("ec-comments");
expect(html).toContain("ec-comment-form");
});
it("renders the comment form with correct fields", async () => {
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain('name="authorName"');
expect(html).toContain('name="authorEmail"');
expect(html).toContain('name="body"');
expect(html).toContain('name="website_url"');
expect(html).toContain("Post Comment");
});
// -----------------------------------------------------------------------
// Submission #1: basic submit + rendering + auto-link + XSS escape
// -----------------------------------------------------------------------
it("submits a comment and renders it with auto-linked URLs and escaped HTML", async () => {
const postId = ctx.contentIds["posts"]![0]!;
// Submit a comment with a URL and HTML in the body
const res = await submitComment(ctx, "posts", postId, {
authorName: "Test User",
authorEmail: "test@example.com",
body: 'Check https://example.com and <script>alert("xss")</script>',
});
expect(res.status).toBe(201);
const json = (await res.json()) as { data: { id: string; status: string; message: string } };
expect(json.data.id).toBeDefined();
expect(json.data.status).toBe("approved");
expect(json.data.message).toBe("Comment published");
// Verify rendered page
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain("Test User");
expect(html).not.toContain("No comments yet");
// Auto-linked URL
expect(html).toContain('href="https://example.com"');
expect(html).toContain('rel="nofollow ugc noopener"');
// HTML escaped (not rendered as real script tag)
expect(html).toContain("&lt;script&gt;");
expect(html).not.toContain('<script>alert("xss")</script>');
});
// -----------------------------------------------------------------------
// Submission #2: honeypot (early exit, doesn't count toward rate limit)
// -----------------------------------------------------------------------
it("silently accepts honeypot submissions", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await submitComment(ctx, "posts", postId, {
authorName: "Bot",
authorEmail: "bot@spam.com",
body: "Buy cheap pills",
website_url: "http://spam.com",
});
// Honeypot: returns 200 OK but doesn't actually create the comment
expect(res.status).toBe(200);
const json = (await res.json()) as { data: { status: string; message: string } };
expect(json.data.status).toBe("pending");
});
// -----------------------------------------------------------------------
// No submission: validation and disabled collection
// -----------------------------------------------------------------------
it("rejects comments when collection has comments disabled", async () => {
const pageId = ctx.contentIds["pages"]![0]!;
const res = await submitComment(ctx, "pages", pageId, {
authorName: "Test",
authorEmail: "test@example.com",
body: "Should fail",
});
expect(res.status).toBe(403);
const data = (await res.json()) as { error: { code: string } };
expect(data.error.code).toBe("COMMENTS_DISABLED");
});
it("returns validation error for missing required fields", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/posts/${postId}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Origin: ctx.baseUrl,
},
body: JSON.stringify({ authorName: "Test" }),
});
expect(res.status).toBe(400);
});
// -----------------------------------------------------------------------
// No submission: public GET API
// -----------------------------------------------------------------------
it("lists approved comments via the public GET API", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/posts/${postId}`);
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { authorName: string; body: string }[] } };
expect(Array.isArray(json.data.items)).toBe(true);
expect(json.data.items.length).toBeGreaterThan(0);
});
// -----------------------------------------------------------------------
// Submissions #3-4: threading (on second-post)
// -----------------------------------------------------------------------
it("submits and renders threaded replies", async () => {
const postId = ctx.contentIds["posts"]![1]!;
const rootRes = await submitComment(ctx, "posts", postId, {
authorName: "Thread Root",
authorEmail: "root@example.com",
body: "Root comment for threading test",
});
expect(rootRes.status).toBe(201);
const rootJson = (await rootRes.json()) as { data: { id: string } };
const replyRes = await submitComment(ctx, "posts", postId, {
authorName: "Thread Reply",
authorEmail: "reply@example.com",
body: "Reply to root comment",
parentId: rootJson.data.id,
});
expect(replyRes.status).toBe(201);
const html = await fetchHtml(ctx, "/posts/second-post");
expect(html).toContain("Thread Root");
expect(html).toContain("Thread Reply");
expect(html).toContain("ec-comment-replies");
});
// -----------------------------------------------------------------------
// Submission #5: moderation (last one within rate limit)
// -----------------------------------------------------------------------
it("holds comments for moderation and allows admin approval", async () => {
const updateRes = await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({ commentsModeration: "all" }),
});
expect(updateRes.ok).toBe(true);
const postId = ctx.contentIds["posts"]![1]!;
const submitRes = await submitComment(ctx, "posts", postId, {
authorName: "Pending Author",
authorEmail: "pending@example.com",
body: "This needs approval",
});
expect(submitRes.status).toBe(201);
const submitJson = (await submitRes.json()) as { data: { id: string; status: string } };
expect(submitJson.data.status).toBe("pending");
// Pending comment should NOT appear on the rendered page
const htmlBefore = await fetchHtml(ctx, "/posts/second-post");
expect(htmlBefore).not.toContain("This needs approval");
// Approve via admin API
const approveRes = await adminFetch(
ctx,
`/_emdash/api/admin/comments/${submitJson.data.id}/status`,
{
method: "PUT",
body: JSON.stringify({ status: "approved" }),
},
);
expect(approveRes.ok).toBe(true);
// Now it should appear on the rendered page
const htmlAfter = await fetchHtml(ctx, "/posts/second-post");
expect(htmlAfter).toContain("This needs approval");
expect(htmlAfter).toContain("Pending Author");
// Restore "none" moderation
await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({ commentsModeration: "none" }),
});
});
// -----------------------------------------------------------------------
// No submission: comment count, admin inbox
// -----------------------------------------------------------------------
it("updates the comment count heading as comments are added", async () => {
const html = await fetchHtml(ctx, "/posts/second-post");
expect(html).toMatch(COMMENT_COUNT_RE);
});
it("lists comments in the admin inbox", async () => {
// Default inbox lists all statuses; filter to approved to find our comments
const res = await adminFetch(ctx, "/_emdash/api/admin/comments?status=approved");
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { id: string; status: string }[] } };
expect(Array.isArray(json.data.items)).toBe(true);
expect(json.data.items.length).toBeGreaterThan(0);
});
it("filters admin inbox by status", async () => {
const res = await adminFetch(ctx, "/_emdash/api/admin/comments?status=approved");
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { status: string }[] } };
for (const item of json.data.items) {
expect(item.status).toBe("approved");
}
});
// -----------------------------------------------------------------------
// No submission: edge cases (GET-only or expected failures)
// -----------------------------------------------------------------------
it("returns 404 for comments on nonexistent collection", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/nonexistent/some-id`);
expect(res.status).toBe(404);
});
it("returns 404 for comments on nonexistent content", async () => {
const res = await submitComment(ctx, "posts", "nonexistent-id", {
authorName: "Test",
authorEmail: "test@example.com",
body: "Should fail",
});
// 404 (content not found) or 429 (rate limited) are both acceptable
expect([404, 429]).toContain(res.status);
});
it("returns 400 for reply to nonexistent parent", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await submitComment(ctx, "posts", postId, {
authorName: "Test",
authorEmail: "test@example.com",
body: "Orphan reply",
parentId: "nonexistent-parent-id",
});
// 400 (parent not found) or 429 (rate limited) are both acceptable
expect([400, 429]).toContain(res.status);
});
});

View File

@@ -0,0 +1,189 @@
/**
* Integration tests for plugin field widgets.
*
* Tests the full pipeline:
* - Manifest includes widget property on fields
* - Manifest includes plugin fieldWidgets declarations
* - Content CRUD works with widget-annotated fields
* - Widget data roundtrips correctly through the API
*
* The integration fixture is configured with the color plugin and a
* "theme_color" field with widget "color:picker" on the posts collection.
*/
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
const PORT = 4397;
describe("Field Widgets Integration", () => {
let ctx: TestServerContext;
beforeAll(async () => {
assertNodeVersion();
ctx = await createTestServer({ port: PORT });
});
afterAll(async () => {
await ctx?.cleanup();
});
describe("manifest", () => {
it("includes widget property on the theme_color field", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
expect(res.ok).toBe(true);
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const collections = manifest.collections as Record<string, Record<string, unknown>>;
expect(collections.posts).toBeTruthy();
const fields = collections.posts.fields as Record<string, { kind: string; widget?: string }>;
expect(fields.theme_color).toBeTruthy();
expect(fields.theme_color.kind).toBe("string");
expect(fields.theme_color.widget).toBe("color:picker");
});
it("does not include widget on fields without one", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const collections = manifest.collections as Record<string, Record<string, unknown>>;
const fields = collections.posts.fields as Record<string, { kind: string; widget?: string }>;
expect(fields.title).toBeTruthy();
expect(fields.title.widget).toBeUndefined();
});
it("includes color plugin with fieldWidgets in plugin manifest", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const plugins = manifest.plugins as Record<string, Record<string, unknown>>;
expect(plugins.color).toBeTruthy();
expect(plugins.color.enabled).toBe(true);
const fieldWidgets = plugins.color.fieldWidgets as Array<{
name: string;
label: string;
fieldTypes: string[];
}>;
expect(fieldWidgets).toBeTruthy();
expect(fieldWidgets.length).toBe(1);
expect(fieldWidgets[0]!.name).toBe("picker");
expect(fieldWidgets[0]!.label).toBe("Color Picker");
expect(fieldWidgets[0]!.fieldTypes).toEqual(["string"]);
});
});
describe("content CRUD with widget fields", () => {
it("creates content with a color widget field value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Colorful Post",
theme_color: "#ff6600",
},
slug: "colorful-post",
});
expect(item.id).toBeDefined();
expect(item.slug).toBe("colorful-post");
});
it("reads back the color value correctly", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Read Color Test",
theme_color: "#00ff88",
},
slug: "read-color-test",
});
const fetched = await ctx.client.get("posts", item.id);
expect(fetched.data.title).toBe("Read Color Test");
expect(fetched.data.theme_color).toBe("#00ff88");
});
it("updates the color value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Update Color Test",
theme_color: "#111111",
},
slug: "update-color-test",
});
const fetched = await ctx.client.get("posts", item.id);
const updated = await ctx.client.update("posts", item.id, {
data: { theme_color: "#222222" },
_rev: fetched._rev,
});
expect(updated.data.theme_color).toBe("#222222");
});
it("allows null/empty color value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "No Color Post",
},
slug: "no-color-post",
});
const fetched = await ctx.client.get("posts", item.id);
// Color field is optional, so it should be null/undefined
expect(fetched.data.theme_color == null || fetched.data.theme_color === "").toBe(true);
});
it("stores color value alongside other content fields", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Full Post",
excerpt: "A post with color",
theme_color: "#abcdef",
},
slug: "full-post-with-color",
});
const fetched = await ctx.client.get("posts", item.id);
expect(fetched.data.title).toBe("Full Post");
expect(fetched.data.excerpt).toBe("A post with color");
expect(fetched.data.theme_color).toBe("#abcdef");
});
});
describe("content list with widget fields", () => {
it("includes widget field values in list results", async () => {
await ctx.client.create("posts", {
data: {
title: "Listed Color Post",
theme_color: "#ff0000",
},
slug: "listed-color-post",
});
const list = await ctx.client.list("posts");
const post = list.items.find(
(p: { data: Record<string, unknown> }) => p.data.title === "Listed Color Post",
);
expect(post).toBeTruthy();
expect((post as { data: Record<string, unknown> }).data.theme_color).toBe("#ff0000");
});
});
});

View File

@@ -0,0 +1,85 @@
/**
* Integration test for MCP OAuth discovery against a real Astro dev server.
*
* Uses the MCP SDK's own discovery functions with real fetch() so we test
* the actual Astro route registration, not just the handler logic. This
* catches mismatches between the paths we register in routes.ts and the
* paths the SDK constructs per RFC 8414 / RFC 9728.
*/
import {
discoverOAuthProtectedResourceMetadata,
discoverAuthorizationServerMetadata,
} from "@modelcontextprotocol/sdk/client/auth.js";
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
const PORT = 4401;
describe("MCP OAuth Discovery (real server)", () => {
let ctx: TestServerContext;
beforeAll(async () => {
assertNodeVersion();
ctx = await createTestServer({ port: PORT });
});
afterAll(async () => {
await ctx?.cleanup();
});
it("discovers protected resource metadata from the MCP server URL", async () => {
const metadata = await discoverOAuthProtectedResourceMetadata(`${ctx.baseUrl}/_emdash/api/mcp`);
expect(metadata.resource).toBe(`${ctx.baseUrl}/_emdash/api/mcp`);
expect(metadata.authorization_servers).toContain(`${ctx.baseUrl}/_emdash`);
expect(metadata.scopes_supported).toContain("content:read");
expect(metadata.bearer_methods_supported).toContain("header");
});
it("discovers authorization server metadata via the RFC 8414 path", async () => {
// Step 1: get the authorization server URL from protected resource metadata
const resourceMeta = await discoverOAuthProtectedResourceMetadata(
`${ctx.baseUrl}/_emdash/api/mcp`,
);
const authServerUrl = resourceMeta.authorization_servers![0]!;
// Step 2: the SDK constructs /.well-known/oauth-authorization-server/_emdash
// per RFC 8414 (path component appended after well-known prefix).
// This must resolve to a real route, not 404.
const metadata = await discoverAuthorizationServerMetadata(authServerUrl);
expect(metadata).toBeDefined();
expect(metadata!.issuer).toBe(`${ctx.baseUrl}/_emdash`);
expect(metadata!.authorization_endpoint).toBe(`${ctx.baseUrl}/_emdash/oauth/authorize`);
expect(metadata!.token_endpoint).toBe(`${ctx.baseUrl}/_emdash/api/oauth/token`);
expect(metadata!.code_challenge_methods_supported).toContain("S256");
expect(metadata!.response_types_supported).toContain("code");
expect(metadata!.grant_types_supported).toContain("authorization_code");
});
it("MCP endpoint returns 401 with resource_metadata in WWW-Authenticate", async () => {
// Unauthenticated POST to MCP should return 401 with the discovery hint
const res = await fetch(`${ctx.baseUrl}/_emdash/api/mcp`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
jsonrpc: "2.0",
method: "initialize",
params: {
protocolVersion: "2025-03-26",
capabilities: {},
clientInfo: { name: "test", version: "1.0" },
},
id: 1,
}),
});
expect(res.status).toBe(401);
const wwwAuth = res.headers.get("WWW-Authenticate");
expect(wwwAuth).toContain("resource_metadata=");
expect(wwwAuth).toContain("/.well-known/oauth-protected-resource");
});
});

View File

@@ -0,0 +1,518 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { defaultCommentModerate } from "../../../src/comments/moderator.js";
import {
createComment,
moderateComment,
type CommentHookRunner,
} from "../../../src/comments/service.js";
import type { Database } from "../../../src/database/types.js";
import { definePlugin } from "../../../src/plugins/define-plugin.js";
import { createHookPipeline, resolveExclusiveHooks } from "../../../src/plugins/hooks.js";
import type {
CollectionCommentSettings,
CommentBeforeCreateEvent,
CommentModerateEvent,
ModerationDecision,
PluginContext,
} from "../../../src/plugins/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Builds a complete CollectionCommentSettings for tests: sensible defaults
 * (comments on, first-time moderation, 90-day close window, auto-approve
 * CMS users) with any subset overridable per test.
 */
function defaultSettings(
  overrides: Partial<CollectionCommentSettings> = {},
): CollectionCommentSettings {
  const base: CollectionCommentSettings = {
    commentsEnabled: true,
    commentsModeration: "first_time",
    commentsClosedAfterDays: 90,
    commentsAutoApproveUsers: true,
  };
  return { ...base, ...overrides };
}
// Canonical comment submission shared by the service tests; individual
// cases spread and override fields as needed.
const defaultInput = {
  collection: "post",
  contentId: "content-1",
  authorName: "Jane",
  authorEmail: "jane@example.com",
  body: "Great post!",
};
// ---------------------------------------------------------------------------
// Group 1: Service with mocked CommentHookRunner
// ---------------------------------------------------------------------------
describe("Comment Service with CommentHookRunner", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
function makeHookRunner(overrides: Partial<CommentHookRunner> = {}): CommentHookRunner {
return {
runBeforeCreate: vi.fn(async (event: CommentBeforeCreateEvent) => event),
runModerate: vi.fn(async () => ({
status: "approved" as const,
reason: "Test",
})),
fireAfterCreate: vi.fn(),
fireAfterModerate: vi.fn(),
...overrides,
};
}
it("creates comment with status from runModerate", async () => {
const hooks = makeHookRunner({
runModerate: vi.fn(async () => ({ status: "pending" as const, reason: "Held" })),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.status).toBe("pending");
expect(result!.decision.status).toBe("pending");
});
it("transforms comment data via beforeCreate", async () => {
const hooks = makeHookRunner({
runBeforeCreate: vi.fn(async (event: CommentBeforeCreateEvent) => ({
...event,
comment: { ...event.comment, body: "Modified body" },
})),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.body).toBe("Modified body");
});
it("returns null when beforeCreate returns false (rejected)", async () => {
const hooks = makeHookRunner({
runBeforeCreate: vi.fn(async () => false as const),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).toBeNull();
});
it("saves as spam when runModerate returns spam", async () => {
const hooks = makeHookRunner({
runModerate: vi.fn(async () => ({ status: "spam" as const, reason: "Spam detected" })),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.status).toBe("spam");
});
it("fires fireAfterCreate with correct shape", async () => {
const hooks = makeHookRunner();
await createComment(db, defaultInput, defaultSettings(), hooks, {
id: "content-1",
collection: "post",
slug: "my-post",
title: "My Post",
});
expect(hooks.fireAfterCreate).toHaveBeenCalledOnce();
const event = (hooks.fireAfterCreate as ReturnType<typeof vi.fn>).mock.calls[0]![0];
expect(event.comment.collection).toBe("post");
expect(event.comment.contentId).toBe("content-1");
expect(event.content.slug).toBe("my-post");
});
it("moderateComment updates status and fires fireAfterModerate", async () => {
const hooks = makeHookRunner();
const created = await createComment(db, defaultInput, defaultSettings(), hooks);
const updated = await moderateComment(
db,
created!.comment.id,
"spam",
{ id: "admin-1", name: "Admin" },
hooks,
);
expect(updated).not.toBeNull();
expect(updated!.status).toBe("spam");
expect(hooks.fireAfterModerate).toHaveBeenCalledOnce();
const event = (hooks.fireAfterModerate as ReturnType<typeof vi.fn>).mock.calls[0]![0];
expect(event.previousStatus).toBe("approved");
expect(event.newStatus).toBe("spam");
expect(event.moderator.id).toBe("admin-1");
});
it("moderateComment returns null for non-existent id", async () => {
const hooks = makeHookRunner();
const result = await moderateComment(
db,
"nonexistent",
"approved",
{ id: "admin-1", name: "Admin" },
hooks,
);
expect(result).toBeNull();
expect(hooks.fireAfterModerate).not.toHaveBeenCalled();
});
});
// ---------------------------------------------------------------------------
// Group 2: Built-in moderator unit tests
// ---------------------------------------------------------------------------
describe("Built-in Default Comment Moderator", () => {
const ctx = {} as PluginContext;
function makeModerateEvent(overrides: Partial<CommentModerateEvent> = {}): CommentModerateEvent {
return {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings(),
priorApprovedCount: 0,
...overrides,
};
}
it("auto-approves authenticated CMS users when configured", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
comment: {
...makeModerateEvent().comment,
authorUserId: "user-1",
},
collectionSettings: defaultSettings({ commentsAutoApproveUsers: true }),
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("Authenticated");
});
it("does not auto-approve when commentsAutoApproveUsers is false", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
comment: {
...makeModerateEvent().comment,
authorUserId: "user-1",
},
collectionSettings: defaultSettings({
commentsAutoApproveUsers: false,
commentsModeration: "all",
}),
}),
ctx,
);
expect(decision.status).toBe("pending");
});
it("approves when moderation is 'none'", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "none" }),
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("disabled");
});
it("approves returning commenter with first_time moderation", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "first_time" }),
priorApprovedCount: 3,
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("Returning");
});
it("holds new commenter with first_time moderation", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "first_time" }),
priorApprovedCount: 0,
}),
ctx,
);
expect(decision.status).toBe("pending");
});
it("holds all comments when moderation is 'all'", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "all" }),
priorApprovedCount: 10,
}),
ctx,
);
expect(decision.status).toBe("pending");
});
});
// ---------------------------------------------------------------------------
// Group 3: Real HookPipeline integration
// ---------------------------------------------------------------------------
describe("Comment Hooks with HookPipeline", () => {
let pipelineDb: Kysely<Database>;
beforeEach(async () => {
pipelineDb = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(pipelineDb);
});
it("invokes comment:beforeCreate handler registered via definePlugin", async () => {
const spy = vi.fn(async (event: CommentBeforeCreateEvent) => ({
...event,
metadata: { ...event.metadata, enriched: true },
}));
const plugin = definePlugin({
id: "test-enricher",
version: "1.0.0",
capabilities: ["users:read"],
hooks: {
"comment:beforeCreate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
const event: CommentBeforeCreateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
};
const result = await pipeline.runCommentBeforeCreate(event);
expect(spy).toHaveBeenCalledOnce();
expect(result).not.toBe(false);
expect((result as CommentBeforeCreateEvent).metadata.enriched).toBe(true);
});
it("invokes exclusive comment:moderate plugin and returns decision", async () => {
const moderateHandler = vi.fn(async () => ({
status: "spam" as const,
reason: "Custom moderator",
}));
const plugin = definePlugin({
id: "test-moderator",
version: "1.0.0",
capabilities: ["users:read"],
hooks: {
"comment:moderate": {
exclusive: true,
handler: moderateHandler,
},
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
// Auto-select the sole provider
await resolveExclusiveHooks({
pipeline,
isActive: () => true,
getOption: async () => null,
setOption: async () => {},
deleteOption: async () => {},
});
const moderateEvent: CommentModerateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Buy cheap pills",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings(),
priorApprovedCount: 0,
};
const result = await pipeline.invokeExclusiveHook("comment:moderate", moderateEvent);
expect(result).not.toBeNull();
expect((result!.result as ModerationDecision).status).toBe("spam");
expect(moderateHandler).toHaveBeenCalledOnce();
});
it("built-in moderator is auto-selected when sole provider", async () => {
const { DEFAULT_COMMENT_MODERATOR_PLUGIN_ID } =
await import("../../../src/comments/moderator.js");
const plugin = definePlugin({
id: DEFAULT_COMMENT_MODERATOR_PLUGIN_ID,
version: "0.0.0",
capabilities: ["users:read"],
hooks: {
"comment:moderate": {
exclusive: true,
handler: defaultCommentModerate,
},
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await resolveExclusiveHooks({
pipeline,
isActive: () => true,
getOption: async () => null,
setOption: async () => {},
deleteOption: async () => {},
});
const selection = pipeline.getExclusiveSelection("comment:moderate");
expect(selection).toBe(DEFAULT_COMMENT_MODERATOR_PLUGIN_ID);
// Verify it actually works
const moderateEvent: CommentModerateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings({ commentsModeration: "none" }),
priorApprovedCount: 0,
};
const result = await pipeline.invokeExclusiveHook("comment:moderate", moderateEvent);
expect(result).not.toBeNull();
expect((result!.result as ModerationDecision).status).toBe("approved");
});
it("fires comment:afterCreate handlers", async () => {
const spy = vi.fn(async () => {});
const plugin = definePlugin({
id: "test-after-create",
version: "1.0.0",
capabilities: ["users:read"],
hooks: {
"comment:afterCreate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await pipeline.runCommentAfterCreate({
comment: {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
status: "approved",
moderationMetadata: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
},
metadata: {},
content: { id: "content-1", collection: "post", slug: "my-post" },
});
expect(spy).toHaveBeenCalledOnce();
});
it("fires comment:afterModerate handlers", async () => {
const spy = vi.fn(async () => {});
const plugin = definePlugin({
id: "test-after-moderate",
version: "1.0.0",
capabilities: ["users:read"],
hooks: {
"comment:afterModerate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await pipeline.runCommentAfterModerate({
comment: {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
status: "approved",
moderationMetadata: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
},
previousStatus: "pending",
newStatus: "approved",
moderator: { id: "admin-1", name: "Admin" },
});
expect(spy).toHaveBeenCalledOnce();
});
});

View File

@@ -0,0 +1,318 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import {
buildCommentNotificationEmail,
lookupContentAuthor,
sendCommentNotification,
} from "../../../src/comments/notifications.js";
import type { Database } from "../../../src/database/types.js";
import type { EmailPipeline } from "../../../src/plugins/email.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("Comment Notifications", () => {
describe("buildCommentNotificationEmail", () => {
it("builds email with content title", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Great post!",
contentTitle: "My Blog Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.to).toBe("author@example.com");
expect(email.subject).toBe('New comment on "My Blog Post"');
expect(email.text).toContain("Jane");
expect(email.text).toContain("Great post!");
expect(email.text).toContain("/_emdash/admin/comments");
expect(email.html).toContain("Jane");
expect(email.html).toContain("Great post!");
});
it("falls back to collection name when no title", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Nice!",
contentTitle: "",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.subject).toBe('New comment on "post item"');
});
it("truncates long comment bodies", () => {
const longBody = "x".repeat(600);
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: longBody,
contentTitle: "Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.text).toContain("...");
expect(email.text).not.toContain("x".repeat(600));
});
it("escapes HTML in author name and body", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: '<script>alert("xss")</script>',
commentBody: "<img src=x onerror=alert(1)>",
contentTitle: "Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.html).not.toContain("<script>");
expect(email.html).not.toContain("<img src=x");
expect(email.html).toContain("&lt;script&gt;");
});
it("strips CRLF from subject to prevent header injection", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Nice!",
contentTitle: "Post\r\nBcc: attacker@evil.com",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.subject).not.toContain("\r");
expect(email.subject).not.toContain("\n");
expect(email.subject).toContain("Post");
});
});
describe("sendCommentNotification", () => {
let mockEmail: EmailPipeline;
let sendSpy: ReturnType<typeof vi.fn>;
beforeEach(() => {
sendSpy = vi.fn().mockResolvedValue(undefined);
mockEmail = {
send: sendSpy,
isAvailable: () => true,
} as unknown as EmailPipeline;
});
it("sends notification for approved comments", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentTitle: "My Post",
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(true);
expect(sendSpy).toHaveBeenCalledOnce();
const [message, source] = sendSpy.mock.calls[0]!;
expect(message.to).toBe("author@example.com");
expect(message.subject).toContain("My Post");
expect(source).toBe("emdash-comments");
});
it("skips pending comments", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "pending",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when no content author", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentAuthor: undefined,
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when email provider not available", async () => {
mockEmail = {
send: sendSpy,
isAvailable: () => false,
} as unknown as EmailPipeline;
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when commenter is the content author", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Author",
authorEmail: "author@example.com",
body: "My own comment",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("compares emails case-insensitively for self-comment check", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Author",
authorEmail: "Author@Example.COM",
body: "My own comment",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
});
describe("lookupContentAuthor", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("returns null for non-existent content", async () => {
const result = await lookupContentAuthor(db, "post", "nonexistent");
expect(result).toBeNull();
});
it("returns slug and author for content with author", async () => {
await db
.insertInto("users")
.values({
id: "user1",
email: "author@example.com",
name: "Author Name",
role: 50,
email_verified: 1,
})
.execute();
await db
.insertInto("ec_post" as never)
.values({
id: "post1",
slug: "my-post",
status: "published",
author_id: "user1",
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post1");
expect(result).not.toBeNull();
expect(result!.slug).toBe("my-post");
expect(result!.author).toEqual({
id: "user1",
email: "author@example.com",
name: "Author Name",
});
});
it("excludes author with unverified email", async () => {
await db
.insertInto("users")
.values({
id: "unverified1",
email: "unverified@example.com",
name: "Unverified",
role: 50,
email_verified: 0,
})
.execute();
await db
.insertInto("ec_post" as never)
.values({
id: "post3",
slug: "unverified-post",
status: "published",
author_id: "unverified1",
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post3");
expect(result).not.toBeNull();
expect(result!.slug).toBe("unverified-post");
expect(result!.author).toBeUndefined();
});
it("rejects invalid collection names", async () => {
await expect(lookupContentAuthor(db, "'; DROP TABLE users; --", "post1")).rejects.toThrow(
"collection",
);
});
it("returns slug without author for content without author_id", async () => {
await db
.insertInto("ec_post" as never)
.values({
id: "post2",
slug: "orphan-post",
status: "published",
author_id: null,
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post2");
expect(result).not.toBeNull();
expect(result!.slug).toBe("orphan-post");
expect(result!.author).toBeUndefined();
});
});
});

View File

@@ -0,0 +1,412 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { CommentRepository, type Comment } from "../../../src/database/repositories/comment.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
describe("CommentRepository", () => {
let db: Kysely<Database>;
let repo: CommentRepository;
beforeEach(async () => {
db = await setupTestDatabase();
repo = new CommentRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
// -------------------------------------------------------------------------
// Helpers
// -------------------------------------------------------------------------
function makeInput(overrides: Partial<Parameters<CommentRepository["create"]>[0]> = {}) {
return {
collection: "post",
contentId: "content-1",
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
...overrides,
};
}
// -------------------------------------------------------------------------
// CRUD
// -------------------------------------------------------------------------
describe("CRUD", () => {
it("creates a comment and returns it with id and timestamps", async () => {
const comment = await repo.create(makeInput());
expect(comment.id).toBeTruthy();
expect(comment.collection).toBe("post");
expect(comment.contentId).toBe("content-1");
expect(comment.authorName).toBe("Jane");
expect(comment.authorEmail).toBe("jane@example.com");
expect(comment.body).toBe("Great post!");
expect(comment.status).toBe("pending");
expect(comment.createdAt).toBeTruthy();
expect(comment.updatedAt).toBeTruthy();
expect(comment.parentId).toBeNull();
});
it("findById returns the comment", async () => {
const created = await repo.create(makeInput());
const found = await repo.findById(created.id);
expect(found).not.toBeNull();
expect(found!.id).toBe(created.id);
expect(found!.authorName).toBe("Jane");
});
it("findById returns null for non-existent id", async () => {
const found = await repo.findById("nonexistent");
expect(found).toBeNull();
});
it("findByContent returns matching comments", async () => {
await repo.create(makeInput());
await repo.create(makeInput({ body: "Second comment" }));
await repo.create(makeInput({ contentId: "other-content" }));
const result = await repo.findByContent("post", "content-1");
expect(result.items).toHaveLength(2);
expect(result.items.every((c) => c.contentId === "content-1")).toBe(true);
});
it("findByStatus filters by status", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "spam" }));
const result = await repo.findByStatus("approved");
expect(result.items).toHaveLength(1);
expect(result.items[0]!.status).toBe("approved");
});
});
// -------------------------------------------------------------------------
// Status transitions
// -------------------------------------------------------------------------
describe("Status transitions", () => {
it("updateStatus changes status", async () => {
const created = await repo.create(makeInput());
const updated = await repo.updateStatus(created.id, "approved");
expect(updated).not.toBeNull();
expect(updated!.status).toBe("approved");
expect(updated!.id).toBe(created.id);
});
it("bulkUpdateStatus returns count of updated rows", async () => {
const c1 = await repo.create(makeInput());
const c2 = await repo.create(makeInput({ body: "Second" }));
const count = await repo.bulkUpdateStatus([c1.id, c2.id], "approved");
expect(count).toBe(2);
const found1 = await repo.findById(c1.id);
const found2 = await repo.findById(c2.id);
expect(found1!.status).toBe("approved");
expect(found2!.status).toBe("approved");
});
it("bulkUpdateStatus returns 0 for empty array", async () => {
const count = await repo.bulkUpdateStatus([], "approved");
expect(count).toBe(0);
});
});
// -------------------------------------------------------------------------
// Deletion
// -------------------------------------------------------------------------
describe("Deletion", () => {
it("delete hard-deletes and returns true", async () => {
const created = await repo.create(makeInput());
const deleted = await repo.delete(created.id);
expect(deleted).toBe(true);
expect(await repo.findById(created.id)).toBeNull();
});
it("delete returns false for non-existent id", async () => {
const deleted = await repo.delete("nonexistent");
expect(deleted).toBe(false);
});
it("bulkDelete returns count", async () => {
const c1 = await repo.create(makeInput());
const c2 = await repo.create(makeInput({ body: "Second" }));
const count = await repo.bulkDelete([c1.id, c2.id]);
expect(count).toBe(2);
});
it("bulkDelete returns 0 for empty array", async () => {
const count = await repo.bulkDelete([]);
expect(count).toBe(0);
});
it("deleteByContent removes all comments for content", async () => {
await repo.create(makeInput());
await repo.create(makeInput({ body: "Second" }));
await repo.create(makeInput({ contentId: "other-content" }));
const count = await repo.deleteByContent("post", "content-1");
expect(count).toBe(2);
const remaining = await repo.findByContent("post", "content-1");
expect(remaining.items).toHaveLength(0);
const other = await repo.findByContent("post", "other-content");
expect(other.items).toHaveLength(1);
});
it("parent FK cascade deletes replies", async () => {
const parent = await repo.create(makeInput());
const reply = await repo.create(makeInput({ parentId: parent.id, body: "Reply" }));
await repo.delete(parent.id);
expect(await repo.findById(parent.id)).toBeNull();
expect(await repo.findById(reply.id)).toBeNull();
});
});
// -------------------------------------------------------------------------
// Counting
// -------------------------------------------------------------------------
describe("Counting", () => {
it("countByContent with and without status filter", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "approved" }));
const total = await repo.countByContent("post", "content-1");
expect(total).toBe(3);
const approved = await repo.countByContent("post", "content-1", "approved");
expect(approved).toBe(2);
const pending = await repo.countByContent("post", "content-1", "pending");
expect(pending).toBe(1);
});
it("countByStatus returns grouped counts", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "spam" }));
const counts = await repo.countByStatus();
expect(counts.approved).toBe(2);
expect(counts.pending).toBe(1);
expect(counts.spam).toBe(1);
expect(counts.trash).toBe(0);
});
it("countApprovedByEmail counts only approved comments", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
const count = await repo.countApprovedByEmail("jane@example.com");
expect(count).toBe(2);
});
});
// -------------------------------------------------------------------------
// Cursor pagination
// -------------------------------------------------------------------------
describe("Cursor pagination", () => {
it("findByContent paginates with cursor", async () => {
// Create 5 comments
for (let i = 0; i < 5; i++) {
await repo.create(makeInput({ body: `Comment ${i}` }));
}
const page1 = await repo.findByContent("post", "content-1", { limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.findByContent("post", "content-1", {
limit: 2,
cursor: page1.nextCursor,
});
expect(page2.items).toHaveLength(2);
expect(page2.nextCursor).toBeTruthy();
const page3 = await repo.findByContent("post", "content-1", {
limit: 2,
cursor: page2.nextCursor,
});
expect(page3.items).toHaveLength(1);
expect(page3.nextCursor).toBeUndefined();
// Ensure no duplicates across pages
const allIds = [...page1.items, ...page2.items, ...page3.items].map((c) => c.id);
expect(new Set(allIds).size).toBe(5);
});
it("findByStatus paginates with cursor", async () => {
for (let i = 0; i < 4; i++) {
await repo.create(makeInput({ status: "approved", body: `Comment ${i}` }));
}
const page1 = await repo.findByStatus("approved", { limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.findByStatus("approved", {
limit: 2,
cursor: page1.nextCursor,
});
expect(page2.items).toHaveLength(2);
expect(page2.nextCursor).toBeUndefined();
});
});
// -------------------------------------------------------------------------
// Threading
// -------------------------------------------------------------------------
describe("Threading", () => {
it("assembleThreads produces 1-level nesting", () => {
const root: Comment = {
id: "root",
collection: "post",
contentId: "c1",
parentId: null,
authorName: "A",
authorEmail: "a@test.com",
authorUserId: null,
body: "Root",
status: "approved",
ipHash: null,
userAgent: null,
moderationMetadata: null,
createdAt: "2026-01-01T00:00:00.000Z",
updatedAt: "2026-01-01T00:00:00.000Z",
};
const reply: Comment = {
...root,
id: "reply1",
parentId: "root",
body: "Reply",
};
const threads = CommentRepository.assembleThreads([root, reply]);
expect(threads).toHaveLength(1);
expect((threads[0] as Comment & { _replies?: Comment[] })._replies).toHaveLength(1);
});
it("toPublicComment strips private fields", () => {
const comment: Comment & { _replies?: Comment[] } = {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: "user-1",
body: "Great!",
status: "approved",
ipHash: "abc123",
userAgent: "Mozilla/5.0",
moderationMetadata: { score: 0.9 },
createdAt: "2026-01-01T00:00:00.000Z",
updatedAt: "2026-01-01T00:00:00.000Z",
};
const pub = CommentRepository.toPublicComment(comment);
expect(pub.id).toBe("c1");
expect(pub.authorName).toBe("Jane");
expect(pub.isRegisteredUser).toBe(true);
expect(pub.body).toBe("Great!");
expect(pub.createdAt).toBe("2026-01-01T00:00:00.000Z");
// Private fields should not be present
expect("authorEmail" in pub).toBe(false);
expect("ipHash" in pub).toBe(false);
expect("userAgent" in pub).toBe(false);
expect("moderationMetadata" in pub).toBe(false);
expect("status" in pub).toBe(false);
});
});
// -------------------------------------------------------------------------
// Edge cases
// -------------------------------------------------------------------------
describe("Edge cases", () => {
it("returns empty results for non-existent content", async () => {
const result = await repo.findByContent("post", "nonexistent");
expect(result.items).toHaveLength(0);
expect(result.nextCursor).toBeUndefined();
});
it("moderationMetadata JSON round-trips correctly", async () => {
const metadata = {
aiScore: 0.95,
categories: ["safe"],
nested: { key: "value" },
};
const created = await repo.create(makeInput({ moderationMetadata: metadata }));
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toEqual(metadata);
});
it("moderationMetadata null round-trips", async () => {
const created = await repo.create(makeInput());
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toBeNull();
});
it("findByStatus with search filters by body", async () => {
await repo.create(makeInput({ status: "approved", body: "Hello world" }));
await repo.create(makeInput({ status: "approved", body: "Goodbye world" }));
const result = await repo.findByStatus("approved", { search: "Hello" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.body).toBe("Hello world");
});
it("findByStatus with search filters by author name", async () => {
await repo.create(makeInput({ status: "approved", authorName: "Alice" }));
await repo.create(makeInput({ status: "approved", authorName: "Bob" }));
const result = await repo.findByStatus("approved", { search: "Alice" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.authorName).toBe("Alice");
});
it("findByContent with status filter", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
const result = await repo.findByContent("post", "content-1", { status: "approved" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.status).toBe("approved");
});
it("updateModerationMetadata updates the JSON field", async () => {
const created = await repo.create(makeInput());
await repo.updateModerationMetadata(created.id, { score: 0.5 });
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toEqual({ score: 0.5 });
});
});
});

View File

@@ -0,0 +1,349 @@
/**
* Dialect compatibility tests
*
* Runs core database operations against every available dialect.
* SQLite always runs (in-memory). Postgres runs when EMDASH_TEST_PG is set.
*
* These tests verify that migrations, schema registry, and content CRUD
* work identically across dialects.
*/
import { it, expect, beforeEach, afterEach } from "vitest";
import {
runMigrations,
getMigrationStatus,
MIGRATION_COUNT,
} from "../../../src/database/migrations/runner.js";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import {
createForDialect,
describeEachDialect,
setupForDialect,
setupForDialectWithCollections,
teardownForDialect,
type DialectTestContext,
} from "../../utils/test-db.js";
// ---------------------------------------------------------------------------
// Migrations
// ---------------------------------------------------------------------------
describeEachDialect("Migrations", (dialect) => {
let ctx: DialectTestContext;
beforeEach(async () => {
// Bare database — no migrations yet. Tests run them explicitly.
ctx = await createForDialect(dialect);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("runs all migrations and creates system tables", async () => {
await runMigrations(ctx.db);
const tables = [
"revisions",
"taxonomies",
"content_taxonomies",
"media",
"users",
"options",
"audit_logs",
"_emdash_migrations",
"_emdash_collections",
"_emdash_fields",
"_plugin_storage",
"_plugin_state",
"_plugin_indexes",
"_emdash_sections",
"_emdash_bylines",
"_emdash_content_bylines",
];
for (const table of tables) {
const result = await ctx.db
.selectFrom(table as keyof Database)
.selectAll()
.execute();
expect(Array.isArray(result), `table ${table} should exist`).toBe(true);
}
});
it("tracks migrations in _emdash_migrations", async () => {
await runMigrations(ctx.db);
const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();
expect(migrations).toHaveLength(MIGRATION_COUNT);
expect(migrations[0]?.name).toBe("001_initial");
});
it("is idempotent", async () => {
await runMigrations(ctx.db);
await runMigrations(ctx.db);
const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();
expect(migrations).toHaveLength(MIGRATION_COUNT);
});
it("reports correct migration status", async () => {
const before = await getMigrationStatus(ctx.db);
expect(before.pending).toContain("001_initial");
expect(before.applied).toHaveLength(0);
await runMigrations(ctx.db);
const after = await getMigrationStatus(ctx.db);
expect(after.applied).toContain("001_initial");
expect(after.pending).toHaveLength(0);
});
});
// ---------------------------------------------------------------------------
// Schema registry
// ---------------------------------------------------------------------------
describeEachDialect("Schema registry", (dialect) => {
let ctx: DialectTestContext;
let registry: SchemaRegistry;
beforeEach(async () => {
ctx = await setupForDialect(dialect);
await runMigrations(ctx.db);
registry = new SchemaRegistry(ctx.db);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("creates a collection and its dynamic table", async () => {
await registry.createCollection({
slug: "article",
label: "Articles",
labelSingular: "Article",
});
// Dynamic table should exist
const rows = await ctx.db
.selectFrom("ec_article" as keyof Database)
.selectAll()
.execute();
expect(Array.isArray(rows)).toBe(true);
// Registry should have the collection
const collections = await registry.listCollections();
expect(collections.map((c) => c.slug)).toContain("article");
});
it("adds fields to a collection", async () => {
await registry.createCollection({
slug: "post",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("post", {
slug: "title",
label: "Title",
type: "string",
});
await registry.createField("post", {
slug: "body",
label: "Body",
type: "portableText",
});
await registry.createField("post", {
slug: "views",
label: "Views",
type: "integer",
});
const coll = await registry.getCollectionWithFields("post");
expect(coll).not.toBeNull();
const slugs = coll!.fields.map((f) => f.slug);
expect(slugs).toContain("title");
expect(slugs).toContain("body");
expect(slugs).toContain("views");
});
it("deletes a collection and drops its table", async () => {
await registry.createCollection({
slug: "temp",
label: "Temp",
labelSingular: "Temp",
});
// Verify it exists
const before = await registry.listCollections();
expect(before.map((c) => c.slug)).toContain("temp");
await registry.deleteCollection("temp");
const after = await registry.listCollections();
expect(after.map((c) => c.slug)).not.toContain("temp");
});
});
// ---------------------------------------------------------------------------
// Content CRUD
// ---------------------------------------------------------------------------
describeEachDialect("Content CRUD", (dialect) => {
let ctx: DialectTestContext;
let repo: ContentRepository;
beforeEach(async () => {
ctx = await setupForDialectWithCollections(dialect);
repo = new ContentRepository(ctx.db);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("creates and retrieves content", async () => {
const created = await repo.create({
type: "post",
slug: "hello-world",
data: {
title: "Hello World",
content: [{ _type: "block", children: [{ _type: "span", text: "Content" }] }],
},
status: "draft",
});
expect(created.id).toBeDefined();
expect(created.slug).toBe("hello-world");
const found = await repo.findById("post", created.id);
expect(found).not.toBeNull();
expect(found!.data.title).toBe("Hello World");
expect(found!.slug).toBe("hello-world");
});
it("updates content", async () => {
const created = await repo.create({
type: "post",
slug: "original",
data: { title: "Original" },
status: "draft",
});
const updated = await repo.update("post", created.id, {
data: { title: "Updated" },
});
expect(updated.data.title).toBe("Updated");
expect(updated.slug).toBe("original");
});
it("lists content with pagination", async () => {
for (let i = 0; i < 5; i++) {
await repo.create({
type: "post",
slug: `post-${i}`,
data: { title: `Post ${i}` },
status: "draft",
});
}
const result = await repo.findMany("post", { limit: 3 });
expect(result.items).toHaveLength(3);
if (result.nextCursor) {
const page2 = await repo.findMany("post", {
limit: 3,
cursor: result.nextCursor,
});
expect(page2.items).toHaveLength(2);
}
});
it("soft-deletes content", async () => {
const created = await repo.create({
type: "post",
slug: "to-delete",
data: { title: "To Delete" },
status: "draft",
});
const deleted = await repo.delete("post", created.id);
expect(deleted).toBe(true);
const found = await repo.findById("post", created.id);
expect(found).toBeNull();
});
it("filters by status", async () => {
await repo.create({
type: "post",
slug: "draft-post",
data: { title: "Draft Post" },
status: "draft",
});
await repo.create({
type: "post",
slug: "published-post",
data: { title: "Published Post" },
status: "published",
});
const drafts = await repo.findMany("post", { where: { status: "draft" } });
expect(drafts.items).toHaveLength(1);
expect(drafts.items[0]?.data.title).toBe("Draft Post");
const published = await repo.findMany("post", { where: { status: "published" } });
expect(published.items).toHaveLength(1);
expect(published.items[0]?.data.title).toBe("Published Post");
});
it("enforces unique slug within a collection", async () => {
await repo.create({
type: "post",
slug: "same-slug",
data: { title: "First" },
status: "draft",
});
await expect(
repo.create({
type: "post",
slug: "same-slug",
data: { title: "Second" },
status: "draft",
}),
).rejects.toThrow();
});
it("isolates collections", async () => {
await repo.create({
type: "post",
slug: "shared-slug",
data: { title: "A Post" },
status: "draft",
});
await repo.create({
type: "page",
slug: "shared-slug",
data: { title: "A Page" },
status: "draft",
});
const posts = await repo.findMany("post");
const pages = await repo.findMany("page");
expect(posts.items).toHaveLength(1);
expect(pages.items).toHaveLength(1);
expect(posts.items[0]?.data.title).toBe("A Post");
expect(pages.items[0]?.data.title).toBe("A Page");
});
});

View File

@@ -0,0 +1,119 @@
import { mkdtempSync, rmSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { sql } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import { MIGRATION_COUNT, runMigrations } from "../../../src/database/migrations/runner.js";
/**
* Reproduces the issue from #762: when two callers run migrations
* concurrently against the same database (e.g. two Cloudflare Workers
* isolates handling parallel requests during a fresh deploy), the Kysely
* Migrator races on inserting into `_emdash_migrations` and the loser
* throws `UNIQUE constraint failed: _emdash_migrations.name`.
*
* The Kysely SqliteAdapter (which D1 inherits from kysely-d1) has a no-op
* `acquireMigrationLock`, so this race is unprotected on D1.
*
* We simulate the race here by pointing two independent Kysely instances
* at the same SQLite file and starting `runMigrations` on both
* concurrently. SQLite serializes writes, but both Migrators still race
* on the bookkeeping insert.
*/
describe("Migration race condition (#762)", () => {
// Per-test temp directory: the two Kysely instances in a test must share
// one SQLite file, but tests must not share state with each other.
let tmpDir: string;
let dbPath: string;
beforeEach(() => {
  tmpDir = mkdtempSync(join(tmpdir(), "emdash-migration-race-"));
  dbPath = join(tmpDir, "data.db");
});
afterEach(() => {
  rmSync(tmpDir, { recursive: true, force: true });
});
it("should not throw when two callers run migrations concurrently", async () => {
const dbA = createDatabase({ url: `file:${dbPath}` });
const dbB = createDatabase({ url: `file:${dbPath}` });
try {
// Fire both migrators in parallel against the same database file.
// On D1, this is what happens when two Workers isolates spin up
// at once on first request after deploy.
const results = await Promise.allSettled([runMigrations(dbA), runMigrations(dbB)]);
const failures = results.filter((r) => r.status === "rejected");
if (failures.length > 0) {
const messages = failures.map((f) =>
f.status === "rejected" ? String(f.reason?.message ?? f.reason) : "",
);
throw new Error(
`Concurrent runMigrations should not throw, but got ${failures.length} failure(s):\n${messages.join("\n")}`,
);
}
// And the DB must actually be fully migrated — we don't want a
// fix that just swallows errors and leaves the schema half-built.
const verifyDb = createDatabase({ url: `file:${dbPath}` });
try {
const row = await sql<{ count: number }>`
SELECT COUNT(*) as count FROM _emdash_migrations
`.execute(verifyDb);
expect(Number(row.rows[0]?.count)).toBe(MIGRATION_COUNT);
} finally {
await verifyDb.destroy();
}
} finally {
await dbA.destroy();
await dbB.destroy();
}
});
it("should fast-path when the migration table has more rows than this build knows about", async () => {
// Simulates an old isolate observing a database that's already been
// migrated by a newer build (one extra migration recorded). The
// fast-path must treat this as "fully migrated" rather than falling
// through to the Kysely Migrator and risking the race-recovery path.
const db = createDatabase({ url: `file:${dbPath}` });
try {
await runMigrations(db);
// Insert a phantom future migration row to simulate a newer build.
await sql`
INSERT INTO _emdash_migrations (name, timestamp)
VALUES ('999_future_build', ${new Date().toISOString()})
`.execute(db);
// Should be a no-op via the fast-path — no errors, no extra work.
const result = await runMigrations(db);
expect(result.applied).toEqual([]);
// Row count is still MIGRATION_COUNT + 1 (we didn't truncate).
const row = await sql<{ count: number }>`
SELECT COUNT(*) as count FROM _emdash_migrations
`.execute(db);
expect(Number(row.rows[0]?.count)).toBe(MIGRATION_COUNT + 1);
} finally {
await db.destroy();
}
});
it("should still surface unrelated migration errors", async () => {
// Exercises the non-race error path so a regression that swallows
// real errors is caught. We migrate once, then delete a single row
// from `_emdash_migrations` so the migrator tries to re-run that
// migration and fails with `table ... already exists` — a non-race
// error that must NOT be swallowed.
const db = createDatabase({ url: `file:${dbPath}` });
try {
await runMigrations(db);
await sql`DELETE FROM _emdash_migrations WHERE name = '001_initial'`.execute(db);
await expect(runMigrations(db)).rejects.toThrow(/Migration failed/i);
} finally {
await db.destroy();
}
});
});

View File

@@ -0,0 +1,435 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import {
runMigrations,
getMigrationStatus,
MIGRATION_COUNT,
} from "../../../src/database/migrations/runner.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
// Full pass over the migration runner against an in-memory SQLite database:
// schema creation, bookkeeping, idempotency, status reporting, and basic
// constraint checks on every table the migrations create.
describe("Database Migrations (Integration)", () => {
  let db: Kysely<Database>;

  beforeEach(() => {
    // Create fresh in-memory database for each test
    db = createDatabase({ url: ":memory:" });
  });

  afterEach(async () => {
    // Close the database connection
    await db.destroy();
  });

  it("should create all tables from migrations", async () => {
    await runMigrations(db);
    // Verify all tables exist by querying them
    // Note: No generic "content" table - collections create ec_* tables dynamically
    const tables = [
      "revisions",
      "taxonomies",
      "content_taxonomies",
      "media",
      "users",
      "options",
      "audit_logs",
      "_emdash_migrations",
      "_emdash_collections",
      "_emdash_fields",
      "_plugin_storage",
      "_plugin_state",
      "_plugin_indexes",
      "_emdash_sections",
      "_emdash_bylines",
      "_emdash_content_bylines",
    ];
    for (const table of tables) {
      // Query table to verify it exists
      const result = await db
        .selectFrom(table as keyof Database)
        .selectAll()
        .execute();
      expect(Array.isArray(result)).toBe(true);
    }
  });

  it("should track migration in _emdash_migrations table", async () => {
    await runMigrations(db);
    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
    expect(migrations).toHaveLength(MIGRATION_COUNT);
    // Pins the exact ordered migration names so an accidental rename or
    // reorder is caught immediately.
    // NOTE(review): "010" is absent from the sequence (009 jumps to 011) —
    // presumably a migration removed upstream; confirm this is intentional.
    expect(migrations[0]?.name).toBe("001_initial");
    expect(migrations[0]?.timestamp).toBeDefined();
    expect(migrations[1]?.name).toBe("002_media_status");
    expect(migrations[1]?.timestamp).toBeDefined();
    expect(migrations[2]?.name).toBe("003_schema_registry");
    expect(migrations[2]?.timestamp).toBeDefined();
    expect(migrations[3]?.name).toBe("004_plugins");
    expect(migrations[3]?.timestamp).toBeDefined();
    expect(migrations[4]?.name).toBe("005_menus");
    expect(migrations[4]?.timestamp).toBeDefined();
    expect(migrations[5]?.name).toBe("006_taxonomy_defs");
    expect(migrations[5]?.timestamp).toBeDefined();
    expect(migrations[6]?.name).toBe("007_widgets");
    expect(migrations[6]?.timestamp).toBeDefined();
    expect(migrations[7]?.name).toBe("008_auth");
    expect(migrations[7]?.timestamp).toBeDefined();
    expect(migrations[8]?.name).toBe("009_user_disabled");
    expect(migrations[8]?.timestamp).toBeDefined();
    expect(migrations[9]?.name).toBe("011_sections");
    expect(migrations[9]?.timestamp).toBeDefined();
    expect(migrations[10]?.name).toBe("012_search");
    expect(migrations[10]?.timestamp).toBeDefined();
    expect(migrations[11]?.name).toBe("013_scheduled_publishing");
    expect(migrations[11]?.timestamp).toBeDefined();
    expect(migrations[12]?.name).toBe("014_draft_revisions");
    expect(migrations[12]?.timestamp).toBeDefined();
    expect(migrations[13]?.name).toBe("015_indexes");
    expect(migrations[13]?.timestamp).toBeDefined();
    expect(migrations[14]?.name).toBe("016_api_tokens");
    expect(migrations[14]?.timestamp).toBeDefined();
    expect(migrations[15]?.name).toBe("017_authorization_codes");
    expect(migrations[15]?.timestamp).toBeDefined();
  });

  it("should be idempotent (running twice is safe)", async () => {
    await runMigrations(db);
    await runMigrations(db);
    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
    // Should still only have the same number of migration records
    expect(migrations).toHaveLength(MIGRATION_COUNT);
  });

  it("should re-run migrations 034 and 035 when schema changes were partially applied", async () => {
    // Swap the in-memory DB for one with collections already set up, then
    // forget two bookkeeping rows to simulate a partially-applied state.
    await db.destroy();
    db = await setupTestDatabaseWithCollections();
    await db
      .deleteFrom("_emdash_migrations")
      .where("name", "in", ["034_published_at_index", "035_bounded_404_log"])
      .execute();
    const { applied } = await runMigrations(db);
    expect(applied).toContain("034_published_at_index");
    expect(applied).toContain("035_bounded_404_log");
    const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
    expect(migrations).toHaveLength(MIGRATION_COUNT);
  });

  it("should report correct migration status", async () => {
    const statusBefore = await getMigrationStatus(db);
    expect(statusBefore.pending).toContain("001_initial");
    expect(statusBefore.pending).toContain("002_media_status");
    expect(statusBefore.applied).toHaveLength(0);
    await runMigrations(db);
    const statusAfter = await getMigrationStatus(db);
    expect(statusAfter.applied).toContain("001_initial");
    expect(statusAfter.applied).toContain("002_media_status");
    expect(statusAfter.pending).toHaveLength(0);
  });

  it("should create schema registry tables", async () => {
    await runMigrations(db);
    // Test collections table
    const testId = "test-collection";
    await db
      .insertInto("_emdash_collections")
      .values({
        id: testId,
        slug: "posts",
        label: "Posts",
        label_singular: "Post",
      })
      .execute();
    const collection = await db
      .selectFrom("_emdash_collections")
      .selectAll()
      .where("id", "=", testId)
      .executeTakeFirst();
    expect(collection).toBeDefined();
    expect(collection?.slug).toBe("posts");
    expect(collection?.label).toBe("Posts");
    // created_at comes from a column default, not the insert above.
    expect(collection?.created_at).toBeDefined();
  });

  it("should enforce unique constraint on collection slug", async () => {
    await runMigrations(db);
    await db
      .insertInto("_emdash_collections")
      .values({
        id: "id1",
        slug: "posts",
        label: "Posts",
      })
      .execute();
    // Attempting to insert duplicate slug should fail
    await expect(
      db
        .insertInto("_emdash_collections")
        .values({
          id: "id2",
          slug: "posts",
          label: "Posts Again",
        })
        .execute(),
    ).rejects.toThrow();
  });

  it("should create fields table with foreign key to collections", async () => {
    await runMigrations(db);
    // Create collection first
    const collectionId = "collection-1";
    await db
      .insertInto("_emdash_collections")
      .values({
        id: collectionId,
        slug: "posts",
        label: "Posts",
      })
      .execute();
    // Create field
    await db
      .insertInto("_emdash_fields")
      .values({
        id: "field-1",
        collection_id: collectionId,
        slug: "title",
        label: "Title",
        type: "string",
        column_type: "TEXT",
        required: 0,
        unique: 0,
        sort_order: 0,
      })
      .execute();
    const fields = await db
      .selectFrom("_emdash_fields")
      .selectAll()
      .where("collection_id", "=", collectionId)
      .execute();
    expect(fields).toHaveLength(1);
    expect(fields[0]?.slug).toBe("title");
  });

  it("should create revisions table with collection+entry_id", async () => {
    await runMigrations(db);
    // Create revision for a content entry
    await db
      .insertInto("revisions")
      .values({
        id: "rev-1",
        collection: "posts",
        entry_id: "entry-1",
        data: JSON.stringify({ title: "Revised" }),
      })
      .execute();
    const revisions = await db
      .selectFrom("revisions")
      .selectAll()
      .where("collection", "=", "posts")
      .where("entry_id", "=", "entry-1")
      .execute();
    expect(revisions).toHaveLength(1);
    expect(revisions[0]?.collection).toBe("posts");
  });

  it("should create users table with unique email constraint", async () => {
    await runMigrations(db);
    await db
      .insertInto("users")
      .values({
        id: "user-1",
        email: "test@example.com",
        name: "Test User",
        role: 50, // ADMIN
        email_verified: 1,
      })
      .execute();
    // Duplicate email should fail
    await expect(
      db
        .insertInto("users")
        .values({
          id: "user-2",
          email: "test@example.com",
          role: 10, // SUBSCRIBER
          email_verified: 1,
        })
        .execute(),
    ).rejects.toThrow();
  });

  it("should create taxonomies table with hierarchical support", async () => {
    await runMigrations(db);
    // Create parent category
    const parentId = "cat-parent";
    await db
      .insertInto("taxonomies")
      .values({
        id: parentId,
        name: "category",
        slug: "parent",
        label: "Parent Category",
      })
      .execute();
    // Create child category
    await db
      .insertInto("taxonomies")
      .values({
        id: "cat-child",
        name: "category",
        slug: "child",
        label: "Child Category",
        parent_id: parentId,
      })
      .execute();
    const child = await db
      .selectFrom("taxonomies")
      .selectAll()
      .where("id", "=", "cat-child")
      .executeTakeFirst();
    expect(child?.parent_id).toBe(parentId);
  });

  it("should create content_taxonomies junction table", async () => {
    await runMigrations(db);
    const taxonomyId = "tax-1";
    // Create taxonomy
    await db
      .insertInto("taxonomies")
      .values({
        id: taxonomyId,
        name: "category",
        slug: "tech",
        label: "Technology",
      })
      .execute();
    // Assign taxonomy to content entry (collection + entry_id)
    await db
      .insertInto("content_taxonomies")
      .values({
        collection: "posts",
        entry_id: "entry-1",
        taxonomy_id: taxonomyId,
      })
      .execute();
    const assignments = await db
      .selectFrom("content_taxonomies")
      .selectAll()
      .where("collection", "=", "posts")
      .where("entry_id", "=", "entry-1")
      .execute();
    expect(assignments).toHaveLength(1);
    expect(assignments[0]?.taxonomy_id).toBe(taxonomyId);
  });

  it("should create media table", async () => {
    await runMigrations(db);
    await db
      .insertInto("media")
      .values({
        id: "media-1",
        filename: "photo.jpg",
        mime_type: "image/jpeg",
        size: 1024000,
        width: 1920,
        height: 1080,
        alt: "Test photo",
        storage_key: "uploads/photo.jpg",
        status: "ready",
      })
      .execute();
    const media = await db
      .selectFrom("media")
      .selectAll()
      .where("id", "=", "media-1")
      .executeTakeFirst();
    expect(media).toBeDefined();
    expect(media?.width).toBe(1920);
    expect(media?.height).toBe(1080);
  });

  it("should create options table for key-value storage", async () => {
    await runMigrations(db);
    // Values are stored as JSON strings, so round-trip through JSON.parse.
    await db
      .insertInto("options")
      .values({
        name: "site_title",
        value: JSON.stringify("My Site"),
      })
      .execute();
    const option = await db
      .selectFrom("options")
      .selectAll()
      .where("name", "=", "site_title")
      .executeTakeFirst();
    expect(option).toBeDefined();
    expect(JSON.parse(option!.value)).toBe("My Site");
  });

  it("should create audit_logs table with indexes", async () => {
    await runMigrations(db);
    await db
      .insertInto("audit_logs")
      .values({
        id: "log-1",
        actor_id: "user-1",
        actor_ip: "192.168.1.1",
        action: "content:create",
        resource_type: "content",
        resource_id: "post-1",
        status: "success",
      })
      .execute();
    const logs = await db
      .selectFrom("audit_logs")
      .selectAll()
      .where("actor_id", "=", "user-1")
      .execute();
    expect(logs).toHaveLength(1);
    expect(logs[0]?.action).toBe("content:create");
  });
});

View File

@@ -0,0 +1,72 @@
/**
* OptionsRepository.setIfAbsent — atomic write-once semantics.
*
* Used by routes that must never overwrite a stored value once set
* (e.g. the setup wizard's emdash:site_url write). Correctness under
* concurrent writes is a security property: a non-atomic read-then-write
* lets a second caller win the race and poison the value.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Write-once semantics of setIfAbsent: the first caller wins, and every
// later caller — including concurrent ones — must observe the stored value
// untouched. Empty string and null both count as "present".
describe("OptionsRepository.setIfAbsent", () => {
  let db: Kysely<Database>;
  let repo: OptionsRepository;

  beforeEach(async () => {
    db = await setupTestDatabase();
    repo = new OptionsRepository(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  it("inserts when no row exists and returns true", async () => {
    const didInsert = await repo.setIfAbsent("emdash:site_url", "https://example.com");
    expect(didInsert).toBe(true);
    expect(await repo.get("emdash:site_url")).toBe("https://example.com");
  });

  it("does not overwrite an existing value and returns false", async () => {
    await repo.set("emdash:site_url", "https://real.example");
    const didInsert = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
    expect(didInsert).toBe(false);
    expect(await repo.get("emdash:site_url")).toBe("https://real.example");
  });

  it("treats an empty string as present (does not overwrite)", async () => {
    await repo.set("emdash:site_url", "");
    const didInsert = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
    expect(didInsert).toBe(false);
    expect(await repo.get("emdash:site_url")).toBe("");
  });

  it("treats a stored null as present (does not overwrite)", async () => {
    await repo.set("emdash:site_url", null);
    const didInsert = await repo.setIfAbsent("emdash:site_url", "https://attacker.example");
    expect(didInsert).toBe(false);
    expect(await repo.get("emdash:site_url")).toBeNull();
  });

  it("is atomic under concurrent callers — only one insert succeeds", async () => {
    const candidates = ["https://a.example", "https://b.example", "https://c.example"];
    const outcomes = await Promise.all(
      candidates.map((url) => repo.setIfAbsent("emdash:site_url", url)),
    );
    // Exactly one caller inserted; the others saw the existing row.
    expect(outcomes.filter((won) => won)).toHaveLength(1);
    expect(outcomes.filter((won) => !won)).toHaveLength(2);
    // And whichever value landed first now sticks.
    const winner = await repo.get("emdash:site_url");
    expect(candidates).toContain(winner);
  });
});

View File

@@ -0,0 +1,94 @@
{
"$schema": "https://emdashcms.com/seed.schema.json",
"version": "1",
"meta": {
"name": "E2E Test Fixture",
"description": "Schema for E2E tests"
},
"taxonomies": [
{
"name": "categories",
"label": "Categories",
"labelSingular": "Category",
"hierarchical": true,
"collections": ["posts"],
"terms": [
{ "slug": "news", "label": "News" },
{ "slug": "tutorials", "label": "Tutorials" },
{ "slug": "opinion", "label": "Opinion" }
]
}
],
"sections": [
{
"slug": "hero",
"title": "Hero Section",
"description": "Main hero area",
"content": [
{
"_type": "block",
"_key": "b1",
"style": "normal",
"children": [{ "_type": "span", "_key": "s1", "text": "Welcome to our site" }],
"markDefs": []
}
]
}
],
"collections": [
{
"slug": "posts",
"label": "Posts",
"labelSingular": "Post",
"fields": [
{
"slug": "title",
"label": "Title",
"type": "string",
"required": true
},
{
"slug": "body",
"label": "Body",
"type": "portableText"
},
{
"slug": "excerpt",
"label": "Excerpt",
"type": "text"
},
{
"slug": "theme_color",
"label": "Theme Color",
"type": "string",
"widget": "color:picker"
}
]
},
{
"slug": "pages",
"label": "Pages",
"labelSingular": "Page",
"fields": [
{
"slug": "title",
"label": "Title",
"type": "string",
"required": true
},
{
"slug": "body",
"label": "Body",
"type": "portableText"
}
]
}
],
"bylines": [
{
"id": "fixture-editorial",
"slug": "fixture-editorial",
"displayName": "Fixture Editorial"
}
]
}

View File

@@ -0,0 +1,51 @@
/**
* Minimal Astro config for e2e tests.
*
* Uses EMDASH_TEST_DB env var for the database path so each
* test run gets an isolated database.
*/
import node from "@astrojs/node";
import react from "@astrojs/react";
import { colorPlugin } from "@emdash-cms/plugin-color";
import { defineConfig } from "astro/config";
import emdash, { local } from "emdash/astro";
import { sqlite } from "emdash/db";
// Per-run isolation: the e2e harness injects a unique database path and
// uploads directory through the environment; fall back to local defaults.
const dbUrl = process.env.EMDASH_TEST_DB || "file:./test.db";
const uploadsDir = process.env.EMDASH_TEST_UPLOADS || "./uploads";

/**
 * Interpret an environment variable as a positive integer byte limit.
 * Unset/empty, non-numeric, zero, or negative input yields undefined so
 * the integration falls back to its built-in default.
 */
function parseMaxUploadSize(raw: string | undefined): number | undefined {
  if (!raw) return undefined;
  const value = parseInt(raw, 10);
  return Number.isFinite(value) && value > 0 ? value : undefined;
}

const maxUploadSize = parseMaxUploadSize(process.env.EMDASH_MAX_UPLOAD_SIZE);

export default defineConfig({
  output: "server",
  adapter: node({ mode: "standalone" }),
  integrations: [
    react(),
    emdash({
      database: sqlite({ url: dbUrl }),
      storage: local({ directory: uploadsDir, baseUrl: "/_emdash/api/media/file" }),
      maxUploadSize,
      plugins: [colorPlugin()],
    }),
  ],
  i18n: {
    defaultLocale: "en",
    locales: ["en", "fr", "es"],
    fallback: { fr: "en", es: "en" },
  },
  devToolbar: { enabled: false },
  vite: {
    server: {
      fs: {
        // When running from a temp dir, node_modules is symlinked back to the
        // monorepo. Vite needs permission to serve files from the real paths.
        strict: false,
      },
    },
  },
});

View File

@@ -0,0 +1,39 @@
// Generated by EmDash on dev server start
// Do not edit manually
/// <reference types="emdash/locals" />
import type { ContentBylineCredit, PortableTextBlock } from "emdash";
export interface Page {
id: string;
slug: string | null;
status: string;
title: string;
body?: PortableTextBlock[];
createdAt: Date;
updatedAt: Date;
publishedAt: Date | null;
bylines?: ContentBylineCredit[];
}
export interface Post {
id: string;
slug: string | null;
status: string;
title: string;
body?: PortableTextBlock[];
excerpt?: string;
theme_color?: string;
createdAt: Date;
updatedAt: Date;
publishedAt: Date | null;
bylines?: ContentBylineCredit[];
}
declare module "emdash" {
interface EmDashCollections {
pages: Page;
posts: Post;
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "emdash-integration-fixture",
"private": true,
"type": "module",
"dependencies": {
"@astrojs/node": "catalog:",
"@astrojs/react": "catalog:",
"@emdash-cms/auth": "workspace:*",
"@emdash-cms/plugin-color": "workspace:*",
"astro": "catalog:",
"better-sqlite3": "^11.10.0",
"emdash": "workspace:*",
"react": "^19.1.0",
"react-dom": "^19.1.0"
}
}

View File

@@ -0,0 +1 @@
/// <reference types="astro/client" />

View File

@@ -0,0 +1,6 @@
import { defineLiveCollection } from "astro:content";
import { emdashLoader } from "emdash/runtime";
export const collections = {
  // Single live collection served by the EmDash runtime loader; entries
  // are resolved at request time rather than at build time.
  _emdash: defineLiveCollection({ loader: emdashLoader() }),
};

View File

@@ -0,0 +1,21 @@
---
// E2E fixture page: lists every post returned by getEmDashCollection so
// tests can assert on titles (and optional excerpts) inside #post-list,
// or on the #empty marker when no posts exist.
import { getEmDashCollection } from "emdash";
const { entries: posts } = await getEmDashCollection("posts");
---

<html>
  <body>
    <h1>Posts</h1>
    <ul id="post-list">
      {
        posts.map((p) => (
          <li>
            <a href={`/posts/${p.id}`}>{p.data.title}</a>
            {p.data.excerpt && <span class="excerpt">{p.data.excerpt}</span>}
          </li>
        ))
      }
    </ul>
    {posts.length === 0 && <p id="empty">No posts</p>}
  </body>
</html>

View File

@@ -0,0 +1,21 @@
---
// E2E fixture page: renders a single post (looked up by slug/id) with its
// Portable Text body, plus the comment list and form exercised by the
// comment e2e suite. Missing slug redirects; unknown slug returns 404.
import { getEmDashEntry } from "emdash";
import { PortableText, Comments, CommentForm } from "emdash/ui";
const { slug } = Astro.params;
if (!slug) return Astro.redirect("/404");
const { entry: post } = await getEmDashEntry("posts", slug);
if (!post) return new Response("Not found", { status: 404 });
---

<html>
  <body>
    <article>
      <h1 id="title">{post.data.title}</h1>
      {post.data.excerpt && <p id="excerpt">{post.data.excerpt}</p>}
      <div id="body"><PortableText value={post.data.body} /></div>
    </article>
    <Comments collection="posts" contentId={post.data.id} threaded />
    <CommentForm collection="posts" contentId={post.data.id} />
  </body>
</html>

View File

@@ -0,0 +1,5 @@
{
"extends": "astro/tsconfigs/base",
"compilerOptions": { "types": ["node"] },
"include": ["src", ".astro/types.d.ts"]
}

View File

@@ -0,0 +1,839 @@
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import { searchWithDb } from "../../../src/search/query.js";
import { applySeed } from "../../../src/seed/apply.js";
import type { SeedFile } from "../../../src/seed/types.js";
import { validateSeed } from "../../../src/seed/validate.js";
import { createPostFixture } from "../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("i18n (Integration)", () => {
let db: Kysely<Database>;
let repo: ContentRepository;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
repo = new ContentRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
// ─── 1. Migration — i18n columns exist ──────────────────────────
// Schema-level checks: the i18n migration must add locale/translation_group
// columns, a compound slug+locale uniqueness rule, and supporting indexes.
describe("Migration — i18n columns", () => {
  it("should have locale and translation_group columns on content tables", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA table_info(ec_post)
    `.execute(db);
    const columnNames = result.rows.map((r) => r.name);
    expect(columnNames).toContain("locale");
    expect(columnNames).toContain("translation_group");
  });

  it("should default locale to 'en'", async () => {
    const result = await sql<{ name: string; dflt_value: string | null }>`
      PRAGMA table_info(ec_post)
    `.execute(db);
    const localeCol = result.rows.find((r) => r.name === "locale");
    expect(localeCol).toBeDefined();
    // SQLite reports text defaults quoted, hence the literal 'en'.
    expect(localeCol!.dflt_value).toBe("'en'");
  });

  it("should have translatable column on _emdash_fields", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA table_info(_emdash_fields)
    `.execute(db);
    const columnNames = result.rows.map((r) => r.name);
    expect(columnNames).toContain("translatable");
  });

  it("should have compound unique constraint on slug+locale", async () => {
    // Insert same slug, different locale — should succeed
    await sql`
      INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
      VALUES ('id1', 'hello', 'en', 'id1', 'draft', 1, datetime('now'), datetime('now'))
    `.execute(db);
    await sql`
      INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
      VALUES ('id2', 'hello', 'fr', 'id1', 'draft', 1, datetime('now'), datetime('now'))
    `.execute(db);
    // Same slug, same locale — should fail
    await expect(
      sql`
        INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
        VALUES ('id3', 'hello', 'en', 'id3', 'draft', 1, datetime('now'), datetime('now'))
      `.execute(db),
    ).rejects.toThrow();
  });

  it("should have locale and translation_group indexes", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA index_list(ec_post)
    `.execute(db);
    const indexNames = result.rows.map((r) => r.name);
    expect(indexNames).toContain("idx_ec_post_locale");
    expect(indexNames).toContain("idx_ec_post_translation_group");
  });
});
// ─── 2. ContentRepository — locale-aware CRUD ───────────────────
// Repository-level behavior: locale defaults, translation grouping via
// translationOf, slug uniqueness per locale, and locale filters on every
// read path (findBySlug / findByIdOrSlug / findMany / count /
// findTranslations).
describe("ContentRepository — locale-aware CRUD", () => {
  it("create() without locale defaults to 'en'", async () => {
    const post = await repo.create(createPostFixture());
    expect(post.locale).toBe("en");
  });

  it("create() with explicit locale stores it", async () => {
    const post = await repo.create(createPostFixture({ locale: "fr", slug: "bonjour" }));
    expect(post.locale).toBe("fr");
  });

  it("create() with translationOf links via translation_group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello-world", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour-monde",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour le Monde" },
      }),
    );
    // Both should share the same translation_group
    expect(frPost.translationGroup).toBe(enPost.translationGroup);
    // The group should be the original item's id (since it was first)
    expect(enPost.translationGroup).toBe(enPost.id);
  });

  it("create() with translationOf on a chained translation uses the root group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    // Create a third translation linked to the French version
    const dePost = await repo.create(
      createPostFixture({
        slug: "hallo",
        locale: "de",
        translationOf: frPost.id,
        data: { title: "Hallo" },
      }),
    );
    // All three should share the same translation_group
    expect(dePost.translationGroup).toBe(enPost.id);
    expect(frPost.translationGroup).toBe(enPost.id);
  });

  it("create() with translationOf pointing to non-existent ID throws", async () => {
    await expect(
      repo.create(
        createPostFixture({
          slug: "orphan",
          locale: "fr",
          translationOf: "NONEXISTENT_ID_12345678",
        }),
      ),
    ).rejects.toThrow("Translation source content not found");
  });

  it("same slug different locales are allowed", async () => {
    const en = await repo.create(createPostFixture({ slug: "about", locale: "en" }));
    const fr = await repo.create(
      createPostFixture({
        slug: "about",
        locale: "fr",
        data: { title: "À propos" },
      }),
    );
    expect(en.slug).toBe("about");
    expect(fr.slug).toBe("about");
    expect(en.id).not.toBe(fr.id);
  });

  it("same slug same locale is rejected", async () => {
    await repo.create(createPostFixture({ slug: "unique-slug", locale: "en" }));
    await expect(
      repo.create(
        createPostFixture({
          slug: "unique-slug",
          locale: "en",
          data: { title: "Duplicate" },
        }),
      ),
    ).rejects.toThrow();
  });

  // ── findBySlug ────────────────────────────────────────────────
  it("findBySlug() without locale returns any match", async () => {
    await repo.create(createPostFixture({ slug: "shared-slug", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "shared-slug",
        locale: "fr",
        data: { title: "Version FR" },
      }),
    );
    // Which locale wins is unspecified here — only the slug is asserted.
    const found = await repo.findBySlug("post", "shared-slug");
    expect(found).not.toBeNull();
    expect(found!.slug).toBe("shared-slug");
  });

  it("findBySlug() with locale filters to that locale", async () => {
    await repo.create(createPostFixture({ slug: "about", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "about",
        locale: "fr",
        data: { title: "À propos" },
      }),
    );
    const en = await repo.findBySlug("post", "about", "en");
    expect(en).not.toBeNull();
    expect(en!.locale).toBe("en");
    const fr = await repo.findBySlug("post", "about", "fr");
    expect(fr).not.toBeNull();
    expect(fr!.locale).toBe("fr");
    const de = await repo.findBySlug("post", "about", "de");
    expect(de).toBeNull();
  });

  // ── findByIdOrSlug ────────────────────────────────────────────
  it("findByIdOrSlug() — ID lookup ignores locale param", async () => {
    const post = await repo.create(createPostFixture({ slug: "test-post", locale: "en" }));
    // ID lookup should find it regardless of locale param
    const found = await repo.findByIdOrSlug("post", post.id, "fr");
    expect(found).not.toBeNull();
    expect(found!.id).toBe(post.id);
    expect(found!.locale).toBe("en");
  });

  it("findByIdOrSlug() — slug lookup respects locale", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "test", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "test",
        locale: "fr",
        data: { title: "Test FR" },
      }),
    );
    const foundEn = await repo.findByIdOrSlug("post", "test", "en");
    expect(foundEn).not.toBeNull();
    expect(foundEn!.id).toBe(enPost.id);
    const foundFr = await repo.findByIdOrSlug("post", "test", "fr");
    expect(foundFr).not.toBeNull();
    expect(foundFr!.id).toBe(frPost.id);
  });

  // ── findMany ──────────────────────────────────────────────────
  it("findMany() without locale returns all locales", async () => {
    await repo.create(createPostFixture({ slug: "en-post", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "fr-post",
        locale: "fr",
        data: { title: "Post FR" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "de-post",
        locale: "de",
        data: { title: "Post DE" },
      }),
    );
    const result = await repo.findMany("post");
    expect(result.items).toHaveLength(3);
  });

  it("findMany() with locale filters to that locale", async () => {
    await repo.create(createPostFixture({ slug: "en-post", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "fr-post",
        locale: "fr",
        data: { title: "Post FR" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "de-post",
        locale: "de",
        data: { title: "Post DE" },
      }),
    );
    const frResult = await repo.findMany("post", {
      where: { locale: "fr" },
    });
    expect(frResult.items).toHaveLength(1);
    expect(frResult.items[0]!.locale).toBe("fr");
  });

  // ── count ─────────────────────────────────────────────────────
  it("count() without locale counts all", async () => {
    await repo.create(createPostFixture({ slug: "post-en", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "post-fr",
        locale: "fr",
        data: { title: "FR" },
      }),
    );
    const total = await repo.count("post");
    expect(total).toBe(2);
  });

  it("count() with locale counts only that locale", async () => {
    await repo.create(createPostFixture({ slug: "post-en", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "post-fr",
        locale: "fr",
        data: { title: "FR" },
      }),
    );
    const enCount = await repo.count("post", { locale: "en" });
    expect(enCount).toBe(1);
    const deCount = await repo.count("post", { locale: "de" });
    expect(deCount).toBe(0);
  });

  // ── findTranslations ──────────────────────────────────────────
  it("findTranslations() returns all locales for a translation group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "hallo",
        locale: "de",
        translationOf: enPost.id,
        data: { title: "Hallo" },
      }),
    );
    const translations = await repo.findTranslations("post", enPost.translationGroup!);
    expect(translations).toHaveLength(3);
    // Sort locales for a deterministic comparison (null-safe compare).
    const locales = translations
      .map((t) => t.locale)
      .toSorted((a, b) => (a ?? "").localeCompare(b ?? ""));
    expect(locales).toEqual(["de", "en", "fr"]);
  });

  it("findTranslations() returns only non-deleted items", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    // Soft-delete the French translation
    await repo.delete("post", frPost.id);
    const translations = await repo.findTranslations("post", enPost.translationGroup!);
    expect(translations).toHaveLength(1);
    expect(translations[0]!.locale).toBe("en");
  });
});
// ─── 3. FTS — locale-aware search ───────────────────────────────
describe("FTS — locale-aware search", () => {
let registry: SchemaRegistry;
let ftsManager: FTSManager;
beforeEach(async () => {
registry = new SchemaRegistry(db);
ftsManager = new FTSManager(db);
// Mark title as searchable and enable FTS
await registry.updateField("post", "title", { searchable: true });
await ftsManager.enableSearch("post");
});
it("search with locale filter returns only that locale's results", async () => {
// Create published posts in different locales
const enPost = await repo.create(
createPostFixture({
slug: "hello-world",
locale: "en",
status: "published",
data: { title: "Hello World" },
}),
);
const frPost = await repo.create(
createPostFixture({
slug: "bonjour-monde",
locale: "fr",
status: "published",
data: { title: "Bonjour le Monde" },
}),
);
// Search for "world" — English only
const enResults = await searchWithDb(db, "Hello", {
collections: ["post"],
locale: "en",
status: "published",
});
expect(enResults.items.length).toBeGreaterThanOrEqual(1);
expect(enResults.items.every((r) => r.locale === "en")).toBe(true);
expect(enResults.items.some((r) => r.id === enPost.id)).toBe(true);
// Search for "Bonjour" — French only
const frResults = await searchWithDb(db, "Bonjour", {
collections: ["post"],
locale: "fr",
status: "published",
});
expect(frResults.items.length).toBeGreaterThanOrEqual(1);
expect(frResults.items.every((r) => r.locale === "fr")).toBe(true);
expect(frResults.items.some((r) => r.id === frPost.id)).toBe(true);
});
it("search without locale returns results from all locales", async () => {
await repo.create(
createPostFixture({
slug: "universal-en",
locale: "en",
status: "published",
data: { title: "Universal Content" },
}),
);
await repo.create(
createPostFixture({
slug: "universal-fr",
locale: "fr",
status: "published",
data: { title: "Universal Contenu" },
}),
);
const results = await searchWithDb(db, "Universal", {
collections: ["post"],
status: "published",
});
expect(results.items).toHaveLength(2);
const locales = results.items.map((r) => r.locale).toSorted();
expect(locales).toEqual(["en", "fr"]);
});
it("FTS index includes locale column", async () => {
// Verify the FTS table has the locale column by checking structure
const exists = await ftsManager.ftsTableExists("post");
expect(exists).toBe(true);
// Create a post and verify it appears in FTS results with locale
await repo.create(
createPostFixture({
slug: "fts-test",
locale: "ja",
status: "published",
data: { title: "FTS Locale Test" },
}),
);
const results = await searchWithDb(db, "FTS Locale", {
collections: ["post"],
locale: "ja",
status: "published",
});
expect(results.items).toHaveLength(1);
expect(results.items[0]!.locale).toBe("ja");
});
it("rebuilt index preserves locale-aware search", async () => {
// Create content before rebuild
await repo.create(
createPostFixture({
slug: "pre-rebuild-en",
locale: "en",
status: "published",
data: { title: "Rebuild Test English" },
}),
);
await repo.create(
createPostFixture({
slug: "pre-rebuild-fr",
locale: "fr",
status: "published",
data: { title: "Rebuild Test French" },
}),
);
// Rebuild the index
await ftsManager.rebuildIndex("post", ["title"]);
// Verify locale-aware search still works
const enResults = await searchWithDb(db, "Rebuild", {
collections: ["post"],
locale: "en",
status: "published",
});
expect(enResults.items).toHaveLength(1);
expect(enResults.items[0]!.locale).toBe("en");
});
});
// ─── 4. Seed — locale-aware content ─────────────────────────────
describe("Seed — locale-aware content", () => {
it("applySeed() creates content with locale and translationOf", async () => {
const seed: SeedFile = {
version: "1",
content: {
post: [
{
id: "welcome",
slug: "welcome",
locale: "en",
status: "published",
data: { title: "Welcome" },
},
{
id: "welcome-fr",
slug: "bienvenue",
locale: "fr",
translationOf: "welcome",
status: "draft",
data: { title: "Bienvenue" },
},
{
id: "welcome-de",
slug: "willkommen",
locale: "de",
translationOf: "welcome",
status: "published",
data: { title: "Willkommen" },
},
],
},
};
const result = await applySeed(db, seed, { includeContent: true });
expect(result.content.created).toBe(3);
expect(result.content.skipped).toBe(0);
// Verify the entries exist with correct locales
const seedRepo = new ContentRepository(db);
const enPost = await seedRepo.findBySlug("post", "welcome", "en");
const frPost = await seedRepo.findBySlug("post", "bienvenue", "fr");
const dePost = await seedRepo.findBySlug("post", "willkommen", "de");
expect(enPost).not.toBeNull();
expect(frPost).not.toBeNull();
expect(dePost).not.toBeNull();
expect(enPost!.locale).toBe("en");
expect(frPost!.locale).toBe("fr");
expect(dePost!.locale).toBe("de");
// All should share the same translation_group
expect(frPost!.translationGroup).toBe(enPost!.translationGroup);
expect(dePost!.translationGroup).toBe(enPost!.translationGroup);
});
it("applySeed() without locale falls back to default", async () => {
const seed: SeedFile = {
version: "1",
content: {
post: [
{
id: "plain",
slug: "plain-post",
data: { title: "No Locale" },
},
],
},
};
const result = await applySeed(db, seed, { includeContent: true });
expect(result.content.created).toBe(1);
const plainRepo = new ContentRepository(db);
const post = await plainRepo.findBySlug("post", "plain-post");
expect(post).not.toBeNull();
expect(post!.locale).toBe("en"); // default
expect(post!.translationGroup).toBe(post!.id); // self-reference
});
it("applySeed() skips existing entries with locale-aware lookup", async () => {
// Pre-create an entry
const skipRepo = new ContentRepository(db);
await skipRepo.create(createPostFixture({ slug: "existing", locale: "fr" }));
const seed: SeedFile = {
version: "1",
content: {
post: [
{
id: "existing",
slug: "existing",
locale: "fr",
data: { title: "Should Skip" },
},
],
},
};
const result = await applySeed(db, seed, { includeContent: true });
expect(result.content.skipped).toBe(1);
expect(result.content.created).toBe(0);
});
it("applySeed() rejects missing translationOf via validation", async () => {
const seed: SeedFile = {
version: "1",
content: {
post: [
{
id: "orphan-fr",
slug: "orphelin",
locale: "fr",
translationOf: "nonexistent",
data: { title: "Orphan" },
},
],
},
};
// Validation catches the bad reference before applySeed runs
await expect(applySeed(db, seed, { includeContent: true })).rejects.toThrow(
'references "nonexistent" which is not in this collection',
);
});
});
// ─── 5. Seed validation — i18n fields ───────────────────────────
describe("Seed validation — i18n fields", () => {
it("validates translationOf requires locale", () => {
const seed = {
version: "1",
content: {
posts: [
{ id: "en", slug: "hello", data: { title: "Hello" } },
{
id: "fr",
slug: "bonjour",
translationOf: "en",
data: { title: "Bonjour" },
},
],
},
};
const result = validateSeed(seed);
expect(result.valid).toBe(false);
expect(result.errors.some((e) => e.includes("locale is required when translationOf"))).toBe(
true,
);
});
it("validates translationOf references exist", () => {
const seed = {
version: "1",
content: {
posts: [
{
id: "fr",
slug: "bonjour",
locale: "fr",
translationOf: "nonexistent",
data: { title: "Bonjour" },
},
],
},
};
const result = validateSeed(seed);
expect(result.valid).toBe(false);
expect(
result.errors.some((e) => e.includes('references "nonexistent" which is not in')),
).toBe(true);
});
it("valid seed with i18n fields passes validation", () => {
const seed = {
version: "1",
content: {
posts: [
{ id: "en", slug: "hello", locale: "en", data: { title: "Hello" } },
{
id: "fr",
slug: "bonjour",
locale: "fr",
translationOf: "en",
data: { title: "Bonjour" },
},
],
},
};
const result = validateSeed(seed);
expect(result.valid).toBe(true);
expect(result.errors).toHaveLength(0);
});
});
// ─── 6. Non-i18n regression ─────────────────────────────────────
describe("Non-i18n regression", () => {
it("content created without locale has locale 'en'", async () => {
const post = await repo.create({
type: "post",
slug: "no-locale",
data: { title: "No Locale Specified" },
});
expect(post.locale).toBe("en");
});
it("findMany without locale param returns all results", async () => {
await repo.create(createPostFixture({ slug: "post-1" }));
await repo.create(createPostFixture({ slug: "post-2" }));
const result = await repo.findMany("post");
expect(result.items).toHaveLength(2);
});
it("findBySlug works without locale param", async () => {
const created = await repo.create(createPostFixture({ slug: "find-me" }));
const found = await repo.findBySlug("post", "find-me");
expect(found).not.toBeNull();
expect(found!.id).toBe(created.id);
});
it("findByIdOrSlug works without locale param", async () => {
const created = await repo.create(createPostFixture({ slug: "lookup-test" }));
// By slug
const bySlug = await repo.findByIdOrSlug("post", "lookup-test");
expect(bySlug).not.toBeNull();
expect(bySlug!.id).toBe(created.id);
// By ID
const byId = await repo.findByIdOrSlug("post", created.id);
expect(byId).not.toBeNull();
expect(byId!.id).toBe(created.id);
});
it("slug uniqueness is still enforced within the same locale", async () => {
await repo.create(createPostFixture({ slug: "dupe-test" }));
// Same slug, same default locale — should fail
await expect(repo.create(createPostFixture({ slug: "dupe-test" }))).rejects.toThrow();
});
it("count works without locale param", async () => {
await repo.create(createPostFixture({ slug: "count-1" }));
await repo.create(createPostFixture({ slug: "count-2" }));
const count = await repo.count("post");
expect(count).toBe(2);
});
it("translation_group is auto-set to item id when no translationOf", async () => {
const post = await repo.create(createPostFixture({ slug: "standalone" }));
expect(post.translationGroup).toBe(post.id);
});
it("existing CRUD operations are unaffected by i18n columns", async () => {
// Create
const post = await repo.create(createPostFixture({ slug: "crud-test", status: "draft" }));
expect(post.status).toBe("draft");
// Update
const updated = await repo.update("post", post.id, {
data: { title: "Updated Title" },
});
expect(updated.data.title).toBe("Updated Title");
expect(updated.locale).toBe("en"); // locale unchanged
// Delete (soft)
const deleted = await repo.delete("post", post.id);
expect(deleted).toBe(true);
// Should not be found
const notFound = await repo.findById("post", post.id);
expect(notFound).toBeNull();
// Restore
const restored = await repo.restore("post", post.id);
expect(restored).toBe(true);
const found = await repo.findById("post", post.id);
expect(found).not.toBeNull();
expect(found!.locale).toBe("en");
});
});
});

View File

@@ -0,0 +1,183 @@
/**
* MCP concurrency tests — InMemoryTransport surface.
*
* Exercises the runtime + handler + tool dispatch under concurrent
* invocation: shared mutable state, race conditions in tool registration,
* draft revision creation under load. The HTTP-transport-level 401 race
* (where parallel requests sometimes lose the runtime singleton during
* cold-start) lives in the smoke test against a live server, since
* InMemoryTransport doesn't exercise the auth middleware path.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
describe("MCP concurrency — in-memory transport (bug #8 partial)", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("14 parallel read calls all succeed (no spurious failures)", async () => {
// 14 batched calls — covers the same fan-out a real session sees.
// Over the in-memory transport the auth path is bypassed, so a
// failure here would indicate a runtime-level race rather than the
// HTTP-transport 401 issue (which the smoke test covers).
// Each iteration must call the tool fresh — .fill() would reuse one
// Promise. `void i` keeps the lint rule from misreading the callback
// as constant.
const callPromises = Array.from({ length: 14 }, (_, i) => {
void i;
return harness.client.callTool({ name: "schema_list_collections", arguments: {} });
});
const results = await Promise.all(callPromises);
for (const result of results) {
expect(result.isError, extractText(result)).toBeFalsy();
}
});
it("mixed read/write calls in parallel maintain correctness", async () => {
// 5 creates + 5 lists running concurrently. Final list count must
// equal initial count + creates that succeeded.
const initial = await harness.client.callTool({
name: "content_list",
arguments: { collection: "post" },
});
const initialCount = extractJson<{ items: unknown[] }>(initial).items.length;
const work = [
...Array.from({ length: 5 }, (_, i) =>
harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: `parallel ${i}` } },
}),
),
...Array.from({ length: 5 }, (_, i) => {
void i;
return harness.client.callTool({
name: "content_list",
arguments: { collection: "post" },
});
}),
];
const results = await Promise.all(work);
// Count successful creates
const createsSuccessful = results
.slice(0, 5)
.filter((r) => !(r as { isError?: boolean }).isError).length;
expect(createsSuccessful).toBe(5);
// Final list should reflect all creates
const final = await harness.client.callTool({
name: "content_list",
arguments: { collection: "post" },
});
const finalCount = extractJson<{ items: unknown[] }>(final).items.length;
expect(finalCount).toBe(initialCount + 5);
});
it("parallel updates to the same item don't corrupt state", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Original" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
// 10 concurrent updates with different titles
const work = Array.from({ length: 10 }, (_, i) =>
harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: `update ${i}` } },
}),
);
const results = await Promise.all(work);
for (const result of results) {
expect(result.isError, extractText(result)).toBeFalsy();
}
// Final state should be a valid title from one of the updates,
// not corrupted or empty.
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
const item = extractJson<{ item: { title?: unknown; data?: { title?: unknown } } }>(got).item;
const title = item.data?.title ?? item.title;
expect(typeof title).toBe("string");
expect(title).toMatch(/^(Original|update \d)$/);
});
it("parallel calls don't leak data across users", async () => {
// Two harnesses on the same DB, one ADMIN, one CONTRIBUTOR.
// Concurrent reads should each see their own permitted view.
const userTwo = await connectMcpHarness({
db,
userId: "user_contrib",
userRole: Role.CONTRIBUTOR,
});
try {
// Admin creates an item
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "by admin" } },
});
expect(created.isError, extractText(created)).toBeFalsy();
const id = extractJson<{ item: { id: string } }>(created).item.id;
// 10 concurrent updates: 5 from admin (allowed), 5 from contributor
// who isn't the author (denied). All admin updates should succeed,
// all contributor updates should fail — no cross-contamination.
const adminWork = Array.from({ length: 5 }, (_, i) =>
harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: `admin ${i}` } },
}),
);
const contribWork = Array.from({ length: 5 }, (_, i) =>
userTwo.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: `contrib ${i}` } },
}),
);
const [adminResults, contribResults] = await Promise.all([
Promise.all(adminWork),
Promise.all(contribWork),
]);
for (const r of adminResults) {
expect(r.isError, extractText(r)).toBeFalsy();
}
for (const r of contribResults) {
expect(r.isError).toBe(true);
}
} finally {
await userTwo.cleanup();
}
});
});

View File

@@ -0,0 +1,735 @@
/**
* MCP content tools — coverage for the remaining tools and edges.
*
* Covers:
* - content_duplicate
* - content_permanent_delete
* - content_translations + locale handling on create/get
* - _rev optimistic concurrency (happy + race)
* - Soft-delete visibility (content_get / content_list filtering)
* - Edit-while-trashed
* - Idempotency (publish twice, unpublish-on-draft, schedule + publish)
*/
import { Role } from "@emdash-cms/auth";
import { sql, type Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
// ---------------------------------------------------------------------------
// content_duplicate
// ---------------------------------------------------------------------------
describe("content_duplicate", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("creates a copy with new id and slug", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Original" }, slug: "original" },
});
const original = extractJson<{ item: { id: string; slug: string } }>(created).item;
const dup = await harness.client.callTool({
name: "content_duplicate",
arguments: { collection: "post", id: original.id },
});
expect(dup.isError, extractText(dup)).toBeFalsy();
const copy = extractJson<{ item: { id: string; slug: string; status: string } }>(dup).item;
expect(copy.id).not.toBe(original.id);
expect(copy.slug).not.toBe(original.slug);
// Created as draft per tool description
expect(copy.status).toBe("draft");
});
it("rejects duplicating a missing item", async () => {
const result = await harness.client.callTool({
name: "content_duplicate",
arguments: { collection: "post", id: "01NEVER" },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
});
it("rejects duplicating in non-existent collection", async () => {
const result = await harness.client.callTool({
name: "content_duplicate",
arguments: { collection: "ghost", id: "01NEVER" },
});
expect(result.isError).toBe(true);
});
it("requires CONTRIBUTOR or higher", async () => {
await harness.cleanup();
harness = await connectMcpHarness({
db,
userId: "user_subscriber",
userRole: Role.SUBSCRIBER,
});
const result = await harness.client.callTool({
name: "content_duplicate",
arguments: { collection: "post", id: "01ANY" },
});
expect(result.isError).toBe(true);
});
});
// ---------------------------------------------------------------------------
// content_permanent_delete
// ---------------------------------------------------------------------------
describe("content_permanent_delete", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
async function seedTrashedItem(): Promise<string> {
const repo = new ContentRepository(db);
const item = await repo.create({
type: "post",
data: { title: "T" },
slug: `t-${Math.random().toString(36).slice(2, 6)}`,
status: "draft",
authorId: ADMIN_ID,
});
await repo.delete("post", item.id);
return item.id;
}
it("permanently deletes a trashed item (ADMIN)", async () => {
const id = await seedTrashedItem();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "content_permanent_delete",
arguments: { collection: "post", id },
});
expect(result.isError, extractText(result)).toBeFalsy();
// Verify it's gone — not even in trash
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
expect(got.isError).toBe(true);
});
it("EDITOR cannot permanent-delete (ADMIN-only)", async () => {
const id = await seedTrashedItem();
harness = await connectMcpHarness({ db, userId: "user_editor", userRole: Role.EDITOR });
const result = await harness.client.callTool({
name: "content_permanent_delete",
arguments: { collection: "post", id },
});
expect(result.isError).toBe(true);
});
it("returns NOT_FOUND for missing id", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "content_permanent_delete",
arguments: { collection: "post", id: "01NEVEREXISTED" },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
expect(extractText(result)).toContain("01NEVEREXISTED");
});
});
// ---------------------------------------------------------------------------
// content_translations + locale handling
// ---------------------------------------------------------------------------
describe("content_translations + locale", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("creates a translation linked via translationOf", async () => {
const en = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Hello" }, locale: "en" },
});
const enId = extractJson<{ item: { id: string } }>(en).item.id;
const fr = await harness.client.callTool({
name: "content_create",
arguments: {
collection: "post",
data: { title: "Bonjour" },
locale: "fr",
translationOf: enId,
},
});
expect(fr.isError, extractText(fr)).toBeFalsy();
const trans = await harness.client.callTool({
name: "content_translations",
arguments: { collection: "post", id: enId },
});
expect(trans.isError, extractText(trans)).toBeFalsy();
const data = extractJson<{
translations: Array<{ id: string; locale: string }>;
}>(trans);
const locales = data.translations.map((t) => t.locale).toSorted();
expect(locales).toEqual(["en", "fr"]);
});
it("returns single-locale translations array for content with no other translations", async () => {
const en = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Standalone" }, locale: "en" },
});
const id = extractJson<{ item: { id: string } }>(en).item.id;
const result = await harness.client.callTool({
name: "content_translations",
arguments: { collection: "post", id },
});
expect(result.isError, extractText(result)).toBeFalsy();
const data = extractJson<{ translations: unknown[] }>(result);
expect(data.translations.length).toBeGreaterThanOrEqual(1);
});
it("content_get with locale param resolves slug per-locale", async () => {
await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "EN" }, slug: "shared", locale: "en" },
});
await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "FR" }, slug: "shared", locale: "fr" },
});
const en = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id: "shared", locale: "en" },
});
expect(en.isError, extractText(en)).toBeFalsy();
const fr = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id: "shared", locale: "fr" },
});
expect(fr.isError, extractText(fr)).toBeFalsy();
const enItem = extractJson<{
item: { locale: string; data?: { title?: unknown }; title?: unknown };
}>(en).item;
const frItem = extractJson<{
item: { locale: string; data?: { title?: unknown }; title?: unknown };
}>(fr).item;
const enTitle = enItem.data?.title ?? enItem.title;
const frTitle = frItem.data?.title ?? frItem.title;
expect(enTitle).toBe("EN");
expect(frTitle).toBe("FR");
});
it("rejects translationOf pointing to a non-existent item", async () => {
const result = await harness.client.callTool({
name: "content_create",
arguments: {
collection: "post",
data: { title: "Orphan" },
locale: "fr",
translationOf: "01NEVEREXISTED",
},
});
expect(result.isError).toBe(true);
});
});
// ---------------------------------------------------------------------------
// _rev optimistic concurrency
// ---------------------------------------------------------------------------
describe("_rev optimistic concurrency", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("content_get returns a _rev token", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "T" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
const data = extractJson<{ item: { id: string }; _rev?: string }>(got);
expect(data._rev).toBeTruthy();
});
it("content_update with current _rev succeeds", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Original" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
const rev = extractJson<{ _rev: string }>(got)._rev;
const updated = await harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: "Updated" }, _rev: rev },
});
expect(updated.isError, extractText(updated)).toBeFalsy();
});
it("content_update with stale _rev returns CONFLICT-style error", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Original" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
const oldRev = extractJson<{ _rev: string }>(got)._rev;
// First update: succeeds and bumps the rev
await harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: "Update 1" }, _rev: oldRev },
});
// Second update with stale rev: should conflict
const result = await harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: "Update 2" }, _rev: oldRev },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/conflict|stale|outdated|modified|rev/i);
});
it("content_update without _rev still succeeds (opt-in concurrency)", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "T" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
const result = await harness.client.callTool({
name: "content_update",
arguments: { collection: "post", id, data: { title: "U" } },
});
expect(result.isError, extractText(result)).toBeFalsy();
});
});
// ---------------------------------------------------------------------------
// Soft-delete visibility
// ---------------------------------------------------------------------------
describe("soft-delete visibility", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("content_get on a trashed item returns NOT_FOUND (not the item)", async () => {
const created = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "T" } },
});
const id = extractJson<{ item: { id: string } }>(created).item.id;
await harness.client.callTool({
name: "content_delete",
arguments: { collection: "post", id },
});
const got = await harness.client.callTool({
name: "content_get",
arguments: { collection: "post", id },
});
expect(got.isError).toBe(true);
expect(extractText(got)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
});
it("content_list does NOT include trashed items by default", async () => {
const a = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Live" } },
});
const b = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Trashed" } },
});
const trashedId = extractJson<{ item: { id: string } }>(b).item.id;
await harness.client.callTool({
name: "content_delete",
arguments: { collection: "post", id: trashedId },
});
const list = await harness.client.callTool({
name: "content_list",
arguments: { collection: "post" },
});
const ids = extractJson<{ items: Array<{ id: string }> }>(list).items.map((i) => i.id);
expect(ids).not.toContain(trashedId);
expect(ids).toContain(extractJson<{ item: { id: string } }>(a).item.id);
});
it("content_list_trashed returns only trashed items", async () => {
await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Live" } },
});
const b = await harness.client.callTool({
name: "content_create",
arguments: { collection: "post", data: { title: "Trashed" } },
});
await harness.client.callTool({
name: "content_delete",
arguments: {
collection: "post",
id: extractJson<{ item: { id: string } }>(b).item.id,
},
});
const trashed = await harness.client.callTool({
name: "content_list_trashed",
arguments: { collection: "post" },
});
const items = extractJson<{ items: Array<{ id: string }> }>(trashed).items;
expect(items).toHaveLength(1);
expect(items[0]?.id).toBe(extractJson<{ item: { id: string } }>(b).item.id);
});
});
describe("edit-while-trashed", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  /** Create a post titled "T" and immediately move it to trash; returns its id. */
  async function createTrashedPost(): Promise<string> {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_delete",
      arguments: { collection: "post", id },
    });
    return id;
  }
  it("content_update on a trashed item is rejected (item not visible)", async () => {
    const id = await createTrashedPost();
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { title: "Edit while dead" } },
    });
    expect(updated.isError).toBe(true);
    expect(extractText(updated)).toMatch(/\bNOT_FOUND\b|\bnot found\b|trash/i);
  });
  it("content_publish on a trashed item is rejected", async () => {
    const id = await createTrashedPost();
    const result = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    expect(result.isError).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// Idempotency
// ---------------------------------------------------------------------------
describe("idempotency", () => {
  // Lifecycle tools (publish/unpublish/schedule/delete) must tolerate
  // repeat calls without corrupting state. Each test below repeats or
  // overlaps an operation and pins the invariant that must survive.
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("publish twice is idempotent: second call succeeds, status stays published, publishedAt is preserved", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const first = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    expect(first.isError, extractText(first)).toBeFalsy();
    const firstItem = extractJson<{
      item: { status: string; publishedAt: string | null };
    }>(first).item;
    expect(firstItem.status).toBe("published");
    expect(firstItem.publishedAt).toBeTruthy();
    // Pin publishedAt to a known fixed value so the comparison can't be
    // satisfied by coincidence (two publishes within the same ms would
    // produce identical ISO strings even on a regression that drops the
    // COALESCE preservation).
    const KNOWN = "2020-01-01T00:00:00.000Z";
    await sql`UPDATE ec_post SET published_at = ${KNOWN} WHERE id = ${id}`.execute(db);
    const second = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    // Contract: publish is idempotent. Second call succeeds, status
    // remains published, and publishedAt is preserved (the repository
    // uses COALESCE so the existing timestamp survives a re-publish).
    expect(second.isError, extractText(second)).toBeFalsy();
    const secondItem = extractJson<{
      item: { status: string; publishedAt: string | null };
    }>(second).item;
    expect(secondItem.status).toBe("published");
    expect(secondItem.publishedAt).toBe(KNOWN);
  });
  it("unpublish on a draft (already unpublished) is idempotent: status stays draft", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const createdItem = extractJson<{
      item: { id: string; version: number };
    }>(created).item;
    const id = createdItem.id;
    const versionBefore = createdItem.version;
    // Item is born as draft. Contract: unpublish is idempotent — succeeds
    // and the item stays draft.
    const result = await harness.client.callTool({
      name: "content_unpublish",
      arguments: { collection: "post", id },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const item = extractJson<{
      item: { status: string; publishedAt: string | null; version: number };
    }>(result).item;
    expect(item.status).toBe("draft");
    expect(item.publishedAt).toBeNull();
    // Idempotent: nothing meaningful changed. A regression that always
    // bumps the version or creates a phantom revision would surface here.
    // (updated_at can tick because the UPDATE re-runs; version is the
    // stricter invariant.)
    expect(item.version).toBe(versionBefore);
  });
  it("schedule then publish: schedule is preserved or cleared cleanly", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Schedule an hour out, then publish immediately — publish wins.
    const future = new Date(Date.now() + 3600_000).toISOString();
    await harness.client.callTool({
      name: "content_schedule",
      arguments: { collection: "post", id, scheduledAt: future },
    });
    const publish = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    expect(publish.isError, extractText(publish)).toBeFalsy();
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{
      item: { status: string; scheduledAt: string | null };
    }>(got).item;
    expect(item.status).toBe("published");
    // Once published, the future schedule is moot — should be cleared.
    expect(item.scheduledAt).toBeNull();
  });
  it("delete twice is safe — second call returns NOT_FOUND, not a crash", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_delete",
      arguments: { collection: "post", id },
    });
    // Second delete targets an already-trashed id; expect a clean error.
    const second = await harness.client.callTool({
      name: "content_delete",
      arguments: { collection: "post", id },
    });
    expect(second.isError).toBe(true);
    expect(extractText(second)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
  });
});
// ---------------------------------------------------------------------------
// content_unschedule gap (no MCP tool for this, only on runtime)
// ---------------------------------------------------------------------------
describe("content_unschedule gap", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("MCP exposes content_unschedule", async () => {
    const listed = await harness.client.listTools();
    const toolNames = listed.tools.map((tool) => tool.name);
    expect(toolNames).toContain("content_unschedule");
  });
  it("schedule + unschedule clears scheduledAt and re-publish still works (F12)", async () => {
    // Start from a fresh draft.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Scheduled post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Schedule one minute out.
    const scheduledAt = new Date(Date.now() + 60_000).toISOString();
    const scheduleResult = await harness.client.callTool({
      name: "content_schedule",
      arguments: { collection: "post", id, scheduledAt },
    });
    expect(scheduleResult.isError, extractText(scheduleResult)).toBeFalsy();
    // Sanity check: the schedule stuck.
    const viewAfterSchedule = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    expect(
      extractJson<{ item: { scheduledAt: string | null } }>(viewAfterSchedule).item.scheduledAt,
    ).toBeTruthy();
    // Unschedule wipes the pending schedule.
    const unscheduleResult = await harness.client.callTool({
      name: "content_unschedule",
      arguments: { collection: "post", id },
    });
    expect(unscheduleResult.isError, extractText(unscheduleResult)).toBeFalsy();
    const viewAfterUnschedule = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    expect(
      extractJson<{ item: { scheduledAt: string | null } }>(viewAfterUnschedule).item.scheduledAt,
    ).toBeNull();
    // Publishing still works once the schedule is gone.
    const republish = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    expect(republish.isError, extractText(republish)).toBeFalsy();
    expect(extractJson<{ item: { status: string } }>(republish).item.status).toBe("published");
  });
});

View File

@@ -0,0 +1,645 @@
/**
* MCP draft / revision data round-trip tests.
*
* For collections that support revisions, `content_update` writes the
* new data into a draft revision rather than the content table columns
* (the columns hold the live/published values). `content_get` and
* `content_update` hydrate the response item with the draft revision's
* data when one exists, exposing the previously-published values as
* `liveData` alongside.
*
* The user-visible contract: "if I update X to Y, then read back, I see Y"
* — even for revision-supporting collections.
*
* Slug updates and `revision_restore` round-trips share the same response
* shape, so they're tested here too.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Fixed actor id used for every harness connection in this file.
const ADMIN_ID = "user_admin";
// Minimal projection of the MCP content-item response envelope that these
// tests inspect. Response variants differ in where field values live
// (flattened onto the item vs nested under `data`), so both spots are
// modeled and readTitle() below tolerates either shape.
interface ItemEnvelope {
  item: {
    id: string;
    slug: string | null;
    status: string;
    liveRevisionId: string | null;
    draftRevisionId: string | null;
    version: number;
    publishedAt: string | null;
    updatedAt: string;
    // Field columns flattened onto item — title is what we care about
    title?: unknown;
    // Some response variants nest the typed values under `data`
    data?: { title?: unknown };
  };
  _rev?: string;
}
/**
 * Resolve the title a response item is currently reporting, tolerating
 * both envelope shapes: values nested under `data`, or flattened onto
 * the item itself. Nested `data.title` wins when present.
 */
function readTitle(item: ItemEnvelope["item"]): unknown {
  const nested = item.data;
  return nested && typeof nested === "object" && "title" in nested ? nested.title : item.title;
}
describe("MCP drafts — content_get and content_update round-trip (bug #2)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    // Collection that supports revisions — this is the surface area
    // where the bug surfaces. Without "revisions" in supports, updates
    // write directly to content columns and the round-trip is trivially
    // correct.
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      labelSingular: "Post",
      supports: ["drafts", "revisions"],
    });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
    await registry.createField("post", { slug: "body", label: "Body", type: "text" });
    // Collection without revision support — for contrast/regression
    await registry.createCollection({
      slug: "page",
      label: "Pages",
      labelSingular: "Page",
      supports: [],
    });
    await registry.createField("page", { slug: "title", label: "Title", type: "string" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  // ----- Core round-trip: update should be visible on get -----
  describe("revision-supporting collection", () => {
    it("content_update response data reflects the new title", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "Original" } },
      });
      const createdItem = extractJson<ItemEnvelope>(created);
      const updated = await harness.client.callTool({
        name: "content_update",
        arguments: {
          collection: "post",
          id: createdItem.item.id,
          data: { title: "Updated" },
        },
      });
      expect(updated.isError, extractText(updated)).toBeFalsy();
      const updatedItem = extractJson<ItemEnvelope>(updated);
      // Bug #2: today this returns "Original". After fix: "Updated".
      expect(readTitle(updatedItem.item)).toBe("Updated");
    });
    it("content_get returns the latest draft data after update", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "Original" } },
      });
      const createdItem = extractJson<ItemEnvelope>(created);
      await harness.client.callTool({
        name: "content_update",
        arguments: {
          collection: "post",
          id: createdItem.item.id,
          data: { title: "Updated via draft" },
        },
      });
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id: createdItem.item.id },
      });
      const gotItem = extractJson<ItemEnvelope>(got);
      expect(readTitle(gotItem.item)).toBe("Updated via draft");
    });
    it("multiple sequential updates all reflect on read", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "v1" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      for (const title of ["v2", "v3", "v4"]) {
        await harness.client.callTool({
          name: "content_update",
          arguments: { collection: "post", id, data: { title } },
        });
      }
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      });
      expect(readTitle(extractJson<ItemEnvelope>(got).item)).toBe("v4");
    });
    it("publishing a draft makes its data the new live data on read", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "Original" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      // Publish initial as live
      await harness.client.callTool({
        name: "content_publish",
        arguments: { collection: "post", id },
      });
      // Update creates a draft revision
      await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Draft change" } },
      });
      // Publish promotes draft to live
      await harness.client.callTool({
        name: "content_publish",
        arguments: { collection: "post", id },
      });
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      });
      expect(readTitle(extractJson<ItemEnvelope>(got).item)).toBe("Draft change");
    });
    it("partial updates merge with current draft (only title changes, body preserved)", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "T1", body: "B1" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "T2" } },
      });
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      });
      const item = extractJson<ItemEnvelope>(got).item;
      expect(readTitle(item)).toBe("T2");
      // Read body the same way (ItemEnvelope only models `title`, so the
      // nested branch needs a narrowing assertion for `body`).
      const body =
        item.data && typeof item.data === "object" && "body" in item.data
          ? // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- shape narrowed by 'in' check
            (item.data as { body?: unknown }).body
          : (item as Record<string, unknown>).body;
      expect(body).toBe("B1");
    });
  });
  // ----- content_compare must still expose both sides -----
  describe("content_compare", () => {
    it("returns both live and draft data when a draft exists", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "Original" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      // Publish, then update to create a draft on top of live
      await harness.client.callTool({
        name: "content_publish",
        arguments: { collection: "post", id },
      });
      await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Drafted" } },
      });
      const compare = await harness.client.callTool({
        name: "content_compare",
        arguments: { collection: "post", id },
      });
      expect(compare.isError, extractText(compare)).toBeFalsy();
      const result = extractJson<{
        live: { title?: unknown; data?: { title?: unknown } } | null;
        draft: { title?: unknown; data?: { title?: unknown } } | null;
        hasChanges?: boolean;
      }>(compare);
      expect(result.live).not.toBeNull();
      expect(result.draft).not.toBeNull();
      // Accept both envelope shapes: nested under `data` or flattened.
      const liveTitle = result.live?.data?.title ?? result.live?.title;
      const draftTitle = result.draft?.data?.title ?? result.draft?.title;
      expect(liveTitle).toBe("Original");
      expect(draftTitle).toBe("Drafted");
    });
  });
  // ----- content_discard_draft -----
  describe("content_discard_draft", () => {
    it("after discard, content_get returns published live data", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "post", data: { title: "Live title" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      await harness.client.callTool({
        name: "content_publish",
        arguments: { collection: "post", id },
      });
      await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Draft title" } },
      });
      await harness.client.callTool({
        name: "content_discard_draft",
        arguments: { collection: "post", id },
      });
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      });
      expect(readTitle(extractJson<ItemEnvelope>(got).item)).toBe("Live title");
    });
  });
  // ----- regression guard: non-revision collection still works -----
  describe("non-revision-supporting collection (regression guard)", () => {
    it("content_update on collection without revisions support reflects on read", async () => {
      const created = await harness.client.callTool({
        name: "content_create",
        arguments: { collection: "page", data: { title: "Page A" } },
      });
      const id = extractJson<ItemEnvelope>(created).item.id;
      await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "page", id, data: { title: "Page A Updated" } },
      });
      const got = await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "page", id },
      });
      expect(readTitle(extractJson<ItemEnvelope>(got).item)).toBe("Page A Updated");
    });
  });
});
describe("MCP drafts — slug updates (bug #9)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      supports: ["drafts", "revisions"],
    });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  /**
   * Create a post with `slug`, retarget it to `nextSlug` via content_update,
   * then publish so the slug change becomes visible. Returns the item id.
   */
  async function createRenameAndPublish(opts: {
    title: string;
    slug: string;
    nextSlug: string;
  }): Promise<string> {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: opts.title }, slug: opts.slug },
    });
    const id = extractJson<ItemEnvelope>(created).item.id;
    await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, slug: opts.nextSlug },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    return id;
  }
  it("content_update with a new slug actually changes the slug visible on read", async () => {
    const id = await createRenameAndPublish({
      title: "Original",
      slug: "original-slug",
      nextSlug: "new-slug",
    });
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    expect(extractJson<ItemEnvelope>(got).item.slug).toBe("new-slug");
  });
  it("content_get by new slug works after slug update + publish", async () => {
    const id = await createRenameAndPublish({ title: "T", slug: "old", nextSlug: "new" });
    // Look the item up by its NEW slug — the route must resolve it.
    const gotByNew = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id: "new" },
    });
    expect(gotByNew.isError, extractText(gotByNew)).toBeFalsy();
    expect(extractJson<ItemEnvelope>(gotByNew).item.id).toBe(id);
  });
});
describe("MCP drafts — revision_restore semantics (bug #17)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      supports: ["drafts", "revisions"],
    });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  /**
   * Bug #17 repro from MCP_BUGS.md: live=v1, draft=v2 (unpublished),
   * restore v1. Per the tool contract ("Replaces the current draft..."),
   * the live row must remain v1 and the draft must become v1. The
   * pre-fix behavior wrote v1 onto the live row and left the draft
   * pointing at v2.
   */
  it("restore replaces the current draft and leaves the live row alone", async () => {
    // Create v1, publish so live = v1.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "v1" } },
    });
    const id = extractJson<ItemEnvelope>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    // Find the v1 revision id BEFORE updating to v2 — once we update
    // without publishing, v1 is still in revision history.
    const revsBeforeUpdate = await harness.client.callTool({
      name: "revision_list",
      arguments: { collection: "post", id },
    });
    const v1Rev = extractJson<{
      items: Array<{ id: string; data?: { title?: unknown } }>;
    }>(revsBeforeUpdate).items.find((r) => r.data?.title === "v1");
    expect(v1Rev, "v1 revision must exist after publish").toBeTruthy();
    // Update to v2 (creates a draft revision; live remains v1).
    await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { title: "v2" } },
    });
    // Sanity: before restore, get returns v2 (the draft) and liveData=v1.
    const preRestore = extractJson<ItemEnvelope>(
      await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      }),
    ).item as ItemEnvelope["item"] & { liveData?: { title?: unknown } };
    expect(readTitle(preRestore)).toBe("v2");
    expect(preRestore.liveData?.title).toBe("v1");
    const v2DraftId = preRestore.draftRevisionId;
    expect(v2DraftId, "v2 draft revision id must be set").toBeTruthy();
    // Restore v1.
    const restored = await harness.client.callTool({
      name: "revision_restore",
      arguments: { collection: "post", id, revisionId: v1Rev!.id },
    });
    expect(restored.isError, extractText(restored)).toBeFalsy();
    // The restore response itself must show the new draft state (v1),
    // not stale data. Same shape as the bug-#2 fix for content_update.
    const restoredItem = extractJson<ItemEnvelope>(restored).item;
    expect(readTitle(restoredItem)).toBe("v1");
    // And a follow-up content_get must agree.
    const postRestore = extractJson<ItemEnvelope>(
      await harness.client.callTool({
        name: "content_get",
        arguments: { collection: "post", id },
      }),
    ).item;
    expect(readTitle(postRestore)).toBe("v1");
    // The live row must still hold v1 (unchanged from the original
    // publish — restore must NOT overwrite live).
    // `ec_post` is created at runtime by SchemaRegistry, so it isn't in
    // the static `Database` type — hence the `as never` casts (presumably;
    // confirm against the registry's table-naming scheme).
    const dbRow = (await db
      .selectFrom("ec_post" as never)
      .select(["title", "live_revision_id", "draft_revision_id"] as never)
      .where("id" as never, "=", id)
      .executeTakeFirst()) as
      | {
          title: unknown;
          live_revision_id: string | null;
          draft_revision_id: string | null;
        }
      | undefined;
    expect(dbRow?.title).toBe("v1");
    // A new draft revision was created. It is distinct from BOTH the
    // original v1 revision id (we created a new revision row carrying
    // v1's data — we don't reuse history rows) AND the v2 draft id
    // (the v2 draft was abandoned). This is the strongest differentia
    // from the pre-fix behavior, which left v2's draft pointer
    // in place.
    expect(dbRow?.draft_revision_id).toBeTruthy();
    expect(dbRow?.draft_revision_id).not.toBe(v1Rev!.id);
    expect(dbRow?.draft_revision_id).not.toBe(v2DraftId);
  });
  /**
   * Companion case: restoring while no draft exists should still create
   * a new draft (rather than no-op or overwrite live).
   */
  it("restore creates a new draft when none exists", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "v1" } },
    });
    const id = extractJson<ItemEnvelope>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    // Update + publish v2 so there's no live draft.
    await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { title: "v2" } },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const revs = extractJson<{
      items: Array<{ id: string; data?: { title?: unknown } }>;
    }>(
      await harness.client.callTool({
        name: "revision_list",
        arguments: { collection: "post", id },
      }),
    );
    const v1Rev = revs.items.find((r) => r.data?.title === "v1");
    expect(v1Rev).toBeTruthy();
    // Now live = v2, no draft. Restore v1.
    const restored = await harness.client.callTool({
      name: "revision_restore",
      arguments: { collection: "post", id, revisionId: v1Rev!.id },
    });
    expect(restored.isError, extractText(restored)).toBeFalsy();
    expect(readTitle(extractJson<ItemEnvelope>(restored).item)).toBe("v1");
    // Live row should still hold v2; a new draft now exists pointing
    // at v1.
    const dbRow = (await db
      .selectFrom("ec_post" as never)
      .select(["title", "draft_revision_id"] as never)
      .where("id" as never, "=", id)
      .executeTakeFirst()) as
      | {
          title: unknown;
          draft_revision_id: string | null;
        }
      | undefined;
    expect(dbRow?.title).toBe("v2");
    expect(dbRow?.draft_revision_id).toBeTruthy();
  });
});
// ---------------------------------------------------------------------------
// F13: liveData carries the published values when a draft revision exists.
// When no draft exists, liveData is undefined.
// ---------------------------------------------------------------------------
describe("MCP drafts — liveData hydration (F13)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      supports: ["drafts", "revisions"],
    });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  /** Response projection under test: draft data plus the optional published snapshot. */
  type HydratedItem = { data: { title: string }; liveData?: { title?: string } };
  /** Fetch the item via content_get and return the hydrated projection. */
  async function readItem(id: string): Promise<HydratedItem> {
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    return extractJson<{ item: HydratedItem }>(got).item;
  }
  it("liveData is undefined when there is no draft revision", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "First" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const item = await readItem(id);
    expect(item.data.title).toBe("First");
    expect(item.liveData).toBeUndefined();
  });
  it("liveData carries the published values when a draft revision exists", async () => {
    // Publish first so "published title" becomes the live value.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "published title" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    // Updating after publish writes a draft revision on top of live
    // (the data column keeps "published title").
    await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { title: "draft title" } },
    });
    // data reflects the draft; liveData carries the published value.
    const item = await readItem(id);
    expect(item.data.title).toBe("draft title");
    expect(item.liveData?.title).toBe("published title");
  });
});

View File

@@ -0,0 +1,403 @@
/**
* MCP error envelope fidelity tests.
*
* Specific failure modes (unknown collection, duplicate slug, unknown
* field, bad orderBy, etc.) must return discriminated error codes so
* callers can act on them programmatically:
*
* - Handlers detect known failure shapes and return one of:
* `SLUG_CONFLICT`, `COLLECTION_NOT_FOUND`, `UNKNOWN_FIELD`,
* `INVALID_ORDER_BY`, `VALIDATION_ERROR`.
* - The MCP envelope emits the code as a `[CODE]` prefix on the
* message text and as `_meta.code` for SDK-aware clients.
*
* Each test asserts:
* (a) the response is `isError: true`
* (b) the code/message names the specific failure, not a generic
* "Failed to ..." string
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { connectMcpHarness, extractText, type McpHarness } from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
// Generic placeholders that should NOT survive after the fix. An error
// message matching one of these means the handler swallowed the specific
// failure instead of surfacing a discriminated code.
const GENERIC_CREATE = /^Failed to create content$/;
const GENERIC_LIST = /^Failed to list content$/;
const GENERIC_UPDATE = /^Failed to update content$/;
const UNKNOWN_ERROR = /^Unknown error$/;
describe("MCP error envelope — content_create (bug #3)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("unknown collection slug returns a discriminated NOT_FOUND-style error", async () => {
    const response = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "nonexistent", data: { title: "Hi" } },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    // The specific failure (collection not found) must be named.
    expect(message).not.toMatch(GENERIC_CREATE);
    expect(message).not.toMatch(UNKNOWN_ERROR);
    // Tight match: explicitly the COLLECTION_NOT_FOUND code (or message),
    // not just any text that happens to contain "collection".
    expect(message).toMatch(/COLLECTION_NOT_FOUND|Collection ['"]?nonexistent['"]? not found/i);
  });
  it("duplicate slug returns a SLUG_CONFLICT-style error", async () => {
    // Seed an item holding the slug we are about to collide with.
    const seedRepo = new ContentRepository(db);
    await seedRepo.create({
      type: "post",
      data: { title: "First" },
      slug: "duplicate-me",
      status: "draft",
      authorId: "seed",
    });
    const response = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "Second" },
        slug: "duplicate-me",
      },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    expect(message).not.toMatch(GENERIC_CREATE);
    expect(message).not.toMatch(UNKNOWN_ERROR);
    // Either explicit "slug" wording or a UNIQUE/conflict signal.
    expect(message).toMatch(/slug|unique|conflict|duplicate|exists/i);
  });
  it("unknown field in data returns an UNKNOWN_FIELD-style error", async () => {
    const response = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        // `nonexistent_field` was never created on the post collection
        data: { title: "Hello", nonexistent_field: "boom" },
      },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    expect(message).not.toMatch(GENERIC_CREATE);
    expect(message).not.toMatch(UNKNOWN_ERROR);
    expect(message).toMatch(/field|unknown|nonexistent_field|column/i);
  });
});
describe("MCP error envelope — content_list (bug #3)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("unknown collection returns a COLLECTION_NOT_FOUND-style error, not a generic one", async () => {
    const response = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "nonexistent" },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    expect(message).not.toMatch(GENERIC_LIST);
    expect(message).not.toMatch(UNKNOWN_ERROR);
    expect(message).toMatch(/COLLECTION_NOT_FOUND|Collection ['"]?nonexistent['"]? not found/i);
  });
  it("invalid orderBy column returns an INVALID_ORDER_BY-style error", async () => {
    const response = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", orderBy: "definitely_not_a_column" },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    expect(message).not.toMatch(GENERIC_LIST);
    expect(message).not.toMatch(UNKNOWN_ERROR);
    // Concrete: the response must echo the offending column AND carry a
    // stable validation-style code — looser matches on "order"/"column"
    // could pass on unrelated phrasing.
    expect(message).toContain("definitely_not_a_column");
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- _meta is untyped on the SDK result
    const meta = (response as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("VALIDATION_ERROR");
  });
});
/**
 * Bug #3 regression guard for content_get: the not-found path already
 * produced a clear message naming the id — pin that so it stays working.
 */
describe("MCP error envelope — content_get (bug #3)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("missing item returns a clear NOT_FOUND error including the id (already works — regression guard)", async () => {
    const res = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id: "01NONEXISTENT" },
    });
    expect(res.isError).toBe(true);
    // Both a stable NOT_FOUND token and the offending id must appear.
    const message = extractText(res);
    expect(message).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
    expect(message).toContain("01NONEXISTENT");
  });
});
// Bug #3 coverage for content_update: not-found and optimistic-concurrency
// failures must be discriminable from generic update errors.
describe("MCP error envelope — content_update (bug #3)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
  });

  afterEach(async () => {
    // Harness may be unset if connect failed; guard before cleanup.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("update on missing id returns a NOT_FOUND-style error", async () => {
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id: "01NEVEREXISTED", data: { title: "x" } },
    });
    expect(result.isError).toBe(true);
    const text = extractText(result);
    expect(text).not.toMatch(GENERIC_UPDATE);
    expect(text).not.toMatch(UNKNOWN_ERROR);
    // Stable token plus the offending id so callers can act on it.
    expect(text).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
    expect(text).toContain("01NEVEREXISTED");
  });

  it("stale _rev returns a CONFLICT-style error (not a generic one)", async () => {
    // Seed a real item directly via the repository so the update targets
    // an existing row and only the revision token is wrong.
    const repo = new ContentRepository(db);
    const item = await repo.create({
      type: "post",
      data: { title: "Original" },
      slug: "rev-test",
      status: "draft",
      authorId: "user_admin",
    });
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id: item.id,
        data: { title: "x" },
        _rev: "obviously-stale-rev",
      },
    });
    expect(result.isError).toBe(true);
    const text = extractText(result);
    expect(text).not.toMatch(GENERIC_UPDATE);
    expect(text).not.toMatch(UNKNOWN_ERROR);
    // Loose match: any conflict/stale-revision wording is acceptable.
    expect(text).toMatch(/conflict|rev|stale|outdated|modified/i);
  });
});
// Verifies error codes survive the unwrap() layer: either as structured
// `_meta.code` or as a stable token embedded in the message text.
describe("MCP error envelope — error code preservation through unwrap()", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  /**
   * The MCP SDK forwards `_meta` on tool results when present — once
   * `unwrap()` propagates it, callers can read structured codes
   * programmatically. Until then, codes must at least appear in the
   * message text so callers can match on a stable token.
   */
  it("a NOT_FOUND error from a handler surfaces 'NOT_FOUND' or equivalent", async () => {
    const result = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id: "01MISSING" },
    });
    expect(result.isError).toBe(true);
    // Either the structured _meta carries the code, or the message
    // includes a stable token. Today: only `Content item not found:` —
    // no machine-readable code.
    const text = extractText(result);
    const meta = (result as { _meta?: { code?: string } })._meta;
    const codeFromMeta = meta?.code;
    // Deliberately accepts either delivery channel for the code.
    expect(codeFromMeta === "NOT_FOUND" || /\bNOT_FOUND\b/.test(text)).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// F7: error envelope correctly carries codes for SchemaError, McpError,
// and SDK-thrown auth errors.
// ---------------------------------------------------------------------------
// F7: codes propagated for SchemaError, McpError, and SDK-thrown auth
// errors. Note: `db` is assigned inside each `it` (not in a beforeEach)
// because different tests need differently-seeded databases.
describe("MCP error envelope — F7 (codes propagated for SchemaError + auth)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  afterEach(async () => {
    // Both handles are assigned inside the test body; if setup throws
    // before either assignment, an unconditional cleanup would raise a
    // TypeError here and mask the real failure. Guard both.
    if (harness) await harness.cleanup();
    if (db) await teardownTestDatabase(db);
  });

  it("INSUFFICIENT_SCOPE for a token without the required scope", async () => {
    db = await setupTestDatabaseWithCollections();
    // Only grant content:read; content_create needs content:write.
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
      tokenScopes: ["content:read"],
    });
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "x" } },
    });
    expect(result.isError).toBe(true);
    const meta = (result as { _meta?: { code?: string } })._meta;
    // Structured code AND message token must both be present.
    expect(meta?.code).toBe("INSUFFICIENT_SCOPE");
    expect(extractText(result)).toMatch(/INSUFFICIENT_SCOPE/);
  });

  it("backwards compat: content:write token can call menu_create (implicit grant)", async () => {
    // PATs issued before menus:manage was split out of content:write
    // must continue to work. Verify the implicit grant flows through
    // the full MCP stack.
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
      tokenScopes: ["content:write"],
    });
    const result = await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main" },
    });
    // Second arg to expect() surfaces the error text on failure.
    expect(result.isError, extractText(result)).toBeFalsy();
  });

  it("menus:manage token cannot call content_create (no reverse grant)", async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
      tokenScopes: ["menus:manage"],
    });
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "x" } },
    });
    expect(result.isError).toBe(true);
    const meta = (result as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("INSUFFICIENT_SCOPE");
  });

  it("INSUFFICIENT_PERMISSIONS for a role that's too low", async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_subscriber",
      userRole: Role.SUBSCRIBER,
    });
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "x" } },
    });
    expect(result.isError).toBe(true);
    const meta = (result as { _meta?: { code?: string } })._meta;
    // Role failure is distinct from scope failure — different code.
    expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
  });

  it("SchemaError code (RESERVED_SLUG) propagates through schema_create_collection", async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
    // '_emdash_collections' is the prefix used for system tables — that
    // kind of slug is reserved. Pick a guaranteed reserved slug
    // (the '_emdash' prefix or e.g. 'media' — see RESERVED_COLLECTION_SLUGS).
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "media", label: "Reserved" },
    });
    expect(result.isError).toBe(true);
    const meta = (result as { _meta?: { code?: string } })._meta;
    // SchemaError carries `code` directly; respondHandlerError should
    // forward it. Whichever specific reserved-slug code applies is fine
    // — just assert it's a stable string that isn't the generic fallback.
    expect(meta?.code).toBeDefined();
    expect(meta?.code).not.toBe("INTERNAL_ERROR");
    expect(meta?.code).not.toBe("");
  });
});

View File

@@ -0,0 +1,72 @@
/**
* Smoke test for the MCP integration harness.
*
* Verifies the `connectMcpHarness()` plumbing is sound: real DB, real
* runtime, real MCP client/server pair. This is not bug coverage — it
* just guards against regressions in the harness itself. Bug-specific
* tests live in the other files in this directory.
*/
import { Role } from "@emdash-cms/auth";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { connectMcpHarness, extractJson, type McpHarness } from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
// Smoke coverage for the harness plumbing itself (see file header).
describe("MCP harness smoke", () => {
  let harness: McpHarness;
  // May stay undefined if beforeEach fails before DB setup completes.
  let dbCleanup: (() => Promise<void>) | undefined;

  beforeEach(async () => {
    const db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({
      db,
      userId: "user_admin",
      userRole: Role.ADMIN,
    });
    dbCleanup = () => teardownTestDatabase(db);
  });

  afterEach(async () => {
    // Guard both: if beforeEach throws partway, `harness`/`dbCleanup`
    // are unset and an unconditional call would raise a TypeError that
    // masks the original setup failure. (Matches the `if (harness)`
    // guard used by the other test files in this suite.)
    if (harness) await harness.cleanup();
    if (dbCleanup) await dbCleanup();
  });

  it("exposes registered MCP tools via tools/list", async () => {
    const tools = await harness.client.listTools();
    const names = tools.tools.map((t) => t.name);
    // A representative sample across tool families is enough for smoke.
    expect(names).toContain("content_list");
    expect(names).toContain("content_create");
    expect(names).toContain("schema_list_collections");
  });

  it("can call schema_list_collections and get the seeded test collections", async () => {
    const result = await harness.client.callTool({
      name: "schema_list_collections",
      arguments: {},
    });
    expect(result.isError).toBeFalsy();
    const { items } = extractJson<{ items: Array<{ slug: string }> }>(result);
    const slugs = items.map((c) => c.slug);
    // setupTestDatabaseWithCollections seeds 'post' and 'page'.
    expect(slugs).toContain("post");
    expect(slugs).toContain("page");
  });

  it("can round-trip a simple content_create + content_get", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Hello" } },
    });
    expect(created.isError).toBeFalsy();
    const createdItem = extractJson<{ item: { id: string; slug: string } }>(created);
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id: createdItem.item.id },
    });
    expect(got.isError).toBeFalsy();
    const gotItem = extractJson<{ item: { id: string; slug: string } }>(got);
    expect(gotItem.item.id).toBe(createdItem.item.id);
    // Slug is auto-derived from the title ("Hello" -> "hello").
    expect(gotItem.item.slug).toBe("hello");
  });
});

View File

@@ -0,0 +1,162 @@
/**
* MCP tool input schema tests.
*
* The MCP SDK validates `arguments` against each tool's `inputSchema`
* (Zod) before the handler runs. These tests pin down what happens at
* that boundary: missing required fields, wrong types, invalid enum
* values, out-of-range numeric inputs, etc.
*
* The expected behavior is consistent: invalid arguments produce a
* structured error response (`isError: true`) with a message that names
* the offending field. We assert specifically that errors at this layer
* remain user-friendly across the omnibus fix.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { connectMcpHarness, extractText, type McpHarness } from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
// Boundary tests for the SDK's Zod inputSchema validation layer:
// every malformed `arguments` payload must yield isError: true before
// any handler code runs. See the file-header comment for rationale.
describe("MCP input schema validation", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  // --- missing / mistyped required arguments -------------------------

  it("content_create rejects missing required collection argument", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      // Cast needed: deliberately omits a required field the TS types demand.
      arguments: { data: { title: "T" } } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("content_create rejects wrong-type for data field (string instead of object)", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: "not-an-object" } as unknown as Record<
        string,
        unknown
      >,
    });
    expect(result.isError).toBe(true);
  });

  // --- enum / range violations ---------------------------------------

  it("content_create with status enum value outside the enum is rejected", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T" },
        status: "weird-status",
      } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("content_list rejects out-of-range limit (e.g. negative)", async () => {
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: -5 },
    });
    expect(result.isError).toBe(true);
  });

  it("content_list rejects non-integer limit", async () => {
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 5.7 },
    });
    expect(result.isError).toBe(true);
  });

  it("content_list rejects order outside enum", async () => {
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", order: "sideways" } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("schema_create_collection rejects supports value outside enum", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: {
        slug: "x",
        label: "X",
        // 'garbage' is not a member of the supports enum.
        supports: ["drafts", "garbage"],
      } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("schema_create_field rejects type outside enum", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "x",
        label: "X",
        type: "magic",
      } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("content_get rejects missing id", async () => {
    const result = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post" } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  it("content_schedule rejects missing scheduledAt", async () => {
    const result = await harness.client.callTool({
      name: "content_schedule",
      arguments: { collection: "post", id: "01ANY" } as unknown as Record<string, unknown>,
    });
    expect(result.isError).toBe(true);
  });

  // --- numeric upper bounds ------------------------------------------

  it("media_list with limit > 100 is rejected by inputSchema", async () => {
    const result = await harness.client.callTool({
      name: "media_list",
      arguments: { limit: 500 },
    });
    expect(result.isError).toBe(true);
  });

  it("revision_list with limit > 50 is rejected by inputSchema", async () => {
    const result = await harness.client.callTool({
      name: "revision_list",
      arguments: { collection: "post", id: "01x", limit: 500 },
    });
    expect(result.isError).toBe(true);
  });

  // --- message quality ------------------------------------------------

  it("input validation error messages name the offending field", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      // Slug with capitals violates the slug pattern.
      arguments: { slug: "Has-Caps", label: "Bad" },
    });
    expect(result.isError).toBe(true);
    // Ideally the error names the field "slug" or shows the regex /
    // pattern violation. Today the SDK error usually does — pin that
    // behavior so it doesn't regress.
    expect(extractText(result)).toMatch(/slug|pattern|regex|invalid/i);
  });
});

View File

@@ -0,0 +1,228 @@
/**
* MCP content lifecycle tests.
*
* Covers two contracts that callers rely on:
*
* - `content_unpublish` clears `published_at` so a missing/null timestamp
* unambiguously means the item is not currently live. Re-publishing
* assigns a fresh timestamp.
* - `schema_create_collection` applies its documented default of
* `['drafts', 'revisions']` for `supports` when the caller omits it.
* Explicit `[]` is preserved as an opt-out.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import {
setupTestDatabaseWithCollections,
teardownTestDatabase,
setupTestDatabase,
} from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
// ---------------------------------------------------------------------------
// Bug #10: unpublish publishedAt
// ---------------------------------------------------------------------------
// Bug #10: unpublish must clear `published_at` so a null timestamp
// unambiguously means "not currently live"; re-publishing assigns a
// fresh timestamp (see file-header comment).
describe("MCP content_unpublish — publishedAt clearing (bug #10)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("unpublish clears publishedAt so 'currently live' is unambiguous", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Will publish" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Publish — populates publishedAt
    const published = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const publishedItem = extractJson<{ item: { publishedAt: string | null } }>(published);
    expect(publishedItem.item.publishedAt).toBeTruthy();
    // Unpublish — should clear publishedAt
    const unpublished = await harness.client.callTool({
      name: "content_unpublish",
      arguments: { collection: "post", id },
    });
    const unpubItem = extractJson<{
      item: { publishedAt: string | null; status: string };
    }>(unpublished);
    expect(unpubItem.item.status).toBe("draft");
    // Bug #10: today, publishedAt is still the old timestamp.
    expect(unpubItem.item.publishedAt).toBeNull();
  });

  it("content_get after unpublish reflects null publishedAt and status=draft", async () => {
    // Same flow as above, but assert via a fresh read rather than the
    // unpublish response — guards against a response-only fix.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    await harness.client.callTool({
      name: "content_unpublish",
      arguments: { collection: "post", id },
    });
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const gotItem = extractJson<{
      item: { publishedAt: string | null; status: string };
    }>(got);
    expect(gotItem.item.status).toBe("draft");
    expect(gotItem.item.publishedAt).toBeNull();
  });

  it("re-publish after unpublish gets a fresh publishedAt timestamp", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const firstPub = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const firstTs = extractJson<{ item: { publishedAt: string } }>(firstPub).item.publishedAt;
    expect(firstTs).toBeTruthy();
    await harness.client.callTool({
      name: "content_unpublish",
      arguments: { collection: "post", id },
    });
    // Wait briefly so the new timestamp is distinguishable
    await new Promise((r) => setTimeout(r, 5));
    const secondPub = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const secondTs = extractJson<{ item: { publishedAt: string } }>(secondPub).item.publishedAt;
    expect(secondTs).toBeTruthy();
    // Should be a new timestamp, not the old one.
    expect(secondTs).not.toBe(firstTs);
  });
});
// ---------------------------------------------------------------------------
// Bug #11: schema_create_collection supports default
// ---------------------------------------------------------------------------
// Bug #11: schema_create_collection must apply the documented default of
// ['drafts', 'revisions'] when `supports` is omitted, while preserving an
// explicit `[]` as an opt-out.
describe("MCP schema_create_collection — supports default (bug #11)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    // Plain DB (no seeded collections) — this suite creates its own.
    db = await setupTestDatabase();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("creating a collection without `supports` uses documented default ['drafts', 'revisions']", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "article", label: "Articles" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const created = extractJson<{ supports: string[] }>(result);
    // Bug #11: today this is [] or null. After fix: ['drafts', 'revisions'].
    expect(created.supports).toEqual(expect.arrayContaining(["drafts", "revisions"]));
  });

  it("explicit empty supports array is preserved (regression guard — opt-out)", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "minimal", label: "Minimal", supports: [] },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const created = extractJson<{ supports: string[] }>(result);
    // [] must NOT be replaced with the default — it's a deliberate opt-out.
    expect(created.supports).toEqual([]);
  });

  it("explicit supports list is preserved exactly (regression guard)", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: {
        slug: "blog",
        label: "Blog",
        supports: ["drafts", "revisions", "scheduling"],
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const created = extractJson<{ supports: string[] }>(result);
    // Order-insensitive comparison: only membership matters.
    expect(created.supports.toSorted()).toEqual(["drafts", "revisions", "scheduling"].toSorted());
  });

  it("default-supports collection accepts publish/unpublish/revision flows immediately", async () => {
    // Default supports should include drafts + revisions, so the standard
    // publish/unpublish lifecycle should work without further config.
    await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "story", label: "Stories" },
    });
    await harness.client.callTool({
      name: "schema_create_field",
      arguments: { collection: "story", slug: "title", label: "Title", type: "string" },
    });
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "story", data: { title: "T" } },
    });
    expect(created.isError, extractText(created)).toBeFalsy();
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Update should create a draft revision (only meaningful if 'revisions' is in supports)
    await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "story", id, data: { title: "Updated" } },
    });
    const revs = await harness.client.callTool({
      name: "revision_list",
      arguments: { collection: "story", id },
    });
    // If supports doesn't include 'revisions', revision_list returns empty
    // or fails. After fix: revisions exist.
    expect(revs.isError, extractText(revs)).toBeFalsy();
    const items = extractJson<{ items: unknown[] }>(revs).items;
    expect(items.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,469 @@
/**
* MCP media tools — comprehensive integration tests.
*
* Covers:
* - media_list (incl. mimeType filter, pagination)
* - media_get
* - media_update (incl. ownership)
* - media_delete (incl. ownership)
*
* Plus regression for bug #14 (no media_upload tool gap) and bug #1
* variants for media (the MCP code already handles null authorId
* correctly for media — `media_update`/`media_delete` use `... || ""`,
* unlike content extraction).
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { MediaRepository } from "../../../src/database/repositories/media.js";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
const EDITOR_ID = "user_editor";
const AUTHOR_ID = "user_author";
const OTHER_AUTHOR_ID = "user_other_author";
const SUBSCRIBER_ID = "user_subscriber";
/**
 * Insert one media row directly through MediaRepository and return its id.
 *
 * Defaults produce a unique PNG of 1024 bytes owned by the admin user.
 * Passing `authorId: null` omits the field entirely so the row is created
 * with no owner (exercises the null-authorId code paths).
 */
async function seedMedia(
  db: Kysely<Database>,
  overrides: Partial<{
    filename: string;
    mimeType: string;
    size: number;
    authorId: string | null;
  }> = {},
): Promise<string> {
  // Random base36 token so parallel seeds never collide on filename/key.
  const token = () => Math.random().toString(36).slice(2, 10);
  // null → omit authorId; undefined → default to ADMIN_ID; string → as-is.
  const ownership =
    overrides.authorId === null ? {} : { authorId: overrides.authorId ?? ADMIN_ID };
  const created = await new MediaRepository(db).create({
    filename: overrides.filename ?? `file-${token().slice(0, 6)}.png`,
    mimeType: overrides.mimeType ?? "image/png",
    size: overrides.size ?? 1024,
    storageKey: `media/${token()}`,
    ...ownership,
  });
  return created.id;
}
// ---------------------------------------------------------------------------
// media_list
// ---------------------------------------------------------------------------
// media_list: listing, mimeType-prefix filtering, cursor pagination, and
// the permission floor (any logged-in user may list).
describe("media_list", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    // Harness is connected per-test so each test can pick its own role.
    db = await setupTestDatabase();
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("returns empty list when no media exists", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "media_list",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const { items } = extractJson<{ items: unknown[] }>(result);
    expect(items).toEqual([]);
  });

  it("lists all uploaded media", async () => {
    await seedMedia(db);
    await seedMedia(db);
    await seedMedia(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "media_list",
      arguments: {},
    });
    const { items } = extractJson<{ items: unknown[] }>(result);
    expect(items).toHaveLength(3);
  });

  it("filters by mimeType prefix", async () => {
    await seedMedia(db, { mimeType: "image/png" });
    await seedMedia(db, { mimeType: "image/jpeg" });
    await seedMedia(db, { mimeType: "application/pdf" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "media_list",
      // Prefix match: "image/" should catch png + jpeg but not pdf.
      arguments: { mimeType: "image/" },
    });
    const { items } = extractJson<{ items: Array<{ mimeType: string }> }>(result);
    expect(items).toHaveLength(2);
    for (const item of items) {
      expect(item.mimeType.startsWith("image/")).toBe(true);
    }
  });

  it("paginates with cursor", async () => {
    for (let i = 0; i < 5; i++) await seedMedia(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const page1 = await harness.client.callTool({
      name: "media_list",
      arguments: { limit: 2 },
    });
    const p1 = extractJson<{ items: Array<{ id: string }>; nextCursor?: string }>(page1);
    expect(p1.items).toHaveLength(2);
    expect(p1.nextCursor).toBeTruthy();
    const page2 = await harness.client.callTool({
      name: "media_list",
      arguments: { limit: 2, cursor: p1.nextCursor },
    });
    const p2 = extractJson<{ items: Array<{ id: string }> }>(page2);
    expect(p2.items).toHaveLength(2);
    // Pages must be disjoint — no id from page 1 may reappear on page 2.
    const p1Ids = p1.items.map((i) => i.id);
    for (const item of p2.items) expect(p1Ids).not.toContain(item.id);
  });

  it("any logged-in user can list media", async () => {
    await seedMedia(db);
    // SUBSCRIBER is the lowest role — if it can list, everyone can.
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const result = await harness.client.callTool({
      name: "media_list",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// media_get
// ---------------------------------------------------------------------------
// media_get: metadata round-trip, NOT_FOUND messaging, and the permission
// floor (any logged-in user may fetch media metadata).
describe("media_get", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  beforeEach(async () => {
    db = await setupTestDatabase();
  });

  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  it("returns full media metadata", async () => {
    const mediaId = await seedMedia(db, {
      filename: "logo.png",
      mimeType: "image/png",
      size: 2048,
    });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const res = await harness.client.callTool({
      name: "media_get",
      arguments: { id: mediaId },
    });
    expect(res.isError, extractText(res)).toBeFalsy();
    const payload = extractJson<{
      item: { id: string; filename: string; mimeType: string; size: number };
    }>(res);
    // Every seeded attribute must round-trip unchanged.
    expect(payload.item.id).toBe(mediaId);
    expect(payload.item.filename).toBe("logo.png");
    expect(payload.item.mimeType).toBe("image/png");
    expect(payload.item.size).toBe(2048);
  });

  it("returns NOT_FOUND for missing id", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const res = await harness.client.callTool({
      name: "media_get",
      arguments: { id: "01NOTAMEDIAID" },
    });
    expect(res.isError).toBe(true);
    // Stable NOT_FOUND token plus the offending id in the message.
    const message = extractText(res);
    expect(message).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
    expect(message).toContain("01NOTAMEDIAID");
  });

  it("any logged-in user can get media", async () => {
    const mediaId = await seedMedia(db);
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const res = await harness.client.callTool({
      name: "media_get",
      arguments: { id: mediaId },
    });
    expect(res.isError, extractText(res)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// media_update
// ---------------------------------------------------------------------------
describe("media_update", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
// --- basic field updates ---------------------------------------------

it("updates alt text and caption", async () => {
  const id = await seedMedia(db, { authorId: ADMIN_ID });
  harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "Logo image", caption: "Brand logo" },
  });
  expect(result.isError, extractText(result)).toBeFalsy();
  const data = extractJson<{ item: { alt: string; caption: string } }>(result);
  expect(data.item.alt).toBe("Logo image");
  expect(data.item.caption).toBe("Brand logo");
});

it("updates dimensions", async () => {
  const id = await seedMedia(db);
  harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, width: 1920, height: 1080 },
  });
  expect(result.isError, extractText(result)).toBeFalsy();
  const data = extractJson<{ item: { width: number; height: number } }>(result);
  expect(data.item.width).toBe(1920);
  expect(data.item.height).toBe(1080);
});

// --- ownership rules -------------------------------------------------

it("AUTHOR can update their own media", async () => {
  const id = await seedMedia(db, { authorId: AUTHOR_ID });
  harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "Mine" },
  });
  expect(result.isError, extractText(result)).toBeFalsy();
});

it("AUTHOR cannot update another user's media", async () => {
  const id = await seedMedia(db, { authorId: OTHER_AUTHOR_ID });
  harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "Theirs" },
  });
  expect(result.isError).toBe(true);
});

it("EDITOR can update any user's media", async () => {
  // EDITOR and above bypass the ownership check.
  const id = await seedMedia(db, { authorId: OTHER_AUTHOR_ID });
  harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "Editor override" },
  });
  expect(result.isError, extractText(result)).toBeFalsy();
});

// --- null authorId (bug #1 inconsistency) ----------------------------

it("ADMIN can update media with null authorId (regression: this works for media but not content — bug #1 inconsistency)", async () => {
  const id = await seedMedia(db, { authorId: null });
  harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "No author" },
  });
  // Already works correctly for media — confirms the fix path for
  // content (use `... || ""` instead of throwing).
  expect(result.isError, extractText(result)).toBeFalsy();
});

it("AUTHOR cannot update media with null authorId (no ownership claim)", async () => {
  const id = await seedMedia(db, { authorId: null });
  harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
  const result = await harness.client.callTool({
    name: "media_update",
    arguments: { id, alt: "Should fail" },
  });
  expect(result.isError).toBe(true);
});
it("returns NOT_FOUND-style error for missing id", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "media_update",
arguments: { id: "01NEVEREXISTED", alt: "x" },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
expect(extractText(result)).toContain("01NEVEREXISTED");
});
});
// ---------------------------------------------------------------------------
// media_delete
// ---------------------------------------------------------------------------
// media_delete — happy path, ownership enforcement, NOT_FOUND shape,
// and idempotency of repeated deletes on the same id.
describe("media_delete", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  // Shorthand for invoking media_delete on a given id via the harness.
  const deleteMedia = (id: string) =>
    harness.client.callTool({ name: "media_delete", arguments: { id } });
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("deletes a media item", async () => {
    const id = await seedMedia(db, { authorId: ADMIN_ID });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const deletion = await deleteMedia(id);
    expect(deletion.isError, extractText(deletion)).toBeFalsy();
    // Verify it's gone: a follow-up media_get must now fail.
    const lookup = await harness.client.callTool({
      name: "media_get",
      arguments: { id },
    });
    expect(lookup.isError).toBe(true);
  });
  it("AUTHOR cannot delete another user's media", async () => {
    const id = await seedMedia(db, { authorId: OTHER_AUTHOR_ID });
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const denied = await deleteMedia(id);
    expect(denied.isError).toBe(true);
  });
  it("EDITOR can delete any user's media", async () => {
    const id = await seedMedia(db, { authorId: OTHER_AUTHOR_ID });
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const deletion = await deleteMedia(id);
    expect(deletion.isError, extractText(deletion)).toBeFalsy();
  });
  it("returns NOT_FOUND for missing id", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const missing = await deleteMedia("01NOPE");
    expect(missing.isError).toBe(true);
    // NOT_FOUND category plus the offending id echoed back.
    expect(extractText(missing)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
    expect(extractText(missing)).toContain("01NOPE");
  });
  it("delete is idempotent — second delete on same id returns NOT_FOUND, not crash", async () => {
    const id = await seedMedia(db, { authorId: ADMIN_ID });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await deleteMedia(id);
    const second = await deleteMedia(id);
    expect(second.isError).toBe(true);
    expect(extractText(second)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
  });
});
// ---------------------------------------------------------------------------
// Bug #14 — gap: media_create tool is now available
// F1: media_create persists authorId so ownership checks subsequently succeed
// ---------------------------------------------------------------------------
// media_create — regression coverage for bug #14 / F1: the tool must
// be registered, and it must persist the caller's authorId so later
// ownership checks (update by owner vs. by a stranger) behave.
describe("media_create (bug #14 / F1)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("MCP exposes media_create", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const tools = await harness.client.listTools();
    const names = new Set(tools.tools.map((t) => t.name));
    expect(names.has("media_create")).toBe(true);
  });
  it("AUTHOR creates media; subsequent media_get returns it; same author can update; different author cannot", async () => {
    // AUTHOR creates the media item via media_create.
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const create = await harness.client.callTool({
      name: "media_create",
      arguments: {
        filename: "logo.png",
        mimeType: "image/png",
        storageKey: "media/logo-key",
        size: 4096,
      },
    });
    expect(create.isError, extractText(create)).toBeFalsy();
    const created = extractJson<{ item: { id: string; filename: string } }>(create);
    expect(created.item.filename).toBe("logo.png");
    // media_get returns the same id.
    const got = await harness.client.callTool({
      name: "media_get",
      arguments: { id: created.item.id },
    });
    expect(got.isError, extractText(got)).toBeFalsy();
    const fetched = extractJson<{ item: { id: string } }>(got);
    expect(fetched.item.id).toBe(created.item.id);
    // Same AUTHOR can update — proves authorId was persisted.
    const ownUpdate = await harness.client.callTool({
      name: "media_update",
      arguments: { id: created.item.id, alt: "company logo" },
    });
    expect(ownUpdate.isError, extractText(ownUpdate)).toBeFalsy();
    // Tear down the first session before reconnecting as someone else.
    await harness.cleanup();
    // A different AUTHOR is denied.
    harness = await connectMcpHarness({
      db,
      userId: OTHER_AUTHOR_ID,
      userRole: Role.AUTHOR,
    });
    const otherUpdate = await harness.client.callTool({
      name: "media_update",
      arguments: { id: created.item.id, alt: "intruder caption" },
    });
    expect(otherUpdate.isError).toBe(true);
    expect(extractText(otherUpdate)).toMatch(/insufficient|permission|forbidden/i);
  });
});

View File

@@ -0,0 +1,421 @@
/**
* MCP menu tools — comprehensive integration tests.
*
* Covers:
* - menu_list
* - menu_get
*
* Plus regression for bug #15 (no menu mutation tools — gap).
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
const SUBSCRIBER_ID = "user_subscriber";
/**
 * Seed a menu row and (optionally) its items directly into the DB,
 * bypassing the MCP tools under test.
 *
 * Items default to type "custom" with their array position as
 * sort_order when none is given. Returns the new menu's id.
 */
async function seedMenu(
  db: Kysely<Database>,
  name: string,
  label: string,
  items: Array<{
    label: string;
    url?: string;
    sort_order?: number;
    parent_id?: string | null;
  }> = [],
): Promise<string> {
  const timestamp = new Date().toISOString();
  const menuId = ulid();
  await db
    .insertInto("_emdash_menus" as never)
    .values({
      id: menuId,
      name,
      label,
      created_at: timestamp,
      updated_at: timestamp,
    } as never)
    .execute();
  // Insert items sequentially so ulid ordering stays deterministic.
  let position = 0;
  for (const item of items) {
    const row = {
      id: ulid(),
      menu_id: menuId,
      label: item.label,
      custom_url: item.url ?? null,
      type: "custom",
      sort_order: item.sort_order ?? position,
      parent_id: item.parent_id ?? null,
      created_at: timestamp,
    };
    await db
      .insertInto("_emdash_menu_items" as never)
      .values(row as never)
      .execute();
    position += 1;
  }
  return menuId;
}
// ---------------------------------------------------------------------------
// menu_list
// ---------------------------------------------------------------------------
// menu_list — listing order, per-menu item counts, and read access.
describe("menu_list", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns empty list when no menus exist", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "menu_list",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    // Assert directly on the payload. The previous form
    // `Array.isArray(data) ? data : []` passed vacuously whenever the
    // handler returned a non-array shape (e.g. `{ items: [] }`), so a
    // response-shape regression would have gone unnoticed.
    const data = extractJson<unknown[]>(result);
    expect(data).toEqual([]);
  });
  it("lists multiple menus in alphabetical order", async () => {
    await seedMenu(db, "main", "Main Menu");
    await seedMenu(db, "footer", "Footer");
    await seedMenu(db, "sidebar", "Sidebar");
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "menu_list",
      arguments: {},
    });
    // Seeded out of order on purpose; the tool must sort by name.
    const data = extractJson<Array<{ name: string; label: string }>>(result);
    expect(data.map((m) => m.name)).toEqual(["footer", "main", "sidebar"]);
  });
  it("itemCount reflects per-menu item count (LEFT JOIN correctness)", async () => {
    // handleMenuList uses a single LEFT JOIN + GROUP BY for the count.
    // A regression to INNER JOIN would drop empty menus; a regression
    // in the count column or join key would silently report wrong
    // numbers per menu. Seed three menus with known, distinct counts.
    await seedMenu(db, "empty", "Empty");
    await seedMenu(db, "single", "Single", [{ label: "Home", url: "/" }]);
    await seedMenu(db, "triple", "Triple", [
      { label: "Home", url: "/" },
      { label: "About", url: "/about" },
      { label: "Blog", url: "/blog" },
    ]);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({ name: "menu_list", arguments: {} });
    const data = extractJson<Array<{ name: string; itemCount: number }>>(result);
    const empty = data.find((m) => m.name === "empty");
    const single = data.find((m) => m.name === "single");
    const triple = data.find((m) => m.name === "triple");
    expect(empty?.itemCount).toBe(0);
    expect(single?.itemCount).toBe(1);
    expect(triple?.itemCount).toBe(3);
    // Empty menu must still be present — guards against an INNER JOIN
    // regression where it would disappear.
    expect(data.map((m) => m.name)).toContain("empty");
  });
  it("any logged-in user can list menus", async () => {
    await seedMenu(db, "main", "Main");
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const result = await harness.client.callTool({
      name: "menu_list",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// menu_get
// ---------------------------------------------------------------------------
// menu_get — item ordering, NOT_FOUND shape, empty menus, and read
// access for low-privilege roles.
describe("menu_get", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  // Small wrapper so each test reads as "fetch this menu by name".
  const getMenu = (menuName: string) =>
    harness.client.callTool({ name: "menu_get", arguments: { name: menuName } });
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns menu with items in sort order", async () => {
    await seedMenu(db, "main", "Main", [
      { label: "Home", url: "/", sort_order: 0 },
      { label: "Blog", url: "/blog", sort_order: 1 },
      { label: "About", url: "/about", sort_order: 2 },
    ]);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const response = await getMenu("main");
    expect(response.isError, extractText(response)).toBeFalsy();
    const menu = extractJson<{
      name: string;
      items: Array<{ label: string; sort_order: number }>;
    }>(response);
    expect(menu.name).toBe("main");
    expect(menu.items).toHaveLength(3);
    // Items come back ordered by their seeded sort_order values.
    expect(menu.items.map((entry) => entry.label)).toEqual(["Home", "Blog", "About"]);
  });
  it("returns NOT_FOUND error for missing menu", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const response = await getMenu("ghost");
    expect(response.isError).toBe(true);
    expect(extractText(response)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
    expect(extractText(response)).toContain("ghost");
  });
  it("empty menu returns empty items array", async () => {
    await seedMenu(db, "empty", "Empty Menu", []);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const response = await getMenu("empty");
    expect(response.isError, extractText(response)).toBeFalsy();
    const menu = extractJson<{ items: unknown[] }>(response);
    expect(menu.items).toEqual([]);
  });
  it("any logged-in user can get a menu", async () => {
    await seedMenu(db, "main", "Main", [{ label: "Home", url: "/" }]);
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const response = await getMenu("main");
    expect(response.isError, extractText(response)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// Bug #15 / F6 / F12 — happy paths for menu mutation tools.
// ---------------------------------------------------------------------------
// Menu mutation tools (bug #15), plus F6 (delete cascades to items)
// and F12 (nested items via parentIndex). All tests run as ADMIN with
// a shared harness connected in beforeEach.
describe("menu mutations (bug #15 / F6 / F12)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("MCP exposes menu_create, menu_update, menu_set_items, menu_delete", async () => {
    const tools = await harness.client.listTools();
    const names = new Set(tools.tools.map((t) => t.name));
    expect(names.has("menu_create")).toBe(true);
    expect(names.has("menu_update")).toBe(true);
    expect(names.has("menu_set_items")).toBe(true);
    expect(names.has("menu_delete")).toBe(true);
  });
  it("menu_create + menu_get round-trip", async () => {
    const create = await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main Menu" },
    });
    expect(create.isError, extractText(create)).toBeFalsy();
    const get = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    expect(get.isError, extractText(get)).toBeFalsy();
    // A freshly created menu must round-trip its fields and start empty.
    const menu = extractJson<{ name: string; label: string; items: unknown[] }>(get);
    expect(menu.name).toBe("main");
    expect(menu.label).toBe("Main Menu");
    expect(menu.items).toEqual([]);
  });
  it("menu_create with a duplicate name returns CONFLICT", async () => {
    await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main" },
    });
    const dup = await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Other" },
    });
    expect(dup.isError).toBe(true);
    expect(extractText(dup)).toMatch(/CONFLICT|already exists/i);
  });
  it("menu_update changes the label", async () => {
    await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Original" },
    });
    const update = await harness.client.callTool({
      name: "menu_update",
      arguments: { name: "main", label: "Renamed" },
    });
    expect(update.isError, extractText(update)).toBeFalsy();
    const get = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    const menu = extractJson<{ label: string }>(get);
    expect(menu.label).toBe("Renamed");
  });
  it("menu_set_items with empty list clears all items", async () => {
    await seedMenu(db, "main", "Main", [
      { label: "Home", url: "/" },
      { label: "Blog", url: "/blog" },
    ]);
    const result = await harness.client.callTool({
      name: "menu_set_items",
      arguments: { name: "main", items: [] },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const get = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    const menu = extractJson<{ items: unknown[] }>(get);
    expect(menu.items).toEqual([]);
  });
  it("menu_set_items supports 3-level nesting via parentIndex chain", async () => {
    await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main" },
    });
    // parentIndex refers to an earlier position in the same items
    // array: 1 → 0 and 2 → 1 builds a three-level chain.
    const result = await harness.client.callTool({
      name: "menu_set_items",
      arguments: {
        name: "main",
        items: [
          { label: "Root", type: "custom", customUrl: "/" },
          { label: "Child", type: "custom", customUrl: "/child", parentIndex: 0 },
          { label: "Grandchild", type: "custom", customUrl: "/gc", parentIndex: 1 },
        ],
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const get = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    const menu = extractJson<{
      items: Array<{ id: string; label: string; parent_id: string | null; sort_order: number }>;
    }>(get);
    expect(menu.items).toHaveLength(3);
    // Check that parentIndex positions were resolved to persisted
    // parent_id links.
    const byLabel = new Map(menu.items.map((i) => [i.label, i]));
    const root = byLabel.get("Root");
    const child = byLabel.get("Child");
    const grand = byLabel.get("Grandchild");
    expect(root?.parent_id).toBeNull();
    expect(child?.parent_id).toBe(root?.id);
    expect(grand?.parent_id).toBe(child?.id);
  });
  it("menu_set_items rejects parentIndex >= i (must be earlier)", async () => {
    await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main" },
    });
    const result = await harness.client.callTool({
      name: "menu_set_items",
      arguments: {
        name: "main",
        items: [
          { label: "A", type: "custom", customUrl: "/a", parentIndex: 0 }, // self-ref
        ],
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/VALIDATION_ERROR|parentIndex/);
  });
  it("F6: menu_delete removes both menu and items (D1 cascade safe)", async () => {
    await harness.client.callTool({
      name: "menu_create",
      arguments: { name: "main", label: "Main" },
    });
    await harness.client.callTool({
      name: "menu_set_items",
      arguments: {
        name: "main",
        items: [
          { label: "A", type: "custom", customUrl: "/a" },
          { label: "B", type: "custom", customUrl: "/b" },
          { label: "C", type: "custom", customUrl: "/c" },
        ],
      },
    });
    // Sanity: menu_get sees 3 items.
    const before = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    const menuBefore = extractJson<{
      id: string;
      items: unknown[];
    }>(before);
    expect(menuBefore.items).toHaveLength(3);
    // Delete.
    const del = await harness.client.callTool({
      name: "menu_delete",
      arguments: { name: "main" },
    });
    expect(del.isError, extractText(del)).toBeFalsy();
    // Items table is empty for that menu_id — queried directly in the
    // DB so a dangling-row bug cannot hide behind the API.
    const orphans = await db
      .selectFrom("_emdash_menu_items" as never)
      .select(["id" as never])
      .where("menu_id" as never, "=", menuBefore.id as never)
      .execute();
    expect(orphans).toEqual([]);
    // menu_get returns NOT_FOUND.
    const after = await harness.client.callTool({
      name: "menu_get",
      arguments: { name: "main" },
    });
    expect(after.isError).toBe(true);
    expect(extractText(after)).toMatch(/NOT_FOUND/);
  });
});

View File

@@ -0,0 +1,331 @@
/**
* MCP ownership / authorization integration tests.
*
* The MCP server's `extractContentAuthorId()` returns "" (empty string)
* for content with null authorId — mirroring the REST handler. Then
* `canActOnOwn(user, "", own, any)` defers to the "any" permission so
* EDITOR+ can edit seed-imported content while CONTRIBUTOR/AUTHOR are
* denied with a clean permission error.
*
* These tests cover every permutation of role × ownership × null-author.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { connectMcpHarness, extractText, type McpHarness } from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
const EDITOR_ID = "user_editor";
const AUTHOR_ID = "user_author";
const CONTRIBUTOR_ID = "user_contributor";
const NULL_AUTHOR_ERROR = /no.*authorId|content has no authorId/i;
// Bug #1: content rows seeded without an author carry authorId = null.
// Each sub-suite below exercises one mutating tool across the role
// matrix; the final sub-suite guards that normal (non-null) ownership
// behavior is unchanged.
describe("MCP ownership — null authorId (bug #1)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  // Create a published post directly via the repository, optionally
  // omitting authorId entirely (→ null in the row).
  async function seedItemWithAuthor(authorId: string | null): Promise<string> {
    const repo = new ContentRepository(db);
    const item = await repo.create({
      type: "post",
      data: { title: "Seeded Post" },
      slug: `seeded-${Math.random().toString(36).slice(2, 8)}`,
      status: "published",
      ...(authorId !== null ? { authorId } : {}),
    });
    return item.id;
  }
  // Connect the harness as the named role, using the id/role maps below.
  async function connect(role: keyof typeof userIdByRole): Promise<void> {
    harness = await connectMcpHarness({
      db,
      userId: userIdByRole[role],
      userRole: roleByName[role],
    });
  }
  const userIdByRole = {
    admin: ADMIN_ID,
    editor: EDITOR_ID,
    author: AUTHOR_ID,
    contributor: CONTRIBUTOR_ID,
  } as const;
  const roleByName = {
    admin: Role.ADMIN,
    editor: Role.EDITOR,
    author: Role.AUTHOR,
    contributor: Role.CONTRIBUTOR,
  } as const;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  // ----- content_update -----
  describe("content_update", () => {
    it("ADMIN can update content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("admin");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Updated by admin" } },
      });
      // Currently fails with NULL_AUTHOR_ERROR. After fix: succeeds.
      expect(result.isError, extractText(result)).toBeFalsy();
    });
    it("EDITOR can update content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("editor");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Updated by editor" } },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
    it("AUTHOR cannot update content with null authorId (no ownership claim)", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("author");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Should fail" } },
      });
      // AUTHOR has only content:edit_own — without an authorId match,
      // they have no "own" claim and lack content:edit_any.
      expect(result.isError).toBe(true);
      // Negative: NOT the null-author internal error.
      expect(extractText(result)).not.toMatch(NULL_AUTHOR_ERROR);
      // Positive: clean permission error with the structured code.
      const meta = (result as { _meta?: { code?: string } })._meta;
      expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
    });
    it("CONTRIBUTOR cannot update content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("contributor");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Should fail" } },
      });
      expect(result.isError).toBe(true);
      expect(extractText(result)).not.toMatch(NULL_AUTHOR_ERROR);
      const meta = (result as { _meta?: { code?: string } })._meta;
      expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
    });
  });
  // ----- content_delete -----
  describe("content_delete (trash)", () => {
    it("ADMIN can trash content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("admin");
      const result = await harness.client.callTool({
        name: "content_delete",
        arguments: { collection: "post", id },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
    it("AUTHOR cannot trash content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("author");
      const result = await harness.client.callTool({
        name: "content_delete",
        arguments: { collection: "post", id },
      });
      expect(result.isError).toBe(true);
      expect(extractText(result)).not.toMatch(NULL_AUTHOR_ERROR);
      const meta = (result as { _meta?: { code?: string } })._meta;
      expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
    });
  });
  // ----- content_publish / content_unpublish -----
  describe("publish / unpublish", () => {
    it("ADMIN can publish content with null authorId", async () => {
      // Create as draft so publish is meaningful
      const repo = new ContentRepository(db);
      const item = await repo.create({
        type: "post",
        data: { title: "Draft" },
        slug: "draft-null-author",
        status: "draft",
      });
      await connect("admin");
      const result = await harness.client.callTool({
        name: "content_publish",
        arguments: { collection: "post", id: item.id },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
    it("ADMIN can unpublish content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      await connect("admin");
      const result = await harness.client.callTool({
        name: "content_unpublish",
        arguments: { collection: "post", id },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
  });
  // ----- content_schedule -----
  describe("content_schedule", () => {
    it("ADMIN can schedule content with null authorId", async () => {
      const repo = new ContentRepository(db);
      const item = await repo.create({
        type: "post",
        data: { title: "Sched draft" },
        slug: "sched-null-author",
        status: "draft",
      });
      await connect("admin");
      // Schedule one hour in the future so the timestamp is valid.
      const future = new Date(Date.now() + 60 * 60 * 1000).toISOString();
      const result = await harness.client.callTool({
        name: "content_schedule",
        arguments: { collection: "post", id: item.id, scheduledAt: future },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
  });
  // ----- content_restore (from trash) -----
  describe("content_restore", () => {
    it("ADMIN can restore trashed content with null authorId", async () => {
      const id = await seedItemWithAuthor(null);
      // Trash via repo to bypass MCP (which we're testing)
      const repo = new ContentRepository(db);
      await repo.delete("post", id);
      await connect("admin");
      const result = await harness.client.callTool({
        name: "content_restore",
        arguments: { collection: "post", id },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
  });
  // ----- Sanity checks: ownership behavior unchanged for non-null cases -----
  describe("regression guard — ownership still enforced when authorId is set", () => {
    it("AUTHOR can update their own content (authorId matches)", async () => {
      const id = await seedItemWithAuthor(AUTHOR_ID);
      await connect("author");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Updated own" } },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
    it("AUTHOR cannot update someone else's content (authorId set to other user)", async () => {
      const id = await seedItemWithAuthor("user_someone_else");
      await connect("author");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Updated other" } },
      });
      expect(result.isError).toBe(true);
    });
    it("EDITOR can update anyone's content (any-permission)", async () => {
      const id = await seedItemWithAuthor("user_someone_else");
      await connect("editor");
      const result = await harness.client.callTool({
        name: "content_update",
        arguments: { collection: "post", id, data: { title: "Editor override" } },
      });
      expect(result.isError, extractText(result)).toBeFalsy();
    });
  });
});
// Denial errors must read as permission problems: no internal column
// names in the message, and a machine-readable code in _meta.
describe("MCP ownership — error shape consistency", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("denied-by-permissions error does NOT mention 'authorId' (internal detail)", async () => {
    // Seed a published post (no explicit author) directly via the repo.
    const seeded = await new ContentRepository(db).create({
      type: "post",
      data: { title: "Test" },
      slug: "perm-test",
      status: "published",
    });
    harness = await connectMcpHarness({
      db,
      userId: AUTHOR_ID,
      userRole: Role.AUTHOR,
    });
    const denied = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id: seeded.id, data: { title: "Nope" } },
    });
    expect(denied.isError).toBe(true);
    // Negative: "authorId" is an internal column name and must not leak
    // to the user-facing message.
    expect(extractText(denied)).not.toMatch(/authorId/);
    // Positive: the response carries a permissions code so callers can
    // distinguish "you can't do this" from any other failure mode.
    const meta = (denied as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
  });
});

View File

@@ -0,0 +1,264 @@
/**
* MCP pagination / cursor tests.
*
* Malformed cursors must produce a structured `INVALID_CURSOR` error
* instead of silently returning the first page (the latter would let UI
* pagination bugs re-fetch the whole table without any signal).
*
 * `decodeCursor()` throws `InvalidCursorError` on invalid input; the
 * handler catches that and translates it to `INVALID_CURSOR`. The MCP boundary also
* applies `z.string().min(1).max(2048)` to reject obvious DoS attempts
* before they reach the decoder.
*
* Tests cover the MCP-visible list surface: content_list,
* content_list_trashed, media_list, revision_list, taxonomy_list_terms.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
const ADMIN_ID = "user_admin";
const INVALID_CURSOR = /cursor|invalid|malformed/i;
/**
 * Create `count` draft posts owned by the admin user, returning their
 * ids in creation order.
 *
 * Posts are created sequentially (not Promise.all) so ids/timestamps
 * come out in a deterministic order for pagination assertions.
 */
async function seedPosts(db: Kysely<Database>, count: number, prefix = "post"): Promise<string[]> {
  const repo = new ContentRepository(db);
  const created: string[] = [];
  for (let index = 0; index < count; index++) {
    const post = await repo.create({
      type: "post",
      data: { title: `${prefix} ${index}` },
      slug: `${prefix}-${index}`,
      status: "draft",
      authorId: ADMIN_ID,
    });
    created.push(post.id);
  }
  return created;
}
describe("MCP cursor pagination — content_list (bug #12)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    // All tests in this suite run as ADMIN — permission gates are covered elsewhere.
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects garbage cursor with a structured error (does NOT silently return first page)", async () => {
    await seedPosts(db, 5);
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", cursor: "obviously-malformed-cursor" },
    });
    // Currently: returns the full first page.
    // After fix: returns isError with INVALID_CURSOR-style message.
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(INVALID_CURSOR);
  });
  it("rejects empty-string cursor with a structured error", async () => {
    await seedPosts(db, 5);
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", cursor: "" },
    });
    // An empty cursor is unambiguously invalid — should error rather
    // than silently treating it as "no cursor".
    expect(result.isError).toBe(true);
  });
  it("rejects base64-decodable but structurally-wrong cursor", async () => {
    await seedPosts(db, 5);
    // Valid base64 but doesn't match the expected `{orderValue, id}` shape.
    const bogus = Buffer.from(JSON.stringify({ wrong: "shape" })).toString("base64");
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", cursor: bogus },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(INVALID_CURSOR);
  });
  it("rejects cursor with non-string id field", async () => {
    await seedPosts(db, 5);
    // Correct top-level shape, wrong field type — exercises per-field validation.
    const bogus = Buffer.from(JSON.stringify({ orderValue: "x", id: 42 })).toString("base64");
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", cursor: bogus },
    });
    expect(result.isError).toBe(true);
  });
  it("valid cursor returns the correct next page (regression guard)", async () => {
    await seedPosts(db, 5);
    const first = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 2 },
    });
    const firstData = extractJson<{
      items: Array<{ id: string }>;
      nextCursor?: string;
    }>(first);
    expect(firstData.items).toHaveLength(2);
    expect(firstData.nextCursor).toBeTruthy();
    const second = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 2, cursor: firstData.nextCursor },
    });
    const secondData = extractJson<{
      items: Array<{ id: string }>;
    }>(second);
    expect(secondData.items).toHaveLength(2);
    // Different ids than the first page
    const firstIds = firstData.items.map((i) => i.id);
    const secondIds = secondData.items.map((i) => i.id);
    for (const id of secondIds) {
      expect(firstIds).not.toContain(id);
    }
  });
  it("rejects oversized cursor without attempting to decode it (DoS guard)", async () => {
    await seedPosts(db, 3);
    // Cursors we issue are well under 200 chars. A multi-KB cursor is
    // almost certainly an attacker probing the base64 decoder. The
    // MCP input schema caps cursors at 2048 chars; this test forces a
    // rejection at the schema boundary rather than letting the
    // decoder allocate against a giant string.
    const huge = "A".repeat(10_000);
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", cursor: huge },
    });
    expect(result.isError).toBe(true);
  });
  it("malformed cursor on second page does not skip back to start", async () => {
    await seedPosts(db, 5);
    const first = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 2 },
    });
    const firstData = extractJson<{ items: Array<{ id: string }>; nextCursor?: string }>(first);
    // Tamper with the cursor — swap the final character for a guaranteed
    // DIFFERENT one. (The previous `slice(0, -1) + "X"` was a no-op whenever
    // the issued cursor already ended in "X" — base64 output can — which made
    // this test intermittently send the *valid* cursor and fail flakily.)
    const tampered = firstData.nextCursor
      ? firstData.nextCursor.slice(0, -1) + (firstData.nextCursor.endsWith("X") ? "Y" : "X")
      : "garbage";
    const result = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 2, cursor: tampered },
    });
    // Bug today: returns first page again (callers re-process duplicates).
    // After fix: errors so callers can detect the bug.
    expect(result.isError).toBe(true);
  });
});
describe("MCP cursor pagination — other list tools (bug #12 propagation)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("content_list_trashed rejects malformed cursor", async () => {
    // Trash a few posts first so the trashed listing has rows to paginate.
    const seeded = await seedPosts(db, 3);
    const repo = new ContentRepository(db);
    for (const contentId of seeded) {
      await repo.delete("post", contentId);
    }
    const trashedResult = await harness.client.callTool({
      name: "content_list_trashed",
      arguments: { collection: "post", cursor: "garbage" },
    });
    expect(trashedResult.isError).toBe(true);
  });
  // (revision_list cursor test deleted: the tool's input schema doesn't
  // declare a cursor parameter, and Zod's default behavior is to drop
  // unknown keys silently — so the previous "expect(result).toBeDefined()"
  // was meaningless. Forcing schema strict-mode is out of scope.)
  it("media_list rejects malformed cursor", async () => {
    const mediaResult = await harness.client.callTool({
      name: "media_list",
      arguments: { cursor: "garbage" },
    });
    expect(mediaResult.isError).toBe(true);
  });
});
describe("MCP cursor pagination — limit clamping (regression guard)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("limit beyond max is clamped, not rejected", async () => {
    await seedPosts(db, 3);
    // Per AGENTS.md: max limit is 100. Higher should be clamped, not error.
    const response = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post", limit: 1000 },
    });
    // Two acceptable outcomes: the inputSchema rejects the oversized limit
    // (validation error) or the handler clamps it. What is NOT acceptable is
    // silently honoring 1000 against a real backend.
    if (!response.isError) {
      const payload = extractJson<{ items: unknown[] }>(response);
      expect(payload.items.length).toBeLessThanOrEqual(100);
      return;
    }
    // Rejection branch: must be a validation error, not a generic 500.
    expect(extractText(response)).toMatch(/limit|max|exceed|invalid/i);
  });
});

View File

@@ -0,0 +1,560 @@
/**
* MCP content_publish + content_update field-coverage tests.
*
* Pins the contracts for:
*
* - **#622** `content_publish` accepts an optional `publishedAt` ISO 8601
* datetime that overrides the publication timestamp. The behavior is
 *   gated on `content:publish_any` because backdating overwrites the
 *   historical record. Without `publishedAt`, idempotent re-publish preserves the
* existing timestamp (regression guard for the COALESCE behavior).
*
* - **#621** `content_update` persists `seo`, `bylines`, and `publishedAt`
* alongside field updates. The MCP tool exposes the same fields the REST
* API has accepted since #777; before this PR the tool's input schema
* silently dropped them.
*
* Failure modes covered:
* - non-admin (AUTHOR) trying to set `publishedAt` -> INSUFFICIENT_PERMISSIONS
* - SEO on a collection that doesn't have SEO enabled -> VALIDATION_ERROR
* - bylines pointing at a non-existent byline ID -> handler-level FK error
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { BylineRepository } from "../../../src/database/repositories/byline.js";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
isErrorResult,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
// Fixture user ids provisioned by the shared test-database helpers.
const ADMIN_ID = "user_admin";
const AUTHOR_ID = "user_author";
// ---------------------------------------------------------------------------
// content_publish — publishedAt override (#622)
// ---------------------------------------------------------------------------
// Suite connects as ADMIN by default; individual tests reconnect the harness
// as AUTHOR to exercise the `content:publish_any` permission gate.
describe("MCP content_publish — publishedAt override (#622)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("backdates publishedAt when caller passes an explicit ISO timestamp", async () => {
    // Create a draft first — publish is a separate tool call.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Imported post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const PAST = "2020-01-15T10:00:00.000Z";
    const result = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id, publishedAt: PAST },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const item = extractJson<{ item: { publishedAt: string | null; status: string } }>(result).item;
    expect(item.status).toBe("published");
    // Repository normalizes to ISO so we compare via Date round-trip.
    expect(new Date(item.publishedAt!).toISOString()).toBe(PAST);
  });
  it("re-publishing with a new publishedAt overwrites the previous timestamp", async () => {
    // First publish without an override — gets a current timestamp.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const first = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const firstTs = extractJson<{ item: { publishedAt: string } }>(first).item.publishedAt;
    expect(firstTs).toBeTruthy();
    // Re-publish with explicit override — should overwrite.
    const PAST = "2019-06-01T00:00:00.000Z";
    const second = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id, publishedAt: PAST },
    });
    const secondItem = extractJson<{ item: { publishedAt: string | null } }>(second).item;
    expect(new Date(secondItem.publishedAt!).toISOString()).toBe(PAST);
    expect(secondItem.publishedAt).not.toBe(firstTs);
  });
  it("rejects non-ISO-8601 publishedAt at the schema layer", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const result = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id, publishedAt: "yesterday" },
    });
    // Schema validation produces an isError envelope. We assert the schema's
    // own message wording — not just that the field name appears anywhere
    // (which would let an echoed input or stack trace satisfy the test for
    // the wrong reason).
    expect(isErrorResult(result)).toBe(true);
    expect(extractText(result)).toContain("must be an ISO 8601 datetime");
  });
  it("accepts ISO 8601 with explicit timezone offset (offset: true)", async () => {
    // Positive companion to the rejection test: pins that the schema's
    // `offset: true` actually accepts non-Z offsets, not just Z.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const result = await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "post",
        id,
        publishedAt: "2020-01-15T10:00:00+05:30",
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const item = extractJson<{ item: { publishedAt: string | null } }>(result).item;
    // Date round-trip normalizes the offset to UTC.
    expect(new Date(item.publishedAt!).toISOString()).toBe(
      new Date("2020-01-15T10:00:00+05:30").toISOString(),
    );
  });
  it("requires content:publish_any to set publishedAt — AUTHOR (owner) is denied", async () => {
    // Switch to AUTHOR role: AUTHOR has publish_own but NOT publish_any.
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Author's post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Plain publish (no publishedAt) — AUTHOR can do this for their own item.
    const ok = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    expect(ok.isError, extractText(ok)).toBeFalsy();
    // Publish with backdated publishedAt — AUTHOR is denied even on their
    // own item, because backdating overwrites historical record.
    const denied = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id, publishedAt: "2020-01-01T00:00:00.000Z" },
    });
    expect(isErrorResult(denied)).toBe(true);
    expect(extractText(denied)).toContain("INSUFFICIENT_PERMISSIONS");
    expect(extractText(denied).toLowerCase()).toContain("publish_any");
  });
  it("AUTHOR cannot publish someone else's item with publishedAt (ownership denies first)", async () => {
    // First create as ADMIN so the item belongs to a different user.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Admin's post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Switch to AUTHOR — now they're not the owner.
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const denied = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id, publishedAt: "2020-01-01T00:00:00.000Z" },
    });
    // Whichever check fires first (ownership or publishedAt gate), the
    // denial is the correct outcome. We pin the structural failure shape,
    // not the specific code, because either order is correct.
    expect(isErrorResult(denied)).toBe(true);
    expect(extractText(denied)).toContain("INSUFFICIENT_PERMISSIONS");
  });
  it("idempotent re-publish without publishedAt preserves the original timestamp", async () => {
    // Regression guard: the COALESCE preserve-on-re-publish behavior
    // shouldn't change just because the repo signature now accepts an
    // optional override.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const first = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const firstTs = extractJson<{ item: { publishedAt: string } }>(first).item.publishedAt;
    // Wait so a regression that always uses `now` would surface as a new ts.
    await new Promise((r) => setTimeout(r, 5));
    const second = await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const secondTs = extractJson<{ item: { publishedAt: string } }>(second).item.publishedAt;
    expect(secondTs).toBe(firstTs);
  });
});
// ---------------------------------------------------------------------------
// content_update — seo / bylines / publishedAt (#621)
// ---------------------------------------------------------------------------
// Fixtures: SEO is enabled on `post` (but NOT `page`) and two bylines are
// pre-created so content_update can attach them. Default role is ADMIN;
// AUTHOR reconnects happen per-test to probe the publishedAt gate.
describe("MCP content_update — seo / bylines / publishedAt (#621)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  let bylineId: string;
  let bylineId2: string;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    // Enable SEO on the post collection (mirrors integration/seo/seo.test.ts).
    await db
      .updateTable("_emdash_collections")
      .set({ has_seo: 1 })
      .where("slug", "=", "post")
      .execute();
    // Pre-create two bylines so we can attach them via content_update.
    const bylineRepo = new BylineRepository(db);
    const b1 = await bylineRepo.create({
      slug: "jane-doe",
      displayName: "Jane Doe",
      isGuest: false,
    });
    const b2 = await bylineRepo.create({
      slug: "john-smith",
      displayName: "John Smith",
      isGuest: false,
    });
    bylineId = b1.id;
    bylineId2 = b2.id;
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects SEO canonical URL with non-http scheme (XSS guard)", async () => {
    // Pins that the MCP `content_update.seo` schema reuses the REST
    // `contentSeoInput` schema, which validates `canonical` through
    // `httpUrl` (rejects javascript:/data: URIs that would otherwise
    // become stored XSS in the rendered <link rel="canonical">).
    // A regression that swapped this back to a plain `z.string()` would
    // silently accept the malicious URL and persist it.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        seo: { canonical: "javascript:alert(1)" },
      },
    });
    expect(isErrorResult(result)).toBe(true);
  });
  it("persists SEO fields passed to content_update", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        seo: {
          title: "SEO Title",
          description: "SEO description goes here.",
          noIndex: true,
        },
      },
    });
    expect(updated.isError, extractText(updated)).toBeFalsy();
    // Round-trip via content_get — confirms persistence, not just the
    // echo from the update response.
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{
      item: {
        seo?: {
          title: string | null;
          description: string | null;
          noIndex: boolean;
        };
      };
    }>(got).item;
    expect(item.seo?.title).toBe("SEO Title");
    expect(item.seo?.description).toBe("SEO description goes here.");
    expect(item.seo?.noIndex).toBe(true);
  });
  it("persists bylines passed to content_update and sets primary byline", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        bylines: [
          { bylineId, roleLabel: "Author" },
          { bylineId: bylineId2, roleLabel: "Editor" },
        ],
      },
    });
    expect(updated.isError, extractText(updated)).toBeFalsy();
    // Round-trip via content_get rather than relying on the update response
    // echoing the input — confirms persistence rather than just the in-memory
    // pass-through. (A regression that silently dropped the DB write but
    // echoed the byline list in the response would still pass an
    // update-response-only assertion.)
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{
      item: {
        primaryBylineId: string | null;
        bylines?: Array<{ byline: { id: string }; roleLabel: string | null }>;
      };
    }>(got).item;
    // First entry becomes the primary byline.
    expect(item.primaryBylineId).toBe(bylineId);
    expect(item.bylines).toHaveLength(2);
    expect(item.bylines?.[0]?.byline.id).toBe(bylineId);
    expect(item.bylines?.[0]?.roleLabel).toBe("Author");
    expect(item.bylines?.[1]?.byline.id).toBe(bylineId2);
  });
  it("backdates publishedAt when content_update receives one", async () => {
    // Publish first (so the item has a published_at to overwrite).
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const PAST = "2018-03-15T12:00:00.000Z";
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, publishedAt: PAST },
    });
    expect(updated.isError, extractText(updated)).toBeFalsy();
    // Round-trip via content_get to confirm persistence.
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{ item: { publishedAt: string | null } }>(got).item;
    expect(new Date(item.publishedAt!).toISOString()).toBe(PAST);
  });
  it("AUTHOR (owner) cannot set publishedAt via content_update", async () => {
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Author's post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // AUTHOR owns this item, so ownership passes — the publishedAt gate
    // fires next and denies. This pins that the gate fires regardless of
    // ownership (backdating overwrites historical record).
    const denied = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        publishedAt: "2020-01-01T00:00:00.000Z",
      },
    });
    expect(isErrorResult(denied)).toBe(true);
    expect(extractText(denied)).toContain("INSUFFICIENT_PERMISSIONS");
    expect(extractText(denied).toLowerCase()).toContain("publish_any");
  });
  it("AUTHOR cannot set publishedAt on someone else's item via content_update", async () => {
    // Create as ADMIN so the item belongs to someone else.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Admin's post" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    // Switch to AUTHOR — now they're not the owner.
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const denied = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        publishedAt: "2020-01-01T00:00:00.000Z",
      },
    });
    // Either ownership or the publishedAt gate denies — whichever fires
    // first. Both produce INSUFFICIENT_PERMISSIONS so the cross-product is
    // pinned without depending on check order.
    expect(isErrorResult(denied)).toBe(true);
    expect(extractText(denied)).toContain("INSUFFICIENT_PERMISSIONS");
  });
  it("rejects SEO on a collection without SEO enabled", async () => {
    // page collection from the test fixture does NOT have SEO.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "page", data: { title: "Page" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "page",
        id,
        seo: { title: "Should fail" },
      },
    });
    expect(isErrorResult(result)).toBe(true);
    expect(extractText(result)).toContain("VALIDATION_ERROR");
  });
  it("content_update with status='published' + publishedAt publishes AND backdates", async () => {
    // Pins the interaction between the status='published' branch and the
    // publishedAt override. The branch calls handleContentUpdate (which
    // writes published_at to the column) and then handleContentPublish
    // (which preserves the column via COALESCE). If either side regresses,
    // the backdated timestamp won't survive the publish.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    const PAST = "2017-04-20T00:00:00.000Z";
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        status: "published",
        publishedAt: PAST,
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    // Round-trip via content_get to confirm both status AND backdated
    // timestamp landed.
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{ item: { status: string; publishedAt: string | null } }>(got).item;
    expect(item.status).toBe("published");
    expect(new Date(item.publishedAt!).toISOString()).toBe(PAST);
  });
  it("seo / bylines / publishedAt and field updates apply atomically", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Original" } },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const PAST = "2021-06-01T00:00:00.000Z";
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: {
        collection: "post",
        id,
        data: { title: "Updated" },
        seo: { title: "SEO" },
        bylines: [{ bylineId }],
        publishedAt: PAST,
      },
    });
    expect(updated.isError, extractText(updated)).toBeFalsy();
    const got = await harness.client.callTool({
      name: "content_get",
      arguments: { collection: "post", id },
    });
    const item = extractJson<{
      item: {
        data: { title?: string };
        publishedAt: string | null;
        primaryBylineId: string | null;
        seo?: { title: string | null };
      };
    }>(got).item;
    // All four updates landed.
    expect(item.data.title).toBe("Updated");
    expect(item.seo?.title).toBe("SEO");
    expect(item.primaryBylineId).toBe(bylineId);
    expect(new Date(item.publishedAt!).toISOString()).toBe(PAST);
  });
});

View File

@@ -0,0 +1,712 @@
/**
* MCP schema tools — comprehensive integration tests.
*
* Covers every branch of:
* - schema_list_collections
* - schema_get_collection
* - schema_create_collection (also bug #11 — supports default)
* - schema_delete_collection
* - schema_create_field
* - schema_delete_field
*
* For each tool: happy path, edge cases (empty, missing, duplicate,
* reserved names), permission gates, error envelope (bug #3 — currently
* generic). Where the omnibus fix is expected to introduce structured
* errors, the assertions name the specific failure mode so they fail
* usefully today.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Fixture user ids provisioned by the shared test-database helpers.
const ADMIN_ID = "user_admin";
const EDITOR_ID = "user_editor";
const AUTHOR_ID = "user_author";
// Loose matcher for schema-validation failures; the MCP error envelope is
// currently generic (bug #3), so any plausible validation wording matches.
const VALIDATION_ERROR = /validation|invalid|reserved|pattern|format|required/i;
// ---------------------------------------------------------------------------
// schema_list_collections
// ---------------------------------------------------------------------------
describe("schema_list_collections", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  // Every test calls the same zero-argument tool; share the call shape.
  const listCollections = () =>
    harness.client.callTool({ name: "schema_list_collections", arguments: {} });
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns empty list when no collections exist", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const response = await listCollections();
    expect(response.isError, extractText(response)).toBeFalsy();
    const payload = extractJson<{ items: unknown[] }>(response);
    expect(payload.items).toEqual([]);
  });
  it("lists multiple collections in stable order", async () => {
    const schemaRegistry = new SchemaRegistry(db);
    await schemaRegistry.createCollection({ slug: "post", label: "Posts" });
    await schemaRegistry.createCollection({ slug: "page", label: "Pages" });
    await schemaRegistry.createCollection({ slug: "product", label: "Products" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const response = await listCollections();
    const payload = extractJson<{ items: Array<{ slug: string }> }>(response);
    // `.map` yields a fresh array, so sorting it in place is safe.
    const slugs = payload.items.map((collection) => collection.slug).sort();
    expect(slugs).toEqual(["page", "post", "product"]);
  });
  it("requires EDITOR or higher", async () => {
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const response = await listCollections();
    expect(response.isError).toBe(true);
  });
  it("EDITOR can list collections", async () => {
    await new SchemaRegistry(db).createCollection({ slug: "post", label: "Posts" });
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const response = await listCollections();
    expect(response.isError, extractText(response)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// schema_get_collection
// ---------------------------------------------------------------------------
describe("schema_get_collection", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    // Fixture: one collection with two fields, fetched by every test below.
    const schemaRegistry = new SchemaRegistry(db);
    await schemaRegistry.createCollection({
      slug: "post",
      label: "Posts",
      labelSingular: "Post",
      supports: ["drafts", "revisions"],
    });
    await schemaRegistry.createField("post", { slug: "title", label: "Title", type: "string" });
    await schemaRegistry.createField("post", { slug: "body", label: "Body", type: "text" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns collection with its fields", async () => {
    const response = await harness.client.callTool({
      name: "schema_get_collection",
      arguments: { slug: "post" },
    });
    expect(response.isError, extractText(response)).toBeFalsy();
    const fetched = extractJson<{
      slug: string;
      label: string;
      labelSingular?: string;
      supports: string[];
      fields: Array<{ slug: string; type: string }>;
    }>(response);
    expect(fetched.slug).toBe("post");
    expect(fetched.label).toBe("Posts");
    expect(fetched.labelSingular).toBe("Post");
    expect(fetched.supports).toEqual(expect.arrayContaining(["drafts", "revisions"]));
    // `.map` yields a fresh array, so sorting it in place is safe.
    const fieldSlugs = fetched.fields.map((field) => field.slug).sort();
    expect(fieldSlugs).toEqual(["body", "title"]);
  });
  it("returns NOT_FOUND-style error for missing collection", async () => {
    const response = await harness.client.callTool({
      name: "schema_get_collection",
      arguments: { slug: "nonexistent" },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    expect(message).toMatch(/COLLECTION_NOT_FOUND|\bnot found\b/i);
    expect(message).toContain("nonexistent");
  });
  it("requires EDITOR or higher", async () => {
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
    const response = await harness.client.callTool({
      name: "schema_get_collection",
      arguments: { slug: "post" },
    });
    expect(response.isError).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// schema_create_collection
// ---------------------------------------------------------------------------
describe("schema_create_collection", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    // harness is only assigned inside each test body; guard against a
    // failure before connectMcpHarness resolves.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("creates a collection with minimal arguments", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "article", label: "Articles" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const created = extractJson<{ slug: string; label: string }>(result);
    expect(created.slug).toBe("article");
    expect(created.label).toBe("Articles");
  });
  it("creates with all optional fields", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: {
        slug: "story",
        label: "Stories",
        labelSingular: "Story",
        description: "A story collection",
        icon: "book",
        supports: ["drafts", "revisions", "scheduling"],
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const created = extractJson<{
      slug: string;
      label: string;
      labelSingular?: string;
      description?: string;
      icon?: string;
      supports: string[];
    }>(result);
    expect(created.labelSingular).toBe("Story");
    expect(created.description).toBe("A story collection");
    expect(created.icon).toBe("book");
    // Compare order-insensitively; the response may normalize ordering.
    expect(created.supports.toSorted()).toEqual(["drafts", "revisions", "scheduling"].toSorted());
  });
  it("rejects slug that doesn't match the collection slug pattern", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "Has-Caps", label: "Bad" },
    });
    expect(result.isError).toBe(true);
  });
  it("rejects slug starting with a number", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "1posts", label: "Posts" },
    });
    expect(result.isError).toBe(true);
  });
  it("rejects empty slug", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "", label: "Empty" },
    });
    expect(result.isError).toBe(true);
  });
  it("rejects duplicate slug", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // First create succeeds; the second with the same slug must conflict.
    await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "post", label: "Posts" },
    });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "post", label: "Posts Two" },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/exist|duplicate|conflict|already/i);
  });
  it("rejects reserved slug like 'media' or 'options'", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // `options` is a reserved table name
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "options", label: "Options" },
    });
    expect(result.isError).toBe(true);
  });
  it("requires ADMIN role (EDITOR is blocked)", async () => {
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "blocked", label: "Blocked" },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/permission|insufficient/i);
  });
  it("accepts SQL-injection attempt as a normal slug rejection (regression)", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "drop_tables); --", label: "x" },
    });
    expect(result.isError).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// schema_delete_collection
// ---------------------------------------------------------------------------
describe("schema_delete_collection", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    // Each test starts from a "post" collection with one string field.
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("deletes an empty collection", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_delete_collection",
      arguments: { slug: "post" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    // Verify it's gone
    const list = await harness.client.callTool({
      name: "schema_list_collections",
      arguments: {},
    });
    const { items } = extractJson<{ items: Array<{ slug: string }> }>(list);
    expect(items.find((c) => c.slug === "post")).toBeUndefined();
  });
  it("rejects deleting a collection with content unless force is true", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "A" } },
    });
    const result = await harness.client.callTool({
      name: "schema_delete_collection",
      arguments: { slug: "post" },
    });
    expect(result.isError).toBe(true);
    // Tight: the error must say "has content" and tell the caller how
    // to override (force: true). Loose word matches like /empty|content/
    // passed against unrelated 500s, hiding regressions.
    const text = extractText(result);
    expect(text).toMatch(/has content/i);
    expect(text).toContain("force: true");
  });
  it("force deletes a collection with content", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "A" } },
    });
    const result = await harness.client.callTool({
      name: "schema_delete_collection",
      arguments: { slug: "post", force: true },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("returns clear NOT_FOUND error for missing collection", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "schema_delete_collection",
      arguments: { slug: "nonexistent" },
    });
    expect(result.isError).toBe(true);
    // Error must both classify the failure and name the offending slug.
    expect(extractText(result)).toMatch(/COLLECTION_NOT_FOUND|\bnot found\b/i);
    expect(extractText(result)).toContain("nonexistent");
  });
  it("requires ADMIN role", async () => {
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const result = await harness.client.callTool({
      name: "schema_delete_collection",
      arguments: { slug: "post" },
    });
    expect(result.isError).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// schema_create_field
// ---------------------------------------------------------------------------
describe("schema_create_field", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    // Unlike the sibling suites, the admin harness is connected up front;
    // the role test below reconnects with an EDITOR identity.
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("creates a string field with minimal args", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: { collection: "post", slug: "title", label: "Title", type: "string" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const field = extractJson<{ slug: string; type: string; required?: boolean }>(result);
    expect(field.slug).toBe("title");
    expect(field.type).toBe("string");
  });
  // One representative creation per simple field type: [type, field slug].
  it.each([
    ["text", "f_text"],
    ["number", "f_number"],
    ["integer", "f_integer"],
    ["boolean", "f_bool"],
    ["datetime", "f_dt"],
    ["portableText", "f_portable_text"],
    ["json", "f_json"],
    ["slug", "f_slug"],
  ])("creates a %s field", async (fieldType, slug) => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: { collection: "post", slug, label: fieldType, type: fieldType },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("creates a select field with options", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "priority",
        label: "Priority",
        type: "select",
        validation: { options: ["low", "high"] },
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("creates a reference field with target collection", async () => {
    // The reference target must exist first.
    await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "page", label: "Pages" },
    });
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "parent",
        label: "Parent",
        type: "reference",
        options: { collection: "page" },
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("rejects field slug not matching the slug pattern", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "Has-Caps",
        label: "Bad",
        type: "string",
      },
    });
    expect(result.isError).toBe(true);
  });
  it("rejects duplicate field slug on the same collection", async () => {
    await harness.client.callTool({
      name: "schema_create_field",
      arguments: { collection: "post", slug: "title", label: "Title", type: "string" },
    });
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: { collection: "post", slug: "title", label: "Title v2", type: "string" },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/exist|duplicate|already/i);
  });
  it("rejects field on non-existent collection", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "ghost",
        slug: "title",
        label: "Title",
        type: "string",
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/COLLECTION_NOT_FOUND|\bnot found\b/i);
    expect(extractText(result)).toContain("ghost");
  });
  it("rejects field type not in the enum", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "weird",
        label: "Weird",
        type: "not_a_real_type",
      },
    });
    expect(result.isError).toBe(true);
  });
  it("rejects reserved field slug like 'id' or 'created_at'", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "id",
        label: "ID",
        type: "string",
      },
    });
    expect(result.isError).toBe(true);
  });
  it("requires ADMIN role", async () => {
    // Drop the admin harness from beforeEach and reconnect as EDITOR.
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "title",
        label: "Title",
        type: "string",
      },
    });
    expect(result.isError).toBe(true);
  });
  it("required field is reflected in the response", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_field",
      arguments: {
        collection: "post",
        slug: "title",
        label: "Title",
        type: "string",
        required: true,
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const field = extractJson<{ required?: boolean }>(result);
    expect(field.required).toBe(true);
  });
});
// ---------------------------------------------------------------------------
// schema_delete_field
// ---------------------------------------------------------------------------
describe("schema_delete_field", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    // Seed "post" with two fields so one can be deleted while the other
    // proves the collection survives.
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" });
    await registry.createField("post", { slug: "title", label: "Title", type: "string" });
    await registry.createField("post", { slug: "body", label: "Body", type: "text" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("deletes an unused field", async () => {
    const result = await harness.client.callTool({
      name: "schema_delete_field",
      arguments: { collection: "post", fieldSlug: "body" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    // Verify it's gone
    const get = await harness.client.callTool({
      name: "schema_get_collection",
      arguments: { slug: "post" },
    });
    const collection = extractJson<{ fields: Array<{ slug: string }> }>(get);
    expect(collection.fields.find((f) => f.slug === "body")).toBeUndefined();
  });
  it("returns clear error for missing field slug", async () => {
    const result = await harness.client.callTool({
      name: "schema_delete_field",
      arguments: { collection: "post", fieldSlug: "ghost" },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/FIELD_NOT_FOUND|\bnot found\b/i);
    expect(extractText(result)).toContain("ghost");
  });
  it("returns clear error for missing collection", async () => {
    const result = await harness.client.callTool({
      name: "schema_delete_field",
      arguments: { collection: "noplace", fieldSlug: "title" },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(/COLLECTION_NOT_FOUND|\bnot found\b/i);
    expect(extractText(result)).toContain("noplace");
  });
  it("requires ADMIN role", async () => {
    await harness.cleanup();
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const result = await harness.client.callTool({
      name: "schema_delete_field",
      arguments: { collection: "post", fieldSlug: "body" },
    });
    expect(result.isError).toBe(true);
  });
  it("deleting a field with existing content also drops the data (no orphan)", async () => {
    // Create content using the field
    await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "T", body: "Body content" } },
    });
    // Delete the field
    const result = await harness.client.callTool({
      name: "schema_delete_field",
      arguments: { collection: "post", fieldSlug: "body" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    // content_get should return data without the body field
    const list = await harness.client.callTool({
      name: "content_list",
      arguments: { collection: "post" },
    });
    const items = extractJson<{ items: Array<Record<string, unknown>> }>(list).items;
    // At minimum, the API shouldn't crash. The field should not appear,
    // and the data fetch should still succeed.
    expect(items.length).toBeGreaterThan(0);
  });
});
// ---------------------------------------------------------------------------
// Cross-cutting: error envelope quality (bug #3 lens)
// ---------------------------------------------------------------------------
describe("schema tools — error envelope quality (bug #3 lens)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("schema_create_collection on duplicate names a discriminated CONFLICT-like error", async () => {
    await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "post", label: "Posts" },
    });
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "post", label: "Posts" },
    });
    expect(result.isError).toBe(true);
    const text = extractText(result);
    // Today: probably leaks raw SQLite UNIQUE error or generic. After fix:
    // a stable signal like "already exists" / CONFLICT.
    expect(text).toMatch(/exist|conflict|duplicate|unique|already/i);
    // "Failed to ..." is the generic wrapper prefix; the envelope must be
    // more specific than that.
    expect(text).not.toMatch(/^Failed to /);
  });
  it("validation error names the offending field/value in the message", async () => {
    const result = await harness.client.callTool({
      name: "schema_create_collection",
      arguments: { slug: "Bad-Slug", label: "Bad" },
    });
    expect(result.isError).toBe(true);
    // VALIDATION_ERROR is a shared matcher defined elsewhere in this file.
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
  });
});

View File

@@ -0,0 +1,337 @@
/**
* MCP search tool — comprehensive integration tests.
*
* Covers:
* - search query → matching results
* - empty index / no searchable collections
* - collection scoping
* - locale filtering
* - special characters / FTS5 syntax
* - permission gating
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Fixture user ids passed to connectMcpHarness as the authenticated caller.
const ADMIN_ID = "user_admin";
const SUBSCRIBER_ID = "user_subscriber";
/**
 * Seed a "post" collection with two searchable fields and an active FTS
 * index — the same state the seed pipeline or the admin "Enable search"
 * toggle produces on a production site.
 */
async function setupSearchablePostCollection(db: Kysely<Database>): Promise<void> {
  const schema = new SchemaRegistry(db);
  await schema.createCollection({
    slug: "post",
    label: "Posts",
    supports: ["drafts", "revisions", "search"],
  });
  const fieldDefs = [
    { slug: "title", label: "Title", type: "string" },
    { slug: "body", label: "Body", type: "text" },
  ] as const;
  for (const def of fieldDefs) {
    await schema.createField("post", { ...def, searchable: true });
  }
  // Without this step the FTS table and triggers don't exist, so the
  // tests would silently miss real indexing bugs.
  await new FTSManager(db).enableSearch("post");
}
describe("search", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns empty results when no collections are searchable", async () => {
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" }); // no search support
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "anything" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<{ items: unknown[] }>(result);
    expect(data.items).toEqual([]);
  });
  it("returns empty results for a query with no matches", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Hello world", body: "Lorem ipsum" } },
    });
    await harness.client.callTool({
      name: "content_publish",
      // NOTE(review): this hardcodes the id as the slugified title
      // ("hello-world") while sibling tests extract the id from the
      // create result — confirm the id scheme or align with the others.
      arguments: { collection: "post", id: "hello-world" },
    });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "ZZZZZQuantumZebra" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<{ items: unknown[] }>(result);
    expect(data.items).toEqual([]);
  });
  it("returns matching items for a query that hits", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "Hello world", body: "Lorem ipsum about searching" },
      },
    });
    const id = extractJson<{ item: { id: string } }>(created).item.id;
    await harness.client.callTool({
      name: "content_publish",
      arguments: { collection: "post", id },
    });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "Hello" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<{ items: Array<{ id: string }> }>(result);
    expect(data.items.length).toBeGreaterThan(0);
    expect(data.items.find((i) => i.id === id)).toBeTruthy();
  });
  it("scopes search by collections argument", async () => {
    // Two searchable collections ("post", "page") seeded inline because
    // the shared helper only builds "post".
    const registry = new SchemaRegistry(db);
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      supports: ["search"],
    });
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
      searchable: true,
    });
    await registry.createCollection({
      slug: "page",
      label: "Pages",
      supports: ["search"],
    });
    await registry.createField("page", {
      slug: "title",
      label: "Title",
      type: "string",
      searchable: true,
    });
    const fts = new FTSManager(db);
    await fts.enableSearch("post");
    await fts.enableSearch("page");
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const post = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "rocket post" } },
    });
    const page = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "page", data: { title: "rocket page" } },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "post",
        id: extractJson<{ item: { id: string } }>(post).item.id,
      },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "page",
        id: extractJson<{ item: { id: string } }>(page).item.id,
      },
    });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "rocket", collections: ["post"] },
    });
    const data = extractJson<{ items: Array<{ collection?: string; type?: string }> }>(result);
    // We seeded one post and one page that both match "rocket". Scoping
    // to ["post"] must keep at least the post hit and exclude the page.
    expect(data.items.length).toBeGreaterThan(0);
    for (const item of data.items) {
      const c = item.collection ?? item.type;
      expect(c).toBe("post");
    }
  });
  it("handles empty query string gracefully", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // Seed a published item so a regression that interprets an empty
    // query as "match all" would produce a non-empty list and fail.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "matchable", body: "indexed content" },
      },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "post",
        id: extractJson<{ item: { id: string } }>(created).item.id,
      },
    });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "" },
    });
    // Empty queries are sanitized to a no-op and return zero matches.
    // They must not surface as an error AND must not match all items.
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<{ items: unknown[] }>(result);
    expect(data.items).toEqual([]);
  });
  it("handles special characters in query without leaking FTS5 syntax errors", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // Seed a published item so a regression that lets malformed input
    // fall through to "match all" would surface a non-empty list.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "matchable", body: "indexed content" },
      },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "post",
        id: extractJson<{ item: { id: string } }>(created).item.id,
      },
    });
    // FTS5 has special operators: AND OR NOT NEAR " * ( ) :
    // `searchSingleCollection` swallows malformed-input FTS5 errors and
    // returns no matches; the response is a clean empty list.
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: 'NOT "quotes" AND* (' },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<{ items: unknown[] }>(result);
    expect(data.items).toEqual([]);
  });
  it("respects the limit parameter", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // Create 10 items containing the same word
    for (let i = 0; i < 10; i++) {
      const c = await harness.client.callTool({
        name: "content_create",
        arguments: {
          collection: "post",
          data: { title: `searchable item ${i}`, body: "common-text" },
        },
      });
      await harness.client.callTool({
        name: "content_publish",
        arguments: {
          collection: "post",
          id: extractJson<{ item: { id: string } }>(c).item.id,
        },
      });
    }
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "common-text", limit: 3 },
    });
    const data = extractJson<{ items: unknown[] }>(result);
    expect(data.items.length).toBeLessThanOrEqual(3);
  });
  it("only returns published items (not drafts) regardless of caller role", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // Create one draft, one published
    await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "draft-only-content" } },
    });
    const pubItem = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "published-content" } },
    });
    await harness.client.callTool({
      name: "content_publish",
      arguments: {
        collection: "post",
        id: extractJson<{ item: { id: string } }>(pubItem).item.id,
      },
    });
    const draftQuery = await harness.client.callTool({
      name: "search",
      arguments: { query: "draft-only-content" },
    });
    expect(extractJson<{ items: unknown[] }>(draftQuery).items).toEqual([]);
    const pubQuery = await harness.client.callTool({
      name: "search",
      arguments: { query: "published-content" },
    });
    expect(extractJson<{ items: unknown[] }>(pubQuery).items.length).toBeGreaterThan(0);
  });
  it("any logged-in user (SUBSCRIBER) can search", async () => {
    await setupSearchablePostCollection(db);
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const result = await harness.client.callTool({
      name: "search",
      arguments: { query: "anything" },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
});

View File

@@ -0,0 +1,364 @@
/**
* MCP settings tools — integration tests.
*
* Covers:
* - settings_get
* - settings_update
*
* Plus regression for bug #16 (no MCP tool for site settings).
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Fixture user ids passed to connectMcpHarness as the authenticated caller.
const ADMIN_ID = "user_admin";
const EDITOR_ID = "user_editor";
const SUBSCRIBER_ID = "user_subscriber";
/**
 * Shape of the payload returned by settings_get / settings_update, as
 * consumed by these tests. All fields are optional: an unconfigured site
 * returns an empty object.
 */
interface SiteSettingsResponse {
  title?: string;
  tagline?: string;
  // Media references; `url` is resolved server-side (tests expect the
  // /_emdash/api/media/file/ route).
  logo?: { mediaId: string; alt?: string; url?: string };
  favicon?: { mediaId: string; alt?: string; url?: string };
  url?: string;
  postsPerPage?: number;
  dateFormat?: string;
  timezone?: string;
  social?: Record<string, string | undefined>;
  seo?: Record<string, unknown>;
}
/**
 * Insert a minimal media row directly, bypassing the upload pipeline, and
 * return its id. The `as never` casts deliberately sidestep the generated
 * Kysely table typings for this raw test fixture.
 */
async function seedMedia(db: Kysely<Database>, opts?: { id?: string }): Promise<string> {
  const mediaId = opts?.id ?? ulid();
  const row = {
    id: mediaId,
    filename: "logo.png",
    mime_type: "image/png",
    size: 1024,
    storage_key: `media/${mediaId}.png`,
    created_at: new Date().toISOString(),
  };
  await db
    .insertInto("media" as never)
    .values(row as never)
    .execute();
  return mediaId;
}
// ---------------------------------------------------------------------------
// Tool registration — bug #16 regression.
// ---------------------------------------------------------------------------
describe("settings tools registered (bug #16)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  // Regression for bug #16: site settings had no MCP surface at all, so
  // the mere presence of both tools in the listing is the assertion.
  it("MCP exposes settings_get and settings_update", async () => {
    const listing = await harness.client.listTools();
    const registered = listing.tools.map((tool) => tool.name);
    expect(registered).toContain("settings_get");
    expect(registered).toContain("settings_update");
  });
});
// ---------------------------------------------------------------------------
// settings_get
// ---------------------------------------------------------------------------
describe("settings_get", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("returns an empty object when no settings are set", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<SiteSettingsResponse>(result);
    expect(data).toEqual({});
  });
  it("returns previously-set settings", async () => {
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await harness.client.callTool({
      name: "settings_update",
      arguments: { title: "My Site", tagline: "Welcome" },
    });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<SiteSettingsResponse>(result);
    expect(data.title).toBe("My Site");
    expect(data.tagline).toBe("Welcome");
  });
  it("resolves logo media reference URL", async () => {
    const mediaId = await seedMedia(db);
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    await harness.client.callTool({
      name: "settings_update",
      arguments: { logo: { mediaId, alt: "Site logo" } },
    });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
    const data = extractJson<SiteSettingsResponse>(result);
    expect(data.logo?.mediaId).toBe(mediaId);
    expect(data.logo?.alt).toBe("Site logo");
    // URL is resolved to the media file route
    expect(data.logo?.url).toMatch(/^\/_emdash\/api\/media\/file\//);
  });
  it("editor can read settings", async () => {
    harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("subscriber cannot read settings (INSUFFICIENT_PERMISSIONS)", async () => {
    harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError).toBe(true);
    // The machine-readable error code travels in the MCP _meta envelope.
    const meta = (result as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
  });
  it("rejects token without settings:read scope (INSUFFICIENT_SCOPE)", async () => {
    // Role would allow it; the token scope alone blocks the call.
    harness = await connectMcpHarness({
      db,
      userId: ADMIN_ID,
      userRole: Role.ADMIN,
      tokenScopes: ["content:read"],
    });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError).toBe(true);
    const meta = (result as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("INSUFFICIENT_SCOPE");
  });
  it("settings:read token is sufficient for settings_get", async () => {
    harness = await connectMcpHarness({
      db,
      userId: ADMIN_ID,
      userRole: Role.ADMIN,
      tokenScopes: ["settings:read"],
    });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("admin scope grants settings_get access", async () => {
    harness = await connectMcpHarness({
      db,
      userId: ADMIN_ID,
      userRole: Role.ADMIN,
      tokenScopes: ["admin"],
    });
    const result = await harness.client.callTool({
      name: "settings_get",
      arguments: {},
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
});
// ---------------------------------------------------------------------------
// settings_update
// ---------------------------------------------------------------------------
// Integration suite for the settings_update MCP tool: happy-path writes,
// partial-update merge semantics, field validation (url scheme,
// postsPerPage range, nested seo/social objects), and the full
// role/scope authorization matrix.
describe("settings_update", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("updates title and tagline", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "settings_update",
arguments: { title: "EmDash Demo", tagline: "Hello" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const data = extractJson<SiteSettingsResponse>(result);
expect(data.title).toBe("EmDash Demo");
expect(data.tagline).toBe("Hello");
});
// Sequencing matters here: the first call seeds both fields, the second
// writes only tagline, and the response must prove title survived.
it("partial update preserves other fields", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
await harness.client.callTool({
name: "settings_update",
arguments: { title: "First", tagline: "Original tagline" },
});
// Update only tagline; title should be preserved
const result = await harness.client.callTool({
name: "settings_update",
arguments: { tagline: "Updated tagline" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const data = extractJson<SiteSettingsResponse>(result);
expect(data.title).toBe("First");
expect(data.tagline).toBe("Updated tagline");
});
// URL validation: scheme allow-listing must reject javascript: payloads.
it("accepts an http url and rejects javascript: scheme", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const ok = await harness.client.callTool({
name: "settings_update",
arguments: { url: "https://example.com" },
});
expect(ok.isError, extractText(ok)).toBeFalsy();
const bad = await harness.client.callTool({
name: "settings_update",
// eslint-disable-next-line no-script-url -- intentional for validation test
arguments: { url: "javascript:alert(1)" },
});
expect(bad.isError).toBe(true);
});
it("accepts empty string for url (clears it)", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "settings_update",
arguments: { url: "" },
});
expect(result.isError, extractText(result)).toBeFalsy();
});
// NOTE(review): only the upper bound is exercised; a companion case for
// postsPerPage: 0 / negatives would pin the lower bound too.
it("rejects out-of-range postsPerPage", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "settings_update",
arguments: { postsPerPage: 9999 },
});
expect(result.isError).toBe(true);
});
it("accepts nested seo and social objects", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "settings_update",
arguments: {
social: { twitter: "@emdash", github: "emdash-cms" },
seo: { titleSeparator: " | ", googleVerification: "abc123" },
},
});
expect(result.isError, extractText(result)).toBeFalsy();
const data = extractJson<SiteSettingsResponse>(result);
expect(data.social?.twitter).toBe("@emdash");
expect(data.social?.github).toBe("emdash-cms");
expect((data.seo as { titleSeparator?: string }).titleSeparator).toBe(" | ");
});
// Role matrix: settings writes are admin-only, regardless of token scope.
it("editor cannot update settings (INSUFFICIENT_PERMISSIONS — admin only)", async () => {
harness = await connectMcpHarness({ db, userId: EDITOR_ID, userRole: Role.EDITOR });
const result = await harness.client.callTool({
name: "settings_update",
arguments: { title: "Nope" },
});
expect(result.isError).toBe(true);
const meta = (result as { _meta?: { code?: string } })._meta;
expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
});
it("subscriber cannot update settings", async () => {
harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
const result = await harness.client.callTool({
name: "settings_update",
arguments: { title: "Nope" },
});
expect(result.isError).toBe(true);
const meta = (result as { _meta?: { code?: string } })._meta;
expect(meta?.code).toBe("INSUFFICIENT_PERMISSIONS");
});
// Scope matrix: even an admin user is bounded by the token's scopes —
// read scope cannot write, manage scope can.
it("settings:read token cannot call settings_update (INSUFFICIENT_SCOPE)", async () => {
harness = await connectMcpHarness({
db,
userId: ADMIN_ID,
userRole: Role.ADMIN,
tokenScopes: ["settings:read"],
});
const result = await harness.client.callTool({
name: "settings_update",
arguments: { title: "x" },
});
expect(result.isError).toBe(true);
const meta = (result as { _meta?: { code?: string } })._meta;
expect(meta?.code).toBe("INSUFFICIENT_SCOPE");
});
it("settings:manage token can call settings_update", async () => {
harness = await connectMcpHarness({
db,
userId: ADMIN_ID,
userRole: Role.ADMIN,
tokenScopes: ["settings:manage"],
});
const result = await harness.client.callTool({
name: "settings_update",
arguments: { title: "x" },
});
expect(result.isError, extractText(result)).toBeFalsy();
});
});

View File

@@ -0,0 +1,771 @@
/**
* MCP taxonomy tools — comprehensive integration tests.
*
* Covers:
* - taxonomy_list
* - taxonomy_list_terms
* - taxonomy_create_term
*
* Plus regression coverage for:
* - bug #7 (orphan taxonomy collection inconsistency)
* - bug #13 (no delete/update term tool — gap test)
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { handleTaxonomyCreate } from "../../../src/api/handlers/taxonomies.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import {
connectMcpHarness,
extractJson,
extractText,
type McpHarness,
} from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Stable fixture user ids. The role actually enforced per test is the one
// passed to connectMcpHarness, not anything stored against these ids.
const ADMIN_ID = "user_admin";
const AUTHOR_ID = "user_author";
const SUBSCRIBER_ID = "user_subscriber";
/**
 * Fixture helper: create a taxonomy through the REST handler and fail
 * loudly when the handler reports an error, so a broken fixture surfaces
 * as a setup failure instead of a confusing downstream assertion.
 */
async function setupTaxonomy(
  db: Kysely<Database>,
  input: { name: string; label: string; hierarchical?: boolean; collections?: string[] },
): Promise<void> {
  const created = await handleTaxonomyCreate(db, input);
  if (created.success) return;
  throw new Error(`Failed to set up taxonomy: ${created.error?.message}`);
}
// ---------------------------------------------------------------------------
// taxonomy_list
// ---------------------------------------------------------------------------
// Integration suite for taxonomy_list: seeded defaults, user-created
// taxonomies, read access for any authenticated role, and the bug #7
// regression (orphaned collection slugs filtered from output).
describe("taxonomy_list", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("returns only the seeded defaults when no extra taxonomies are added", async () => {
// Migration 006 seeds two default taxonomies: 'category' (hierarchical)
// and 'tag' (flat), both linked to the 'posts' collection. A fresh
// install always has these.
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_list",
arguments: {},
});
expect(result.isError, extractText(result)).toBeFalsy();
const { taxonomies } = extractJson<{
taxonomies: Array<{ name: string }>;
}>(result);
const names = taxonomies.map((t) => t.name).toSorted();
expect(names).toEqual(["category", "tag"]);
});
it("lists user-created taxonomies alongside the defaults", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({ slug: "post", label: "Posts" });
// Use names that don't collide with the seeded `category` / `tag`.
await setupTaxonomy(db, {
name: "section",
label: "Sections",
hierarchical: true,
collections: ["post"],
});
await setupTaxonomy(db, {
name: "topic",
label: "Topics",
collections: ["post"],
});
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_list",
arguments: {},
});
// NOTE(review): result.isError is not asserted here before extraction;
// an explicit isError check (as in the previous test) would give a
// clearer failure message if the tool errors.
const { taxonomies } = extractJson<{
taxonomies: Array<{ name: string; hierarchical?: boolean; collections?: string[] }>;
}>(result);
const names = taxonomies.map((t) => t.name).toSorted();
expect(names).toEqual(["category", "section", "tag", "topic"]);
const section = taxonomies.find((t) => t.name === "section");
expect(section?.hierarchical).toBe(true);
expect(section?.collections).toEqual(["post"]);
});
it("any logged-in user (SUBSCRIBER) can read taxonomies", async () => {
harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
const result = await harness.client.callTool({
name: "taxonomy_list",
arguments: {},
});
expect(result.isError, extractText(result)).toBeFalsy();
});
it("bug #7: orphaned collection slugs are filtered from taxonomy_list output", async () => {
// The seed taxonomies (category, tag) both reference 'posts' — a
// collection that doesn't exist in this test DB (no auto-seed). After
// the bug #7 fix, `taxonomy_list` filters those orphans out. We don't
// need to manufacture an orphan; the seed already gives us one.
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const taxResult = await harness.client.callTool({
name: "taxonomy_list",
arguments: {},
});
const { taxonomies } = extractJson<{
taxonomies: Array<{ name: string; collections?: string[] }>;
}>(taxResult);
// Each seeded taxonomy referenced 'posts'. After filtering, that
// orphan slug is gone — the array should be empty for both seeds.
for (const t of taxonomies) {
expect(t.collections).not.toContain("posts");
}
// And schema_list_collections agrees: there is no 'posts' collection.
const collResult = await harness.client.callTool({
name: "schema_list_collections",
arguments: {},
});
const { items } = extractJson<{ items: Array<{ slug: string }> }>(collResult);
expect(items.find((c) => c.slug === "posts")).toBeUndefined();
});
});
// ---------------------------------------------------------------------------
// taxonomy_list_terms
// ---------------------------------------------------------------------------
// Integration suite for taxonomy_list_terms: empty lists, keyset
// pagination over (label, id) including tied labels and deleted cursor
// rows, malformed-cursor handling, and read access for any role.
describe("taxonomy_list_terms", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabase();
await setupTaxonomy(db, { name: "categories", label: "Categories", hierarchical: true });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("returns empty list when taxonomy has no terms", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const { items } = extractJson<{ items: unknown[] }>(result);
expect(items).toEqual([]);
});
it("returns terms after creation", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
// NOTE(review): the create calls below don't assert success; a failed
// create would surface only as a confusing slug mismatch at the end.
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "tech", label: "Tech" },
});
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "design", label: "Design" },
});
const result = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories" },
});
const { items } = extractJson<{
items: Array<{ slug: string; label: string; parentId: string | null }>;
}>(result);
const slugs = items.map((t) => t.slug).toSorted();
expect(slugs).toEqual(["design", "tech"]);
});
it("returns clear error for missing taxonomy name", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "nonexistent" },
});
expect(result.isError).toBe(true);
// The error must both carry a NOT_FOUND marker and name the taxonomy.
expect(extractText(result)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
expect(extractText(result)).toContain("nonexistent");
});
it("paginates with limit + cursor", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
// Insert 5 terms — labels chosen so alphabetical ordering is predictable
for (const label of ["alpha", "bravo", "charlie", "delta", "echo"]) {
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: label, label },
});
}
const page1 = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", limit: 2 },
});
const p1 = extractJson<{ items: Array<{ slug: string; id: string }>; nextCursor?: string }>(
page1,
);
expect(p1.items).toHaveLength(2);
expect(p1.nextCursor).toBeTruthy();
const page2 = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", limit: 2, cursor: p1.nextCursor },
});
const p2 = extractJson<{ items: Array<{ slug: string }>; nextCursor?: string }>(page2);
expect(p2.items).toHaveLength(2);
// No overlap
const p1Slugs = p1.items.map((i) => i.slug);
for (const t of p2.items) expect(p1Slugs).not.toContain(t.slug);
});
it("paginates correctly when multiple terms share the same label", async () => {
// Keyset pagination over (label, id) needs a stable id tiebreaker
// at the SQL layer or tied-label rows can swap order between calls
// — producing duplicates or skipped items. Three terms share
// label "shared"; pagination must walk through them in a stable
// order with no duplicates and no gaps.
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const slugs = ["shared-1", "shared-2", "shared-3", "unique-a"];
for (const slug of slugs) {
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: {
taxonomy: "categories",
slug,
label: slug.startsWith("shared") ? "shared" : slug,
},
});
}
// Walk one item at a time so every cursor transition exercises the
// (label, id) keyset.
const collected: string[] = [];
let cursor: string | undefined;
// Hard cap to prevent the test hanging if pagination loops.
for (let i = 0; i < 10; i++) {
const page = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", limit: 1, ...(cursor ? { cursor } : {}) },
});
const data = extractJson<{
items: Array<{ slug: string; id: string }>;
nextCursor?: string;
}>(page);
if (data.items.length === 0) break;
for (const item of data.items) collected.push(item.slug);
if (!data.nextCursor) break;
cursor = data.nextCursor;
}
// Each slug appears exactly once. Order doesn't matter for this
// assertion — just no duplicates and no missing entries.
expect(collected.toSorted()).toEqual(slugs.toSorted());
});
it("survives concurrent deletion of the cursor-term", async () => {
// The base64 keyset cursor encodes a (label, id) position rather
// than a row reference, so deleting the cursor-term between pages
// must not error — the next page just continues from the next
// position in sort order.
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
for (const slug of ["alpha", "bravo", "charlie", "delta"]) {
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug, label: slug },
});
}
const page1 = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", limit: 2 },
});
const p1 = extractJson<{
items: Array<{ slug: string }>;
nextCursor?: string;
}>(page1);
expect(p1.items.map((i) => i.slug)).toEqual(["alpha", "bravo"]);
expect(p1.nextCursor).toBeTruthy();
// Delete the cursor-term ('bravo') out of band.
const { TaxonomyRepository } = await import("../../../src/database/repositories/taxonomy.js");
const repo = new TaxonomyRepository(db);
const bravo = await repo.findBySlug("categories", "bravo");
if (!bravo) throw new Error("bravo missing — fixture broken");
await db.deleteFrom("taxonomies").where("id", "=", bravo.id).execute();
// Page 2 must still work and return the items strictly after the
// cursor's position. Pre-fix the cursor stored 'bravo's id and
// findIndex would have returned -1 → INVALID_CURSOR. Post-fix the
// cursor stores ('bravo', '<bravo-id>') and the keyset comparison
// finds the first term with (label, id) > ('bravo', '<bravo-id>')
// — that's 'charlie'.
const page2 = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", limit: 2, cursor: p1.nextCursor },
});
expect(page2.isError, extractText(page2)).toBeFalsy();
const p2 = extractJson<{ items: Array<{ slug: string }> }>(page2);
expect(p2.items.map((i) => i.slug)).toEqual(["charlie", "delta"]);
});
it("malformed cursor returns INVALID_CURSOR", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "t1", label: "T1" },
});
// taxonomy_list_terms uses a base64 keyset cursor over (label, id).
// A completely bogus value fails decodeCursor and surfaces as a
// structured INVALID_CURSOR error.
const result = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories", cursor: "garbage_cursor_xyz" },
});
expect(result.isError).toBe(true);
const meta = (result as { _meta?: { code?: string } })._meta;
expect(meta?.code).toBe("INVALID_CURSOR");
});
it("any logged-in user (SUBSCRIBER) can read terms", async () => {
harness = await connectMcpHarness({ db, userId: SUBSCRIBER_ID, userRole: Role.SUBSCRIBER });
const result = await harness.client.callTool({
name: "taxonomy_list_terms",
arguments: { taxonomy: "categories" },
});
expect(result.isError, extractText(result)).toBeFalsy();
});
});
// ---------------------------------------------------------------------------
// taxonomy_create_term
// ---------------------------------------------------------------------------
// Integration suite for taxonomy_create_term: happy paths (flat +
// parented), slug uniqueness scoped per taxonomy, parent validation
// (wrong taxonomy / missing term), and the EDITOR role requirement.
describe("taxonomy_create_term", () => {
let db: Kysely<Database>;
let harness: McpHarness;
beforeEach(async () => {
db = await setupTestDatabase();
// One hierarchical and one flat taxonomy so cross-taxonomy cases can
// be exercised.
await setupTaxonomy(db, { name: "categories", label: "Categories", hierarchical: true });
await setupTaxonomy(db, { name: "tags", label: "Tags" });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("creates a term with minimal arguments", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "tech", label: "Tech" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const { term } = extractJson<{ term: { slug: string; label: string } }>(result);
expect(term.slug).toBe("tech");
expect(term.label).toBe("Tech");
});
it("creates a child term with parentId", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const parent = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "tech", label: "Tech" },
});
const parentId = extractJson<{ term: { id: string } }>(parent).term.id;
const child = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: {
taxonomy: "categories",
slug: "ai",
label: "AI",
parentId,
},
});
expect(child.isError, extractText(child)).toBeFalsy();
const { term } = extractJson<{ term: { parentId: string | null } }>(child);
expect(term.parentId).toBe(parentId);
});
it("rejects duplicate slug within the same taxonomy", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "tech", label: "Tech" },
});
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "tech", label: "Tech 2" },
});
expect(result.isError).toBe(true);
// Loose match: any conflict-flavored wording is accepted.
expect(extractText(result)).toMatch(/exist|duplicate|conflict|unique|already/i);
});
it("allows same slug across different taxonomies", async () => {
// Slug uniqueness is scoped per taxonomy, not global.
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const a = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "shared", label: "Shared" },
});
const b = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "tags", slug: "shared", label: "Shared" },
});
expect(a.isError, extractText(a)).toBeFalsy();
expect(b.isError, extractText(b)).toBeFalsy();
});
it("rejects creating a term in a non-existent taxonomy", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "ghost", slug: "x", label: "X" },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/\bNOT_FOUND\b|\bnot found\b/i);
expect(extractText(result)).toContain("ghost");
});
it("rejects parentId pointing to a different taxonomy", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const tag = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "tags", slug: "stuff", label: "Stuff" },
});
const tagId = extractJson<{ term: { id: string } }>(tag).term.id;
// A real term id — but it lives in "tags", so it cannot parent a
// "categories" term.
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: {
taxonomy: "categories",
slug: "child",
label: "Child",
parentId: tagId,
},
});
expect(result.isError).toBe(true);
});
it("rejects parentId pointing to a non-existent term", async () => {
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: {
taxonomy: "categories",
slug: "orphan",
label: "Orphan",
// Well-formed-looking id that was never issued.
parentId: "01NEVEREXISTED",
},
});
expect(result.isError).toBe(true);
});
it("requires EDITOR role (AUTHOR is blocked)", async () => {
harness = await connectMcpHarness({ db, userId: AUTHOR_ID, userRole: Role.AUTHOR });
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "categories", slug: "x", label: "X" },
});
expect(result.isError).toBe(true);
});
});
// ---------------------------------------------------------------------------
// Bug #13 / F2 / F3 / F12 — happy paths for taxonomy_update_term and
// taxonomy_delete_term, plus parent validation, cycle detection, and
// empty-string rejection.
// ---------------------------------------------------------------------------
// Integration suite for taxonomy_update_term: slug/label edits,
// reparenting (including detach via parentId: null), parent validation,
// cycle detection, empty-string parentId normalization, and the
// MAX_DEPTH=100 ancestor-chain boundary (built via the repository to
// skip 100+ round trips through the MCP tool).
describe("taxonomy_update_term (bug #13 / F2 / F12)", () => {
let db: Kysely<Database>;
let harness: McpHarness;
// Suite-local helper: create a term through the MCP tool, assert it
// succeeded, and return the new term's id for use as a parent.
async function createTerm(
taxonomy: string,
slug: string,
label: string,
parentId?: string,
): Promise<string> {
const args: Record<string, unknown> = { taxonomy, slug, label };
if (parentId) args.parentId = parentId;
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: args,
});
expect(result.isError, extractText(result)).toBeFalsy();
const { term } = extractJson<{ term: { id: string } }>(result);
return term.id;
}
beforeEach(async () => {
db = await setupTestDatabase();
await setupTaxonomy(db, { name: "tags", label: "Tags" });
await setupTaxonomy(db, { name: "sections", label: "Sections" });
harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
});
afterEach(async () => {
if (harness) await harness.cleanup();
await teardownTestDatabase(db);
});
it("MCP exposes taxonomy_update_term and taxonomy_delete_term", async () => {
const tools = await harness.client.listTools();
const names = tools.tools.map((t) => t.name);
expect(names).toContain("taxonomy_update_term");
expect(names).toContain("taxonomy_delete_term");
});
it("renames the slug when the new slug is free", async () => {
await createTerm("tags", "old-slug", "Original");
const result = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "old-slug", slug: "new-slug" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const { term } = extractJson<{ term: { slug: string } }>(result);
expect(term.slug).toBe("new-slug");
});
it("changes the label", async () => {
await createTerm("tags", "x", "Old Label");
const result = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "x", label: "New Label" },
});
expect(result.isError, extractText(result)).toBeFalsy();
const { term } = extractJson<{ term: { label: string } }>(result);
expect(term.label).toBe("New Label");
});
it("reparents a term and detaches via parentId: null", async () => {
const parentId = await createTerm("tags", "parent", "Parent");
await createTerm("tags", "child", "Child");
const reparent = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "child", parentId },
});
expect(reparent.isError, extractText(reparent)).toBeFalsy();
const reparented = extractJson<{ term: { parentId: string | null } }>(reparent);
expect(reparented.term.parentId).toBe(parentId);
// Explicit null (as opposed to omitting the key) must detach.
const detach = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "child", parentId: null },
});
expect(detach.isError, extractText(detach)).toBeFalsy();
const detached = extractJson<{ term: { parentId: string | null } }>(detach);
expect(detached.term.parentId).toBeNull();
});
it("rejects parents from a different taxonomy", async () => {
const sectionId = await createTerm("sections", "news", "News");
await createTerm("tags", "alpha", "Alpha");
const result = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "alpha", parentId: sectionId },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/VALIDATION_ERROR/);
});
it("rejects self-parent", async () => {
const id = await createTerm("tags", "loop", "Loop");
const result = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "loop", parentId: id },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/own parent|VALIDATION_ERROR/i);
});
it("rejects a 2-cycle (descendant becoming ancestor)", async () => {
// A is parent of B. Now try to make B the parent of A — that's a cycle.
const aId = await createTerm("tags", "a", "A");
const bId = await createTerm("tags", "b", "B", aId);
const result = await harness.client.callTool({
name: "taxonomy_update_term",
arguments: { taxonomy: "tags", termSlug: "a", parentId: bId },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/cycle|VALIDATION_ERROR/i);
});
it("rejects empty-string parentId on create", async () => {
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "tags", slug: "x", label: "X", parentId: "" },
});
// Either returns a validation error, or treats it as no-parent.
// We choose strict: empty string is normalized to undefined so it
// succeeds with parentId === null (no parent attached). That's the
// behavior we documented.
if (result.isError) {
expect(extractText(result)).toMatch(/VALIDATION_ERROR/);
} else {
const { term } = extractJson<{ term: { parentId: string | null } }>(result);
expect(term.parentId).toBeNull();
}
});
// ----- MAX_DEPTH boundary -----
// validateParentTerm walks up the parent chain bounded by MAX_DEPTH=100
// to prevent a pathological pre-existing cycle from hanging the
// validator. The boundary is "more than 100 ancestors": exactly-100 is
// accepted, 101+ is rejected.
it("accepts a chain of exactly MAX_DEPTH (100) ancestors", async () => {
const { TaxonomyRepository } = await import("../../../src/database/repositories/taxonomy.js");
const repo = new TaxonomyRepository(db);
// Build root → 1 → 2 → ... → 100. 101 terms total. The deepest
// term has 100 ancestors; setting it as parent of a new term means
// validateParentTerm walks 100 hops up before exhausting the chain.
let parentId: string | undefined;
const ids: string[] = [];
for (let i = 0; i < 101; i++) {
const term = await repo.create({
name: "tags",
slug: `chain-${i}`,
label: `Chain ${i}`,
parentId,
});
ids.push(term.id);
parentId = term.id;
}
const deepest = ids.at(-1);
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "tags", slug: "leaf", label: "Leaf", parentId: deepest },
});
// New term's parent is the 100-deep tail. Walking up from there
// reaches the root after exactly 100 hops; cursor becomes null,
// the depth-exceeded check does NOT fire.
expect(result.isError, extractText(result)).toBeFalsy();
});
it("rejects a chain that exceeds MAX_DEPTH", async () => {
const { TaxonomyRepository } = await import("../../../src/database/repositories/taxonomy.js");
const repo = new TaxonomyRepository(db);
// Build a 102-term chain. The deepest term has 101 ancestors —
// one more than MAX_DEPTH allows.
let parentId: string | undefined;
const ids: string[] = [];
for (let i = 0; i < 102; i++) {
const term = await repo.create({
name: "tags",
slug: `chain-${i}`,
label: `Chain ${i}`,
parentId,
});
ids.push(term.id);
parentId = term.id;
}
const deepest = ids.at(-1);
const result = await harness.client.callTool({
name: "taxonomy_create_term",
arguments: { taxonomy: "tags", slug: "leaf", label: "Leaf", parentId: deepest },
});
expect(result.isError).toBe(true);
expect(extractText(result)).toMatch(/maximum depth/i);
const meta = (result as { _meta?: { code?: string } })._meta;
expect(meta?.code).toBe("VALIDATION_ERROR");
});
});
// Integration suite for taxonomy_delete_term: deletion is blocked while
// children exist, and a successful leaf deletion must actually remove
// the row (not just report success).
describe("taxonomy_delete_term (bug #13 / F12)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;

  // Each test gets a fresh DB with a flat "tags" taxonomy and an
  // already-connected admin harness.
  beforeEach(async () => {
    db = await setupTestDatabase();
    await setupTaxonomy(db, { name: "tags", label: "Tags" });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });

  /** List the slugs currently visible in the "tags" taxonomy. */
  async function listTagSlugs(): Promise<string[]> {
    const page = await harness.client.callTool({
      name: "taxonomy_list_terms",
      arguments: { taxonomy: "tags" },
    });
    return extractJson<{ items: Array<{ slug: string }> }>(page).items.map((t) => t.slug);
  }

  it("rejects deletion when children exist (matches handler behavior)", async () => {
    const parent = await harness.client.callTool({
      name: "taxonomy_create_term",
      arguments: { taxonomy: "tags", slug: "parent", label: "Parent" },
    });
    const { term } = extractJson<{ term: { id: string } }>(parent);
    await harness.client.callTool({
      name: "taxonomy_create_term",
      arguments: { taxonomy: "tags", slug: "child", label: "Child", parentId: term.id },
    });
    const deletion = await harness.client.callTool({
      name: "taxonomy_delete_term",
      arguments: { taxonomy: "tags", termSlug: "parent" },
    });
    expect(deletion.isError).toBe(true);
    expect(extractText(deletion)).toMatch(/VALIDATION_ERROR|children/i);
  });

  it("deletes a leaf term and the row is actually gone", async () => {
    await harness.client.callTool({
      name: "taxonomy_create_term",
      arguments: { taxonomy: "tags", slug: "leaf", label: "Leaf" },
    });
    // Pre-condition: the term is listable before deletion.
    expect(await listTagSlugs()).toContain("leaf");
    const deletion = await harness.client.callTool({
      name: "taxonomy_delete_term",
      arguments: { taxonomy: "tags", termSlug: "leaf" },
    });
    expect(deletion.isError, extractText(deletion)).toBeFalsy();
    // Post-condition: a handler that reports success without actually
    // deleting the row fails this assertion.
    expect(await listTagSlugs()).not.toContain("leaf");
  });
});

View File

@@ -0,0 +1,491 @@
/**
* MCP field-level validation tests.
*
* `EmDashRuntime.handleContentCreate` and `handleContentUpdate` validate
* `data` against the collection's schema before any write:
*
* - required fields must be present and non-empty
* - select / multiSelect values must match the configured options
* - reference fields must resolve to a real, non-trashed target
*
* Failures return `{ code: "VALIDATION_ERROR", message: "<field>: <reason>" }`
* with all offending fields named in one message so callers can fix
* everything in a single round trip. These tests cover both REST and MCP
* because validation runs at the runtime layer and both transports go
* through it.
*/
import { Role } from "@emdash-cms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { connectMcpHarness, extractText, type McpHarness } from "../../utils/mcp-runtime.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Fixed admin user id shared by every suite in this file.
const ADMIN_ID = "user_admin";
// Loose match for any validation-flavoured error message.
const VALIDATION_ERROR = /validation|required|invalid/i;
// The generic fallback message a proper validation error must NOT collapse into.
const GENERIC_FAILURE = /^Failed to (create|update) content$/;
// ---------------------------------------------------------------------------
// Bug #4: required field validation
// ---------------------------------------------------------------------------
// Required-field enforcement: creates and updates must be rejected when a
// `required: true` field is missing, empty, null, or of the wrong type, and
// the error must name the field rather than fall back to a generic failure.
describe("MCP validation — required fields (bug #4)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" });
    // Required title, optional body
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
      required: true,
    });
    await registry.createField("post", {
      slug: "body",
      label: "Body",
      type: "text",
    });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    // harness is undefined when connectMcpHarness threw in beforeEach.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects create without required title", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { body: "no title" } },
    });
    expect(result.isError).toBe(true);
    const text = extractText(result);
    // Must be a specific validation message naming the field, not the
    // generic catch-all.
    expect(text).not.toMatch(GENERIC_FAILURE);
    expect(text).toMatch(VALIDATION_ERROR);
    expect(text).toMatch(/title/i);
  });
  it("rejects create with empty-string required title", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "" } },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
  });
  it("rejects create with explicitly-null required title", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: null } },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
  });
  it("rejects create with non-string value for a string field", async () => {
    // Zod's `z.string()` rejects numbers/booleans/objects. The MCP
    // boundary lets these through (data is `z.record(z.string(),
    // z.unknown())`), so the check has to live in the runtime
    // validator. Guard against future regressions like swapping in
    // `z.coerce.string()`.
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        // eslint-disable-next-line @typescript-eslint/no-explicit-any -- intentionally bypass MCP type to hit runtime validation
        data: { title: 42 } as any,
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
    expect(extractText(result)).toMatch(/title/i);
    // Structured error code travels in MCP `_meta`, alongside the text.
    const meta = (result as { _meta?: { code?: string } })._meta;
    expect(meta?.code).toBe("VALIDATION_ERROR");
  });
  it("accepts create with required title present (regression guard)", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Has title" } },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("rejects update that clears required title to empty string", async () => {
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Initial" } },
    });
    expect(created.isError, extractText(created)).toBeFalsy();
    const id = JSON.parse(extractText(created)).item.id as string;
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { title: "" } },
    });
    expect(updated.isError).toBe(true);
    expect(extractText(updated)).toMatch(VALIDATION_ERROR);
  });
});
// ---------------------------------------------------------------------------
// Bug #5: select and multiSelect option enforcement
// ---------------------------------------------------------------------------
describe("MCP validation — select and multiSelect options (bug #5)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    await registry.createCollection({ slug: "post", label: "Posts" });
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
      required: true,
    });
    // Single-select constrained to three options.
    await registry.createField("post", {
      slug: "priority",
      label: "Priority",
      type: "select",
      validation: { options: ["low", "medium", "high"] },
    });
    // Multi-select constrained to three options.
    await registry.createField("post", {
      slug: "tags",
      label: "Tags",
      type: "multiSelect",
      validation: { options: ["news", "tech", "design"] },
    });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    // harness is undefined when connectMcpHarness threw in beforeEach.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects select value not in options list", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", priority: "not-an-option" },
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
    // The message should point at the offending field, the field type,
    // or echo the offending value.
    expect(extractText(result)).toMatch(/priority|select|option|not-an-option/i);
  });
  it("accepts select value in options list (regression guard)", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", priority: "high" },
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("rejects multiSelect array containing an invalid value", async () => {
    // A single bad member ("bogus") must fail the whole array even though
    // "news" is valid.
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", tags: ["news", "bogus"] },
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
    expect(extractText(result)).toMatch(/tags|multiSelect|option|bogus/i);
  });
  it("accepts multiSelect with all valid values (regression guard)", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", tags: ["news", "tech"] },
      },
    });
    expect(result.isError, extractText(result)).toBeFalsy();
  });
  it("rejects update introducing an invalid select value", async () => {
    // Start from a valid document, then try to flip priority to a value
    // outside the options list via content_update.
    const created = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", priority: "low" },
      },
    });
    expect(created.isError, extractText(created)).toBeFalsy();
    const id = JSON.parse(extractText(created)).item.id as string;
    const updated = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id, data: { priority: "URGENT" } },
    });
    expect(updated.isError).toBe(true);
    expect(extractText(updated)).toMatch(VALIDATION_ERROR);
  });
});
// ---------------------------------------------------------------------------
// Bug #6: reference field target existence
// ---------------------------------------------------------------------------
describe("MCP validation — reference field targets (bug #6)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    // Target collection for the reference field.
    await registry.createCollection({ slug: "page", label: "Pages" });
    await registry.createField("page", {
      slug: "title",
      label: "Title",
      type: "string",
      required: true,
    });
    await registry.createCollection({ slug: "post", label: "Posts" });
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
      required: true,
    });
    // Reference field scoped to the "page" collection only.
    await registry.createField("post", {
      slug: "parent_page",
      label: "Parent Page",
      type: "reference",
      validation: { collection: "page" },
    });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
  });
  afterEach(async () => {
    // harness is undefined when connectMcpHarness threw in beforeEach.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects reference to non-existent target id", async () => {
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", parent_page: "01NOTAREALPAGE" },
      },
    });
    expect(result.isError).toBe(true);
    const text = extractText(result);
    expect(text).toMatch(VALIDATION_ERROR);
    // Tight match: the error must specifically mention the offending field,
    // echo the bad target id, AND say "not found" (one assertion per
    // concern so a regression where any signal disappears is caught).
    expect(text).toContain("parent_page");
    expect(text).toContain("01NOTAREALPAGE");
    expect(text).toMatch(/\bnot found\b/i);
  });
  it("accepts reference to a real target id (regression guard)", async () => {
    // Create a page first
    const page = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "page", data: { title: "Real page" } },
    });
    expect(page.isError, extractText(page)).toBeFalsy();
    const pageId = JSON.parse(extractText(page)).item.id as string;
    const post = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", parent_page: pageId },
      },
    });
    expect(post.isError, extractText(post)).toBeFalsy();
  });
  it("rejects reference to id that exists in a different collection", async () => {
    // Create a post (which is NOT the page collection the reference is scoped to)
    const repo = new ContentRepository(db);
    const otherPost = await repo.create({
      type: "post",
      data: { title: "Other" },
      slug: "other",
      status: "draft",
      authorId: ADMIN_ID,
    });
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", parent_page: otherPost.id },
      },
    });
    // Reference points to a post id but field expects a page reference.
    // After fix this should fail.
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
  });
  it("rejects reference to a soft-deleted (trashed) target", async () => {
    const page = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "page", data: { title: "Will be trashed" } },
    });
    const pageId = JSON.parse(extractText(page)).item.id as string;
    // Trash via repo — repo.delete presumably soft-deletes (sets
    // deleted_at) rather than hard-deleting; confirm against
    // ContentRepository if this test starts failing.
    const repo = new ContentRepository(db);
    await repo.delete("page", pageId);
    const result = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { title: "T", parent_page: pageId },
      },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
  });
});
// ---------------------------------------------------------------------------
// Combined: error message is structured even when multiple fields fail
// ---------------------------------------------------------------------------
// A payload that violates several fields at once must produce one error
// message naming every offending field, so callers can fix everything in
// a single round trip.
describe("MCP validation — multi-field error messaging", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const schema = new SchemaRegistry(db);
    await schema.createCollection({ label: "Posts", slug: "post" });
    // One required string field plus one constrained select field: a
    // single bad payload can then violate both simultaneously.
    await schema.createField("post", {
      label: "Title",
      slug: "title",
      required: true,
      type: "string",
    });
    await schema.createField("post", {
      label: "Priority",
      slug: "priority",
      type: "select",
      validation: { options: ["low", "high"] },
    });
    harness = await connectMcpHarness({ db, userRole: Role.ADMIN, userId: ADMIN_ID });
  });
  afterEach(async () => {
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("when multiple fields fail validation, the error mentions all of them", async () => {
    // Missing required title AND invalid priority, in one request.
    const response = await harness.client.callTool({
      name: "content_create",
      arguments: {
        collection: "post",
        data: { priority: "URGENT" },
      },
    });
    expect(response.isError).toBe(true);
    const message = extractText(response);
    // Every offending field name must appear in the single message.
    for (const fieldPattern of [/title/i, /priority/i]) {
      expect(message).toMatch(fieldPattern);
    }
  });
});
// ---------------------------------------------------------------------------
// F4: validation runs on UPDATE for revision-supporting collections.
//
// Before the fix, the runtime wrote the draft revision *before* the API
// handler ran (and called the handler with `data: undefined`), so update-
// time validation was bypassed for any collection that supports revisions.
// ---------------------------------------------------------------------------
describe("MCP validation — UPDATE on revision-supporting collections (F4)", () => {
  let db: Kysely<Database>;
  let harness: McpHarness;
  let postId: string;
  beforeEach(async () => {
    db = await setupTestDatabase();
    const registry = new SchemaRegistry(db);
    // Collection opts into drafts + revisions — the exact configuration
    // that previously bypassed update-time validation.
    await registry.createCollection({
      slug: "post",
      label: "Posts",
      supports: ["drafts", "revisions"],
    });
    await registry.createField("post", {
      slug: "title",
      label: "Title",
      type: "string",
      required: true,
    });
    harness = await connectMcpHarness({ db, userId: ADMIN_ID, userRole: Role.ADMIN });
    // Seed one valid post that the test below will try to corrupt.
    const create = await harness.client.callTool({
      name: "content_create",
      arguments: { collection: "post", data: { title: "Initial title" } },
    });
    expect(create.isError, extractText(create)).toBeFalsy();
    postId = JSON.parse(extractText(create)).item.id as string;
  });
  afterEach(async () => {
    // harness is undefined when connectMcpHarness threw in beforeEach.
    if (harness) await harness.cleanup();
    await teardownTestDatabase(db);
  });
  it("rejects update with empty required field BEFORE creating a draft revision", async () => {
    const result = await harness.client.callTool({
      name: "content_update",
      arguments: { collection: "post", id: postId, data: { title: "" } },
    });
    expect(result.isError).toBe(true);
    expect(extractText(result)).toMatch(VALIDATION_ERROR);
    // And no draft revision was written — listing revisions returns empty.
    const list = await harness.client.callTool({
      name: "revision_list",
      arguments: { collection: "post", id: postId },
    });
    expect(list.isError, extractText(list)).toBeFalsy();
    const { items } = JSON.parse(extractText(list)) as { items: unknown[] };
    expect(items).toEqual([]);
  });
});

View File

@@ -0,0 +1,115 @@
/**
* Integration tests for the configurable media upload size limit.
*
* Starts a server with maxUploadSize=1 MB and verifies that both
* upload paths (direct multipart and signed-URL) enforce the limit.
*/
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import { assertNodeVersion, createTestServer, type TestServerContext } from "../server.js";
// Port presumably chosen to be unique among the integration-test suites so
// they can run in parallel — confirm against the other server tests.
const PORT = 4400;
const ONE_MB = 1024 * 1024;
// Shared server context for every test in this file.
let ctx: TestServerContext;
beforeAll(async () => {
  assertNodeVersion();
  // Boot an unseeded server with the upload cap set to exactly 1 MB, so
  // the suites below can probe both sides of the limit.
  ctx = await createTestServer({
    port: PORT,
    seed: false,
    env: { EMDASH_MAX_UPLOAD_SIZE: String(ONE_MB) },
  });
}, 120_000); // generous 2-minute timeout for server startup
afterAll(async () => {
  // ctx is undefined if beforeAll failed before assignment.
  await ctx?.cleanup();
});
// Direct multipart path: the limit is enforced on the actual bytes sent.
describe("direct multipart upload", () => {
  // POST the given file to the media endpoint as an authenticated
  // multipart request and return the raw response.
  async function uploadFile(file: File): Promise<Response> {
    const form = new FormData();
    form.append("file", file);
    return fetch(`${ctx.baseUrl}/_emdash/api/media`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${ctx.token}`,
        "X-EmDash-Request": "1",
      },
      body: form,
    });
  }
  it("rejects a file that exceeds maxUploadSize with 413", async () => {
    // 2 MB against a 1 MB cap.
    const res = await uploadFile(
      new File([new Uint8Array(2 * ONE_MB)], "big.pdf", { type: "application/pdf" }),
    );
    expect(res.status).toBe(413);
    const payload = (await res.json()) as { error: { code: string } };
    expect(payload.error.code).toBe("PAYLOAD_TOO_LARGE");
  });
  it("accepts a file within maxUploadSize", async () => {
    // 512 KB is comfortably under the 1 MB cap.
    const res = await uploadFile(
      new File([new Uint8Array(512 * 1024)], "small.pdf", { type: "application/pdf" }),
    );
    // 201 = created successfully
    expect(res.status).toBe(201);
  });
});
describe("signed-URL upload (upload-url endpoint)", () => {
  it("rejects a declared size that exceeds maxUploadSize with 400", async () => {
    // On this path the limit is checked against the client-declared size,
    // before any bytes are transferred — hence 400 (validation) rather
    // than 413 (payload too large).
    const res = await fetch(`${ctx.baseUrl}/_emdash/api/media/upload-url`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${ctx.token}`,
        "X-EmDash-Request": "1",
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        filename: "big.pdf",
        contentType: "application/pdf",
        size: 2 * ONE_MB,
      }),
    });
    expect(res.status).toBe(400);
    const json = (await res.json()) as { error: { code: string } };
    expect(json.error.code).toBe("VALIDATION_ERROR");
  });
  it("passes size validation for a declared size within maxUploadSize", async () => {
    // Local storage does not support signed URLs, so a valid-size request
    // proceeds past Zod validation and fails later with 501.
    const res = await fetch(`${ctx.baseUrl}/_emdash/api/media/upload-url`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${ctx.token}`,
        "X-EmDash-Request": "1",
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        filename: "ok.pdf",
        contentType: "application/pdf",
        size: 512 * 1024,
      }),
    });
    // 501 means the request passed size validation and hit the storage layer.
    // A size-rejection would produce 400.
    expect(res.status).toBe(501);
  });
});

View File

@@ -0,0 +1,193 @@
import SwaggerParser from "@apidevtools/swagger-parser";
import { describe, expect, it } from "vitest";
import { generateOpenApiDocument } from "../../../src/api/openapi/document.js";
// Structural checks over the generated OpenAPI document: schema validity,
// $ref resolution, per-operation invariants (responses, operationIds,
// envelopes, auth errors), and component registration.
describe("OpenAPI spec validation", () => {
  it("produces a valid OpenAPI 3.1 document", async () => {
    const doc = generateOpenApiDocument();
    // swagger-parser.validate() resolves $refs and validates against the OAS JSON Schema.
    // It throws if the document is invalid. structuredClone guards against
    // validate() mutating the generator's output in place.
    const validated = await SwaggerParser.validate(structuredClone(doc));
    expect(validated.openapi).toBe("3.1.0");
    expect(validated.info.title).toBe("EmDash CMS API");
  });
  it("resolves all $ref pointers without errors", async () => {
    const doc = generateOpenApiDocument();
    // dereference() resolves every $ref in the document tree.
    // If any $ref points to a missing schema, it throws.
    const dereferenced = await SwaggerParser.dereference(structuredClone(doc));
    // After dereferencing, no $ref keys should remain.
    // Use a replacer to handle circular references (e.g. PublicComment.replies)
    const seen = new WeakSet();
    const json = JSON.stringify(dereferenced, (_key, value) => {
      if (typeof value === "object" && value !== null) {
        if (seen.has(value)) return "[Circular]";
        seen.add(value);
      }
      return value;
    });
    expect(json).not.toContain('"$ref"');
  });
  it("has all content paths with responses", () => {
    const doc = generateOpenApiDocument();
    const paths = doc.paths ?? {};
    for (const [path, pathItem] of Object.entries(paths)) {
      for (const method of ["get", "post", "put", "delete", "patch"] as const) {
        const op = (pathItem as Record<string, unknown>)?.[method] as
          | { responses?: Record<string, unknown>; operationId?: string }
          | undefined;
        if (!op) continue;
        // Every operation must have responses
        expect(op.responses, `${method.toUpperCase()} ${path} missing responses`).toBeDefined();
        // Every operation must have an operationId
        expect(op.operationId, `${method.toUpperCase()} ${path} missing operationId`).toBeDefined();
        // Every operation must have at least one success response (2xx)
        const statusCodes = Object.keys(op.responses ?? {});
        const has2xx = statusCodes.some((code) => code.startsWith("2"));
        expect(has2xx, `${method.toUpperCase()} ${path} has no 2xx response`).toBe(true);
      }
    }
  });
  it("wraps all success responses in the { data } envelope", () => {
    const doc = generateOpenApiDocument();
    const paths = doc.paths ?? {};
    for (const [path, pathItem] of Object.entries(paths)) {
      for (const method of ["get", "post", "put", "delete", "patch"] as const) {
        const op = (pathItem as Record<string, unknown>)?.[method] as
          | { responses?: Record<string, Record<string, unknown>> }
          | undefined;
        if (!op?.responses) continue;
        for (const [statusCode, response] of Object.entries(op.responses)) {
          if (!statusCode.startsWith("2")) continue;
          const content = (response as Record<string, unknown>)?.content as
            | Record<string, { schema?: Record<string, unknown> }>
            | undefined;
          // Responses without JSON content (e.g. 204) are exempt.
          if (!content?.["application/json"]) continue;
          const schema = content["application/json"].schema;
          expect(
            schema,
            `${method.toUpperCase()} ${path} ${statusCode} missing schema`,
          ).toBeDefined();
          // The envelope must have a "data" property (either directly or via $ref that wraps it)
          // Check for direct properties or allOf/oneOf patterns
          const props = (schema as Record<string, unknown>)?.properties as
            | Record<string, unknown>
            | undefined;
          if (props) {
            expect(
              props,
              `${method.toUpperCase()} ${path} ${statusCode} envelope missing "data" property`,
            ).toHaveProperty("data");
          }
        }
      }
    }
  });
  it("includes auth error responses on authenticated endpoints", () => {
    const doc = generateOpenApiDocument();
    const paths = doc.paths ?? {};
    // Public endpoints that don't require authentication
    const publicPaths = new Set(["/_emdash/api/comments/{collection}/{contentId}"]);
    for (const [path, pathItem] of Object.entries(paths)) {
      if (publicPaths.has(path)) continue;
      for (const method of ["get", "post", "put", "delete", "patch"] as const) {
        const op = (pathItem as Record<string, unknown>)?.[method] as
          | { responses?: Record<string, unknown> }
          | undefined;
        if (!op?.responses) continue;
        const statusCodes = Object.keys(op.responses);
        expect(statusCodes, `${method.toUpperCase()} ${path} missing 401`).toContain("401");
        expect(statusCodes, `${method.toUpperCase()} ${path} missing 403`).toContain("403");
      }
    }
  });
  it("has no duplicate operation IDs across all paths", () => {
    const doc = generateOpenApiDocument();
    const operationIds: string[] = [];
    for (const pathItem of Object.values(doc.paths ?? {})) {
      for (const method of ["get", "post", "put", "delete", "patch"] as const) {
        const op = (pathItem as Record<string, unknown>)?.[method] as
          | { operationId?: string }
          | undefined;
        if (op?.operationId) {
          operationIds.push(op.operationId);
        }
      }
    }
    // Report the first duplicate by name rather than just a count mismatch.
    const seen = new Set<string>();
    for (const id of operationIds) {
      expect(seen.has(id), `duplicate operationId: ${id}`).toBe(false);
      seen.add(id);
    }
  });
  // Fixed: the callback was declared `async` but contains no `await`
  // (require-await violation) — it is now synchronous.
  it("registers referenced schemas as reusable components", () => {
    const doc = generateOpenApiDocument();
    const schemas = doc.components?.schemas ?? {};
    const schemaNames = Object.keys(schemas);
    // Should have a reasonable number of reusable schemas
    expect(schemaNames.length).toBeGreaterThanOrEqual(5);
    // Every registered component schema must at least be an object
    for (const [name, schema] of Object.entries(schemas)) {
      expect(schema, `component schema "${name}" is not an object`).toBeTypeOf("object");
    }
  });
  it("uses consistent error response shape across all error codes", () => {
    const doc = generateOpenApiDocument();
    const paths = doc.paths ?? {};
    for (const [path, pathItem] of Object.entries(paths)) {
      for (const method of ["get", "post", "put", "delete", "patch"] as const) {
        const op = (pathItem as Record<string, unknown>)?.[method] as
          | { responses?: Record<string, Record<string, unknown>> }
          | undefined;
        if (!op?.responses) continue;
        for (const [statusCode, response] of Object.entries(op.responses)) {
          // Only check error responses (4xx, 5xx)
          const code = Number(statusCode);
          if (code < 400) continue;
          const content = (response as Record<string, unknown>)?.content as
            | Record<string, { schema?: Record<string, unknown> }>
            | undefined;
          if (!content?.["application/json"]) continue;
          const schema = content["application/json"].schema;
          expect(
            schema,
            `${method.toUpperCase()} ${path} ${statusCode} error missing schema`,
          ).toBeDefined();
        }
      }
    }
  });
});

View File

@@ -0,0 +1,961 @@
/**
* Capability Enforcement Integration Tests (v2)
*
* Tests the capability-based access gating in the v2 plugin context.
* v2 always enforces capabilities - there's no "trusted mode" bypass.
*
*/
import Database from "better-sqlite3";
import { Kysely, SqliteDialect, sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import { UserRepository } from "../../../src/database/repositories/user.js";
import type { Database as DbSchema } from "../../../src/database/types.js";
import {
PluginContextFactory,
createContentAccess,
createContentAccessWithWrite,
createHttpAccess,
createUnrestrictedHttpAccess,
createBlockedHttpAccess,
createLogAccess,
createStorageAccess,
createKVAccess,
createSiteInfo,
createUrlHelper,
createUserAccess,
} from "../../../src/plugins/context.js";
import type { ResolvedPlugin } from "../../../src/plugins/types.js";
// Test regex patterns — presumably matched against capability-denial error
// messages thrown by the plugin context (their usage is further down this
// file); each mirrors the literal phrase it expects.
// Fetch to a host outside the plugin's allowed list.
const NOT_ALLOWED_FETCH_REGEX = /not allowed to fetch from host/;
// Generic fetch denial.
const NO_ALLOWED_FETCH_REGEX = /not allowed to fetch/;
// Plugin lacks the "network:request" capability entirely.
const NO_NETWORK_FETCH_REGEX = /does not have the "network:request" capability/;
// Target collection is not registered with SEO support.
const SEO_NOT_ENABLED_REGEX = /does not have SEO enabled/;
/**
 * Build a minimal ResolvedPlugin for testing.
 *
 * Every collection-valued property defaults to empty; pass `overrides`
 * to customise individual properties while keeping the rest.
 */
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
  const defaults: ResolvedPlugin = {
    id: "test-plugin",
    version: "1.0.0",
    capabilities: [],
    allowedHosts: [],
    storage: {},
    admin: {
      pages: [],
      widgets: [],
      fieldWidgets: {},
    },
    hooks: {},
    routes: {},
    settings: undefined,
  };
  // Overrides win over the defaults, property by property.
  return { ...defaults, ...overrides };
}
describe("Capability Enforcement Integration (v2)", () => {
let db: Kysely<DbSchema>;
let sqliteDb: Database.Database;
beforeEach(async () => {
// Create in-memory SQLite database
sqliteDb = new Database(":memory:");
db = new Kysely<DbSchema>({
dialect: new SqliteDialect({
database: sqliteDb,
}),
});
// Run migrations
await runMigrations(db);
// Create test content table with actual field columns (not JSON data column)
// The ContentRepository expects real columns for each field
await sql`
CREATE TABLE IF NOT EXISTS ec_posts (
id TEXT PRIMARY KEY,
slug TEXT,
status TEXT DEFAULT 'draft',
author_id TEXT,
primary_byline_id TEXT,
created_at TEXT DEFAULT (datetime('now')),
updated_at TEXT DEFAULT (datetime('now')),
published_at TEXT,
deleted_at TEXT,
version INTEGER DEFAULT 1,
locale TEXT NOT NULL DEFAULT 'en',
translation_group TEXT,
title TEXT,
content TEXT,
UNIQUE(slug, locale)
)
`.execute(db);
// Insert test content with actual column values
await sql`
INSERT INTO ec_posts (id, slug, status, title, content, locale, translation_group)
VALUES
('post-1', 'hello-world', 'published', 'Hello World', 'Content 1', 'en', 'post-1'),
('post-2', 'second-post', 'draft', 'Second Post', 'Content 2', 'en', 'post-2')
`.execute(db);
});
afterEach(async () => {
await db.destroy();
sqliteDb.close();
});
describe("Content Access", () => {
describe("createContentAccess (read-only)", () => {
it("can read content by ID", async () => {
const access = createContentAccess(db);
const post = await access.get("posts", "post-1");
expect(post).not.toBeNull();
expect(post!.id).toBe("post-1");
expect(post!.data.title).toBe("Hello World");
});
it("can list content", async () => {
const access = createContentAccess(db);
const result = await access.list("posts");
expect(result.items).toHaveLength(2);
expect(result.hasMore).toBe(false);
});
it("narrows list results by where.status", async () => {
const access = createContentAccess(db);
const result = await access.list("posts", { where: { status: "published" } });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.id).toBe("post-1");
expect(result.items[0]!.status).toBe("published");
});
it("narrows list results by where.locale", async () => {
await sql`
INSERT INTO ec_posts (id, slug, status, title, content, locale, translation_group)
VALUES ('post-3', 'bonjour', 'published', 'Bonjour', 'Contenu', 'fr', 'post-3')
`.execute(db);
const access = createContentAccess(db);
const result = await access.list("posts", { where: { locale: "fr" } });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.id).toBe("post-3");
});
it("combines where.status and where.locale", async () => {
await sql`
INSERT INTO ec_posts (id, slug, status, title, content, locale, translation_group)
VALUES
('post-3', 'bonjour', 'published', 'Bonjour', 'Contenu', 'fr', 'post-3'),
('post-4', 'brouillon', 'draft', 'Brouillon', 'WIP', 'fr', 'post-4')
`.execute(db);
const access = createContentAccess(db);
const result = await access.list("posts", {
where: { status: "published", locale: "fr" },
});
expect(result.items).toHaveLength(1);
expect(result.items[0]!.id).toBe("post-3");
});
it("paginates consistently with where filters", async () => {
// Three more published posts so the total published count is 4.
// A limit of 2 should yield two pages: [2, 2] — never drafts.
await sql`
INSERT INTO ec_posts (id, slug, status, title, content, locale, translation_group)
VALUES
('post-3', 'a', 'published', 'A', 'a', 'en', 'post-3'),
('post-4', 'b', 'published', 'B', 'b', 'en', 'post-4'),
('post-5', 'c', 'published', 'C', 'c', 'en', 'post-5')
`.execute(db);
const access = createContentAccess(db);
const page1 = await access.list("posts", {
limit: 2,
where: { status: "published" },
});
expect(page1.items).toHaveLength(2);
expect(page1.hasMore).toBe(true);
for (const item of page1.items) expect(item.status).toBe("published");
const page2 = await access.list("posts", {
limit: 2,
cursor: page1.cursor,
where: { status: "published" },
});
expect(page2.items).toHaveLength(2);
expect(page2.hasMore).toBe(false);
for (const item of page2.items) expect(item.status).toBe("published");
// No overlap between pages
const ids = new Set([...page1.items, ...page2.items].map((i) => i.id));
expect(ids.size).toBe(4);
// Drafts never surface
expect(ids.has("post-2")).toBe(false);
});
it("returns null for non-existent content", async () => {
const access = createContentAccess(db);
const post = await access.get("posts", "non-existent");
expect(post).toBeNull();
});
});
describe("createContentAccessWithWrite", () => {
it("includes read methods", async () => {
const access = createContentAccessWithWrite(db);
expect(typeof access.get).toBe("function");
expect(typeof access.list).toBe("function");
});
it("includes write methods", async () => {
const access = createContentAccessWithWrite(db);
expect(typeof access.create).toBe("function");
expect(typeof access.update).toBe("function");
expect(typeof access.delete).toBe("function");
});
it("can create new content", async () => {
const access = createContentAccessWithWrite(db);
const created = await access.create("posts", {
title: "New Post",
content: "New content",
});
expect(created.id).toBeDefined();
expect(created.data.title).toBe("New Post");
// Verify it was created
const found = await access.get("posts", created.id);
expect(found).not.toBeNull();
});
});
describe("SEO panel integration", () => {
beforeEach(async () => {
// Register the "posts" collection with SEO enabled so the plugin
// content API routes `seo` writes to the core SEO panel.
await sql`
INSERT INTO _emdash_collections (slug, label, label_singular, has_seo)
VALUES ('posts', 'Posts', 'Post', 1)
`.execute(db);
});
it("returns seo defaults from get() for SEO-enabled collections", async () => {
const access = createContentAccess(db);
const post = await access.get("posts", "post-1");
expect(post).not.toBeNull();
expect(post!.seo).toEqual({
title: null,
description: null,
image: null,
canonical: null,
noIndex: false,
});
});
it("omits seo from get() for collections without SEO enabled", async () => {
// Reset has_seo on posts so it behaves like a non-SEO collection
await db
.updateTable("_emdash_collections")
.set({ has_seo: 0 })
.where("slug", "=", "posts")
.execute();
const access = createContentAccess(db);
const post = await access.get("posts", "post-1");
expect(post).not.toBeNull();
expect(post!.seo).toBeUndefined();
});
it("update() routes `seo` to the SEO panel instead of failing on missing column", async () => {
const access = createContentAccessWithWrite(db);
// Regression for #374: previously this threw
// "SQLite error: no such column: seo"
const updated = await access.update("posts", "post-1", {
seo: {
title: "Custom SEO Title",
description: "A better meta description",
canonical: "https://example.com/canonical",
noIndex: false,
},
});
expect(updated.seo).toEqual({
title: "Custom SEO Title",
description: "A better meta description",
image: null,
canonical: "https://example.com/canonical",
noIndex: false,
});
// Verify it persisted via a subsequent read
const fresh = await access.get("posts", "post-1");
expect(fresh!.seo?.title).toBe("Custom SEO Title");
expect(fresh!.seo?.description).toBe("A better meta description");
});
it("update() accepts field updates alongside seo in a single call", async () => {
const access = createContentAccessWithWrite(db);
const updated = await access.update("posts", "post-1", {
title: "Updated Title",
seo: {
title: "SEO Title",
description: "SEO Description",
},
});
expect(updated.data.title).toBe("Updated Title");
expect(updated.seo?.title).toBe("SEO Title");
expect(updated.seo?.description).toBe("SEO Description");
});
it("update() only overwrites explicitly-set seo fields (partial updates)", async () => {
const access = createContentAccessWithWrite(db);
await access.update("posts", "post-1", {
seo: { title: "Initial Title", description: "Initial Description" },
});
const updated = await access.update("posts", "post-1", {
seo: { title: "Updated Title" },
});
expect(updated.seo?.title).toBe("Updated Title");
// description must not be clobbered by a partial update
expect(updated.seo?.description).toBe("Initial Description");
});
it("create() routes `seo` to the SEO panel", async () => {
const access = createContentAccessWithWrite(db);
const created = await access.create("posts", {
title: "New Post",
content: "Body",
seo: {
title: "Brand New SEO",
description: "New Description",
},
});
expect(created.data.title).toBe("New Post");
expect(created.seo?.title).toBe("Brand New SEO");
expect(created.seo?.description).toBe("New Description");
const fresh = await access.get("posts", created.id);
expect(fresh!.seo?.title).toBe("Brand New SEO");
});
it("update() throws when seo is provided on a collection without SEO enabled", async () => {
// Disable SEO on posts
await db
.updateTable("_emdash_collections")
.set({ has_seo: 0 })
.where("slug", "=", "posts")
.execute();
const access = createContentAccessWithWrite(db);
await expect(
access.update("posts", "post-1", {
seo: { title: "Won't work" },
}),
).rejects.toThrow(SEO_NOT_ENABLED_REGEX);
});
it("list() hydrates seo for each item in SEO-enabled collections", async () => {
const access = createContentAccessWithWrite(db);
await access.update("posts", "post-1", {
seo: { title: "Post One SEO" },
});
await access.update("posts", "post-2", {
seo: { title: "Post Two SEO" },
});
const result = await access.list("posts");
expect(result.items).toHaveLength(2);
const byId = new Map(result.items.map((i) => [i.id, i]));
expect(byId.get("post-1")?.seo?.title).toBe("Post One SEO");
expect(byId.get("post-2")?.seo?.title).toBe("Post Two SEO");
});
});
});
describe("HTTP Access", () => {
describe("createHttpAccess (with host restrictions)", () => {
it("allows requests to allowed hosts", async () => {
const http = createHttpAccess("test-plugin", ["example.com"]);
// We can't actually make the request in tests, but we can verify
// the function doesn't throw for allowed hosts
expect(typeof http.fetch).toBe("function");
});
it("blocks requests to non-allowed hosts", async () => {
const http = createHttpAccess("test-plugin", ["example.com"]);
await expect(http.fetch("https://evil.com/api")).rejects.toThrow(NOT_ALLOWED_FETCH_REGEX);
});
it("supports wildcard host patterns", { timeout: 15000 }, async () => {
const http = createHttpAccess("test-plugin", ["*.example.com"]);
// Should not throw for subdomains
// (Can't test actual fetch, but verify pattern matching logic)
await expect(http.fetch("https://api.example.com/test")).rejects.not.toThrow(
NO_ALLOWED_FETCH_REGEX,
);
});
});
describe("createBlockedHttpAccess", () => {
it("always throws", async () => {
const http = createBlockedHttpAccess("no-network-plugin");
await expect(http.fetch("https://example.com")).rejects.toThrow(NO_NETWORK_FETCH_REGEX);
});
});
describe("createUnrestrictedHttpAccess", () => {
it("returns an HttpAccess with a fetch function", () => {
const http = createUnrestrictedHttpAccess("unrestricted-plugin");
expect(typeof http.fetch).toBe("function");
});
it("does not throw for any host", async () => {
const http = createUnrestrictedHttpAccess("unrestricted-plugin");
// Can't make a real request in tests, but verify it doesn't throw a
// host-validation error — it will throw a network error instead.
await expect(http.fetch("https://any-host-at-all.example.com/test")).rejects.not.toThrow(
NOT_ALLOWED_FETCH_REGEX,
);
});
});
});
describe("Storage Access", () => {
it("creates collection accessors from config", () => {
const storage = createStorageAccess(db, "test-plugin", {
events: { indexes: ["type"] },
cache: { indexes: ["key"] },
});
expect(storage.events).toBeDefined();
expect(storage.cache).toBeDefined();
});
it("provides full StorageCollection API", () => {
const storage = createStorageAccess(db, "test-plugin", {
items: { indexes: [] },
});
const collection = storage.items;
expect(typeof collection.get).toBe("function");
expect(typeof collection.put).toBe("function");
expect(typeof collection.delete).toBe("function");
expect(typeof collection.exists).toBe("function");
expect(typeof collection.getMany).toBe("function");
expect(typeof collection.putMany).toBe("function");
expect(typeof collection.deleteMany).toBe("function");
expect(typeof collection.query).toBe("function");
expect(typeof collection.count).toBe("function");
});
it("isolates storage between plugins", async () => {
const storage1 = createStorageAccess(db, "plugin-1", {
items: { indexes: [] },
});
const storage2 = createStorageAccess(db, "plugin-2", {
items: { indexes: [] },
});
await storage1.items.put("doc-1", { value: "from plugin 1" });
// Plugin 2 should not see plugin 1's data
const fromPlugin2 = await storage2.items.get("doc-1");
expect(fromPlugin2).toBeNull();
// Plugin 1 should still see its data
const fromPlugin1 = await storage1.items.get("doc-1");
expect(fromPlugin1).toEqual({ value: "from plugin 1" });
});
});
describe("KV Access", () => {
it("prefixes keys with plugin ID", async () => {
const optionsRepo = new OptionsRepository(db);
const kv = createKVAccess(optionsRepo, "test-plugin");
await kv.set("my-key", { foo: "bar" });
// Verify the key is prefixed in the database
const rawValue = await optionsRepo.get("plugin:test-plugin:my-key");
expect(rawValue).toEqual({ foo: "bar" });
});
it("isolates KV between plugins", async () => {
const optionsRepo = new OptionsRepository(db);
const kv1 = createKVAccess(optionsRepo, "plugin-1");
const kv2 = createKVAccess(optionsRepo, "plugin-2");
await kv1.set("shared-key", "value from 1");
await kv2.set("shared-key", "value from 2");
expect(await kv1.get("shared-key")).toBe("value from 1");
expect(await kv2.get("shared-key")).toBe("value from 2");
});
it("supports listing keys with prefix", async () => {
const optionsRepo = new OptionsRepository(db);
const kv = createKVAccess(optionsRepo, "test-plugin");
await kv.set("settings:theme", "dark");
await kv.set("settings:lang", "en");
await kv.set("cache:user-1", { name: "John" });
const settings = await kv.list("settings:");
expect(settings).toHaveLength(2);
expect(settings.map((s) => s.key).toSorted()).toEqual(["settings:lang", "settings:theme"]);
});
});
describe("Log Access", () => {
it("prefixes messages with plugin ID", () => {
const log = createLogAccess("test-plugin");
// These just verify the methods exist and don't throw
expect(() => log.debug("test message")).not.toThrow();
expect(() => log.info("test message", { extra: "data" })).not.toThrow();
expect(() => log.warn("test warning")).not.toThrow();
expect(() => log.error("test error")).not.toThrow();
});
});
describe("PluginContextFactory", () => {
it("creates context with capability-gated access", () => {
const factory = new PluginContextFactory({ db });
const readOnlyPlugin = createTestPlugin({
id: "reader",
capabilities: ["content:read"],
});
const ctx = factory.createContext(readOnlyPlugin);
// Content should be read-only (no create/update/delete)
expect(ctx.content).toBeDefined();
expect(typeof ctx.content!.get).toBe("function");
expect(typeof ctx.content!.list).toBe("function");
expect("create" in ctx.content!).toBe(false);
});
it("provides undefined content for plugins without capability", () => {
const factory = new PluginContextFactory({ db });
const noContentPlugin = createTestPlugin({
id: "no-content",
capabilities: ["network:request"],
});
const ctx = factory.createContext(noContentPlugin);
expect(ctx.content).toBeUndefined();
});
it("provides http for plugins with network:fetch", () => {
const factory = new PluginContextFactory({ db });
const networkPlugin = createTestPlugin({
id: "network",
capabilities: ["network:request"],
allowedHosts: ["api.example.com"],
});
const ctx = factory.createContext(networkPlugin);
expect(ctx.http).toBeDefined();
expect(typeof ctx.http!.fetch).toBe("function");
});
it("provides undefined http for plugins without capability", () => {
const factory = new PluginContextFactory({ db });
const noNetworkPlugin = createTestPlugin({
id: "no-network",
capabilities: [],
});
const ctx = factory.createContext(noNetworkPlugin);
expect(ctx.http).toBeUndefined();
});
it("provides unrestricted http for plugins with network:fetch:any", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "unrestricted-network",
capabilities: ["network:request:unrestricted", "network:request"],
});
const ctx = factory.createContext(plugin);
expect(ctx.http).toBeDefined();
expect(typeof ctx.http!.fetch).toBe("function");
});
it("prefers network:fetch:any over network:fetch when both present", async () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "both-fetch",
capabilities: ["network:request", "network:request:unrestricted"],
allowedHosts: ["restricted.example.com"],
});
const ctx = factory.createContext(plugin);
expect(ctx.http).toBeDefined();
// With network:fetch:any, arbitrary hosts should not throw a host validation error
await expect(ctx.http!.fetch("https://unrestricted.example.com/test")).rejects.not.toThrow(
NOT_ALLOWED_FETCH_REGEX,
);
});
it("always provides kv, storage, and log", () => {
const factory = new PluginContextFactory({ db });
const minimalPlugin = createTestPlugin({
id: "minimal",
capabilities: [],
storage: {
items: { indexes: [] },
},
});
const ctx = factory.createContext(minimalPlugin);
expect(ctx.kv).toBeDefined();
expect(ctx.storage).toBeDefined();
expect(ctx.storage.items).toBeDefined();
expect(ctx.log).toBeDefined();
});
it("provides write:content access with create/update/delete", () => {
const factory = new PluginContextFactory({ db });
const writePlugin = createTestPlugin({
id: "writer",
capabilities: ["content:write"],
});
const ctx = factory.createContext(writePlugin);
expect(ctx.content).toBeDefined();
expect("create" in ctx.content!).toBe(true);
expect("update" in ctx.content!).toBe(true);
expect("delete" in ctx.content!).toBe(true);
});
it("always provides site info", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({ id: "site-test", capabilities: [] });
const ctx = factory.createContext(plugin);
expect(ctx.site).toBeDefined();
expect(typeof ctx.site.name).toBe("string");
expect(typeof ctx.site.url).toBe("string");
expect(typeof ctx.site.locale).toBe("string");
});
it("always provides url() helper", () => {
const factory = new PluginContextFactory({
db,
siteInfo: { siteUrl: "https://example.com" },
});
const plugin = createTestPlugin({ id: "url-test", capabilities: [] });
const ctx = factory.createContext(plugin);
expect(typeof ctx.url).toBe("function");
expect(ctx.url("/posts")).toBe("https://example.com/posts");
});
it("provides users for plugins with read:users", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "user-reader",
capabilities: ["users:read"],
});
const ctx = factory.createContext(plugin);
expect(ctx.users).toBeDefined();
expect(typeof ctx.users!.get).toBe("function");
expect(typeof ctx.users!.getByEmail).toBe("function");
expect(typeof ctx.users!.list).toBe("function");
});
it("provides undefined users for plugins without read:users", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "no-users",
capabilities: [],
});
const ctx = factory.createContext(plugin);
expect(ctx.users).toBeUndefined();
});
});
describe("Site Info", () => {
it("creates site info with all options", () => {
const info = createSiteInfo({
siteName: "My Site",
siteUrl: "https://example.com/",
locale: "fr",
});
expect(info.name).toBe("My Site");
expect(info.url).toBe("https://example.com"); // trailing slash stripped
expect(info.locale).toBe("fr");
});
it("uses defaults for missing values", () => {
const info = createSiteInfo({});
expect(info.name).toBe("");
expect(info.url).toBe("");
expect(info.locale).toBe("en");
});
it("strips trailing slash from URL", () => {
const info = createSiteInfo({ siteUrl: "https://example.com/" });
expect(info.url).toBe("https://example.com");
});
});
describe("URL Helper", () => {
it("creates absolute URLs from paths", () => {
const url = createUrlHelper("https://example.com");
expect(url("/posts")).toBe("https://example.com/posts");
expect(url("/")).toBe("https://example.com/");
});
it("strips trailing slash from base URL", () => {
const url = createUrlHelper("https://example.com/");
expect(url("/posts")).toBe("https://example.com/posts");
});
it("throws for paths not starting with /", () => {
const url = createUrlHelper("https://example.com");
expect(() => url("posts")).toThrow('URL path must start with "/"');
});
it("works with empty base URL", () => {
const url = createUrlHelper("");
expect(url("/posts")).toBe("/posts");
});
it("rejects protocol-relative paths (//)", () => {
const url = createUrlHelper("https://example.com");
expect(() => url("//evil.com")).toThrow("protocol-relative");
});
it("rejects protocol-relative paths with empty base URL", () => {
const url = createUrlHelper("");
expect(() => url("//evil.com/path")).toThrow("protocol-relative");
});
});
describe("User Access", () => {
let userRepo: UserRepository;
beforeEach(async () => {
userRepo = new UserRepository(db);
// Create test users with all 5 role levels
await userRepo.create({ email: "admin@test.com", name: "Admin User", role: "admin" });
await userRepo.create({ email: "editor@test.com", name: "Editor User", role: "editor" });
await userRepo.create({ email: "author@test.com", name: "Author User", role: "author" });
await userRepo.create({
email: "contrib@test.com",
name: "Contributor User",
role: "contributor",
});
await userRepo.create({
email: "sub@test.com",
name: "Subscriber User",
role: "subscriber",
});
});
it("gets user by ID", async () => {
const user = await userRepo.findByEmail("admin@test.com");
const access = createUserAccess(db);
const result = await access.get(user!.id);
expect(result).not.toBeNull();
expect(result!.email).toBe("admin@test.com");
expect(result!.name).toBe("Admin User");
expect(result!.role).toBe(50); // admin = 50
});
it("gets user by email", async () => {
const access = createUserAccess(db);
const result = await access.getByEmail("editor@test.com");
expect(result).not.toBeNull();
expect(result!.email).toBe("editor@test.com");
expect(result!.role).toBe(40); // editor = 40
});
it("returns null for non-existent user", async () => {
const access = createUserAccess(db);
expect(await access.get("non-existent")).toBeNull();
expect(await access.getByEmail("nobody@test.com")).toBeNull();
});
it("lists users", async () => {
const access = createUserAccess(db);
const result = await access.list();
expect(result.items).toHaveLength(5);
// All users should have role as number
for (const user of result.items) {
expect(typeof user.role).toBe("number");
}
});
it("excludes sensitive fields", async () => {
const access = createUserAccess(db);
const result = await access.list();
for (const user of result.items) {
// UserInfo should only have: id, email, name, role, createdAt
const keys = Object.keys(user);
expect(keys).toContain("id");
expect(keys).toContain("email");
expect(keys).toContain("name");
expect(keys).toContain("role");
expect(keys).toContain("createdAt");
// Should NOT have sensitive fields
expect(keys).not.toContain("avatarUrl");
expect(keys).not.toContain("emailVerified");
expect(keys).not.toContain("data");
expect(keys).not.toContain("password_hash");
}
});
it("converts role strings to numeric levels", async () => {
const access = createUserAccess(db);
const admin = await access.getByEmail("admin@test.com");
const editor = await access.getByEmail("editor@test.com");
const subscriber = await access.getByEmail("sub@test.com");
expect(admin!.role).toBe(50);
expect(editor!.role).toBe(40);
expect(subscriber!.role).toBe(10);
});
it("respects limit on list", async () => {
const access = createUserAccess(db);
const result = await access.list({ limit: 2 });
expect(result.items).toHaveLength(2);
expect(result.nextCursor).toBeDefined();
});
it("clamps limit to maximum of 100", async () => {
const access = createUserAccess(db);
// Should not throw for large limits — just clamp
const result = await access.list({ limit: 500 });
expect(result.items).toHaveLength(5);
});
it("clamps negative limit to minimum of 1", async () => {
const access = createUserAccess(db);
// Negative limit should be clamped to 1, not passed through
const result = await access.list({ limit: -999 });
expect(result.items).toHaveLength(1);
});
it("preserves contributor (20) and author (30) roles", async () => {
// beforeEach creates users via UserRepository with all 5 roles.
// Verify that contributor (20) and author (30) survive the round-trip.
const access = createUserAccess(db);
const contributor = await access.getByEmail("contrib@test.com");
expect(contributor).not.toBeNull();
expect(contributor!.role).toBe(20);
const author = await access.getByEmail("author@test.com");
expect(author).not.toBeNull();
expect(author!.role).toBe(30);
});
it("filters users by exact role number", async () => {
// beforeEach creates one user per role level (10, 20, 30, 40, 50)
const access = createUserAccess(db);
const contributors = await access.list({ role: 20 });
expect(contributors.items).toHaveLength(1);
expect(contributors.items[0]!.email).toBe("contrib@test.com");
expect(contributors.items[0]!.role).toBe(20);
const authors = await access.list({ role: 30 });
expect(authors.items).toHaveLength(1);
expect(authors.items[0]!.email).toBe("author@test.com");
expect(authors.items[0]!.role).toBe(30);
const admins = await access.list({ role: 50 });
expect(admins.items).toHaveLength(1);
expect(admins.items[0]!.email).toBe("admin@test.com");
});
it("supports cursor-based pagination", async () => {
const access = createUserAccess(db);
const seen = new Set<string>();
// Page through all 5 users one at a time
let cursor: string | undefined;
let pageCount = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
const page = await access.list({ limit: 1, cursor });
if (page.items.length === 0) break;
expect(page.items).toHaveLength(1);
const userId = page.items[0]!.id;
expect(seen.has(userId)).toBe(false); // no duplicates
seen.add(userId);
pageCount++;
if (!page.nextCursor) break; // last page
cursor = page.nextCursor;
}
expect(seen.size).toBe(5);
expect(pageCount).toBe(5);
});
});
});

View File

@@ -0,0 +1,236 @@
/**
* Integration tests for field widget manifest pipeline.
*
* Tests that field widgets declared on collections flow through
* the manifest builder correctly, including the widget property
* and select options for select/multiSelect fields.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// One fresh test database per test case.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
});
afterEach(async () => {
  // NOTE(review): this file closes the handle directly via db.destroy();
  // the sibling suite uses teardownTestDatabase — confirm no extra cleanup
  // is required here.
  await db.destroy();
});
describe("field widget on schema fields", () => {
it("should store and retrieve widget property on a field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "theme_color",
label: "Theme Color",
type: "string",
widget: "color:picker",
});
const collection = await registry.getCollectionWithFields("posts");
expect(collection).toBeTruthy();
const colorField = collection!.fields.find((f) => f.slug === "theme_color");
expect(colorField).toBeTruthy();
expect(colorField!.widget).toBe("color:picker");
expect(colorField!.type).toBe("string");
});
it("should store and retrieve widget on a json field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "pricing",
label: "Pricing",
type: "json",
widget: "x402:pricing",
});
const collection = await registry.getCollectionWithFields("posts");
const pricingField = collection!.fields.find((f) => f.slug === "pricing");
expect(pricingField).toBeTruthy();
expect(pricingField!.widget).toBe("x402:pricing");
expect(pricingField!.type).toBe("json");
});
it("should return undefined widget when not set", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "title",
label: "Title",
type: "string",
});
const collection = await registry.getCollectionWithFields("posts");
const titleField = collection!.fields.find((f) => f.slug === "title");
expect(titleField).toBeTruthy();
expect(titleField!.widget).toBeUndefined();
});
it("should update widget on an existing field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "color",
label: "Color",
type: "string",
});
// Update to add widget
await registry.updateField("posts", "color", {
widget: "color:picker",
});
const collection = await registry.getCollectionWithFields("posts");
const colorField = collection!.fields.find((f) => f.slug === "color");
expect(colorField!.widget).toBe("color:picker");
});
it("should include select options from validation in manifest format", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "priority",
label: "Priority",
type: "select",
validation: {
options: ["low", "medium", "high"],
},
});
const collection = await registry.getCollectionWithFields("posts");
const priorityField = collection!.fields.find((f) => f.slug === "priority");
expect(priorityField).toBeTruthy();
expect(priorityField!.type).toBe("select");
expect(priorityField!.validation?.options).toEqual(["low", "medium", "high"]);
});
});
describe("field widget content CRUD", () => {
it("should save and retrieve content with a widget field value", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "title",
label: "Title",
type: "string",
});
await registry.createField("posts", {
slug: "theme_color",
label: "Theme Color",
type: "string",
widget: "color:picker",
});
// Insert content with the widget field value
const { ulid } = await import("ulidx");
const id = ulid();
await db
.insertInto("ec_posts" as never)
.values({
id,
slug: "test-post",
status: "draft",
title: "Test Post",
theme_color: "#ff6600",
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
version: 1,
} as never)
.execute();
// Read it back
const row = await db
.selectFrom("ec_posts" as never)
.selectAll()
.where("id" as never, "=", id)
.executeTakeFirst();
expect(row).toBeTruthy();
expect((row as Record<string, unknown>).theme_color).toBe("#ff6600");
});
it("should save and retrieve json widget field value", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "pricing",
label: "Pricing",
type: "json",
widget: "x402:pricing",
});
const { ulid } = await import("ulidx");
const id = ulid();
const pricingValue = JSON.stringify({ enabled: true, price: "$0.10", gateMode: "bots" });
await db
.insertInto("ec_posts" as never)
.values({
id,
slug: "premium-post",
status: "draft",
pricing: pricingValue,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
version: 1,
} as never)
.execute();
const row = await db
.selectFrom("ec_posts" as never)
.selectAll()
.where("id" as never, "=", id)
.executeTakeFirst();
expect(row).toBeTruthy();
const pricing = JSON.parse((row as Record<string, unknown>).pricing as string);
expect(pricing.enabled).toBe(true);
expect(pricing.price).toBe("$0.10");
expect(pricing.gateMode).toBe("bots");
});
});

View File

@@ -0,0 +1,380 @@
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { PluginStorageRepository } from "../../../src/database/repositories/plugin-storage.js";
import type { Database } from "../../../src/database/types.js";
import {
createStorageIndexes,
removeOrphanedIndexes,
syncStorageIndexes,
removeAllPluginIndexes,
getPluginIndexStatus,
} from "../../../src/plugins/storage-indexes.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Matches SQLite's duplicate-key error text (used by the unique-index tests).
const UNIQUE_CONSTRAINT_PATTERN = /UNIQUE constraint failed/;
describe("Plugin Storage Indexes Integration", () => {
// One fresh test database per test, torn down afterwards so index state
// never leaks between cases.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
});
afterEach(async () => {
  await teardownTestDatabase(db);
});
describe("createStorageIndexes", () => {
it("should create single-field index", async () => {
const result = await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
expect(result.created).toContain("idx_plugin_my-plugin_events_eventType");
expect(result.errors).toHaveLength(0);
});
it("should create composite index", async () => {
const result = await createStorageIndexes(db, "my-plugin", "events", [
["status", "createdAt"],
]);
expect(result.created).toContain("idx_plugin_my-plugin_events_status_createdAt");
expect(result.errors).toHaveLength(0);
});
it("should create multiple indexes", async () => {
const result = await createStorageIndexes(db, "my-plugin", "events", [
"eventType",
"userId",
["status", "timestamp"],
]);
expect(result.created).toHaveLength(3);
expect(result.errors).toHaveLength(0);
});
it("should track indexes in _plugin_indexes table", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
const indexes = await db
.selectFrom("_plugin_indexes")
.selectAll()
.where("plugin_id", "=", "my-plugin")
.execute();
expect(indexes).toHaveLength(2);
expect(indexes.map((i) => JSON.parse(i.fields))).toContainEqual(["eventType"]);
expect(indexes.map((i) => JSON.parse(i.fields))).toContainEqual(["userId"]);
});
it("should be idempotent", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
const result = await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
// Should still succeed
expect(result.errors).toHaveLength(0);
// Should not duplicate tracking records
const indexes = await db
.selectFrom("_plugin_indexes")
.selectAll()
.where("plugin_id", "=", "my-plugin")
.execute();
expect(indexes).toHaveLength(1);
});
});
describe("removeOrphanedIndexes", () => {
it("should remove indexes no longer in declaration", async () => {
// Create initial indexes
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId", "status"]);
// Remove one
const result = await removeOrphanedIndexes(db, "my-plugin", "events", [
"eventType",
"userId",
]);
expect(result.removed).toContain("idx_plugin_my-plugin_events_status");
expect(result.errors).toHaveLength(0);
});
it("should keep indexes that are still declared", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
const result = await removeOrphanedIndexes(db, "my-plugin", "events", [
"eventType",
"userId",
]);
expect(result.removed).toHaveLength(0);
});
it("should update tracking table", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "status"]);
await removeOrphanedIndexes(db, "my-plugin", "events", ["eventType"]);
const indexes = await db
.selectFrom("_plugin_indexes")
.selectAll()
.where("plugin_id", "=", "my-plugin")
.execute();
expect(indexes).toHaveLength(1);
expect(JSON.parse(indexes[0].fields)).toEqual(["eventType"]);
});
});
describe("syncStorageIndexes", () => {
it("should create new and remove old indexes in one call", async () => {
// Initial state
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "oldField"]);
// Sync to new state
const result = await syncStorageIndexes(db, "my-plugin", "events", ["eventType", "newField"]);
expect(result.created).toContain("idx_plugin_my-plugin_events_newField");
expect(result.removed).toContain("idx_plugin_my-plugin_events_oldField");
const status = await getPluginIndexStatus(db, "my-plugin");
const fields = status.map((s) => s.fields);
expect(fields).toContainEqual(["eventType"]);
expect(fields).toContainEqual(["newField"]);
expect(fields).not.toContainEqual(["oldField"]);
});
});
describe("removeAllPluginIndexes", () => {
it("should remove all indexes for a plugin", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
await createStorageIndexes(db, "my-plugin", "cache", ["key", "expiresAt"]);
const result = await removeAllPluginIndexes(db, "my-plugin");
expect(result.removed).toHaveLength(4);
expect(result.errors).toHaveLength(0);
const remaining = await db
.selectFrom("_plugin_indexes")
.selectAll()
.where("plugin_id", "=", "my-plugin")
.execute();
expect(remaining).toHaveLength(0);
});
it("should not affect other plugins", async () => {
await createStorageIndexes(db, "plugin1", "events", ["eventType"]);
await createStorageIndexes(db, "plugin2", "events", ["eventType"]);
await removeAllPluginIndexes(db, "plugin1");
const plugin2Indexes = await db
.selectFrom("_plugin_indexes")
.selectAll()
.where("plugin_id", "=", "plugin2")
.execute();
expect(plugin2Indexes).toHaveLength(1);
});
});
describe("getPluginIndexStatus", () => {
it("should return all indexes for a plugin", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType", ["status", "timestamp"]]);
await createStorageIndexes(db, "my-plugin", "cache", ["key"]);
const status = await getPluginIndexStatus(db, "my-plugin");
expect(status).toHaveLength(3);
expect(status).toContainEqual(
expect.objectContaining({
collection: "events",
fields: ["eventType"],
}),
);
expect(status).toContainEqual(
expect.objectContaining({
collection: "events",
fields: ["status", "timestamp"],
}),
);
expect(status).toContainEqual(
expect.objectContaining({
collection: "cache",
fields: ["key"],
}),
);
});
it("should return empty array for plugin with no indexes", async () => {
const status = await getPluginIndexStatus(db, "nonexistent-plugin");
expect(status).toEqual([]);
});
});
describe("query performance with indexes", () => {
it("should efficiently query using indexed fields", async () => {
const pluginId = "perf-test";
const collection = "events";
// Create index first
await createStorageIndexes(db, pluginId, collection, ["eventType"]);
// Create repository with the indexed field
const repo = new PluginStorageRepository<{ eventType: string }>(db, pluginId, collection, [
"eventType",
]);
// Insert test data
const items = Array.from({ length: 100 }, (_, i) => ({
id: `event-${i}`,
data: { eventType: i % 2 === 0 ? "pageview" : "click" },
}));
await repo.putMany(items);
// Query should work and use the index
const result = await repo.query({
where: { eventType: "pageview" },
});
expect(result.items).toHaveLength(50);
expect(result.items.every((i) => i.data.eventType === "pageview")).toBe(true);
});
});
describe("index verification", () => {
it("should create actual SQLite index", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
// Query SQLite's index list
const indexes = await sql<{ name: string }>`
SELECT name FROM sqlite_master
WHERE type = 'index'
AND name LIKE 'idx_plugin_%'
`.execute(db);
expect(indexes.rows.map((r) => r.name)).toContain("idx_plugin_my-plugin_events_eventType");
});
it("should drop actual SQLite index on removal", async () => {
await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
await removeAllPluginIndexes(db, "my-plugin");
const indexes = await sql<{ name: string }>`
SELECT name FROM sqlite_master
WHERE type = 'index'
AND name LIKE 'idx_plugin_my-plugin_%'
`.execute(db);
expect(indexes.rows).toHaveLength(0);
});
});
describe("unique indexes", () => {
it("should create a unique index", async () => {
const result = await createStorageIndexes(db, "my-plugin", "forms", [], {
uniqueIndexes: ["slug"],
});
expect(result.created).toContain("uidx_plugin_my-plugin_forms_slug");
expect(result.errors).toHaveLength(0);
// Verify it's actually a UNIQUE index in SQLite
const indexSql = await sql<{ sql: string }>`
SELECT sql FROM sqlite_master
WHERE type = 'index'
AND name = 'uidx_plugin_my-plugin_forms_slug'
`.execute(db);
expect(indexSql.rows).toHaveLength(1);
expect(indexSql.rows[0].sql).toContain("UNIQUE");
});
it("should enforce uniqueness on insert", async () => {
await createStorageIndexes(db, "my-plugin", "forms", [], {
uniqueIndexes: ["slug"],
});
const repo = new PluginStorageRepository<{ slug: string; name: string }>(
db,
"my-plugin",
"forms",
["slug"],
);
await repo.put("form-1", { slug: "contact", name: "Contact" });
// Second insert with a different ID but same slug should fail
await expect(repo.put("form-2", { slug: "contact", name: "Contact Copy" })).rejects.toThrow(
UNIQUE_CONSTRAINT_PATTERN,
);
});
it("should allow updating the same document", async () => {
await createStorageIndexes(db, "my-plugin", "forms", [], {
uniqueIndexes: ["slug"],
});
const repo = new PluginStorageRepository<{ slug: string; name: string }>(
db,
"my-plugin",
"forms",
["slug"],
);
await repo.put("form-1", { slug: "contact", name: "Contact" });
// Updating the same ID should succeed (upsert)
await repo.put("form-1", { slug: "contact", name: "Contact Updated" });
const result = await repo.get("form-1");
expect(result?.name).toBe("Contact Updated");
});
it("should allow different slugs across different collections", async () => {
await createStorageIndexes(db, "my-plugin", "forms", [], {
uniqueIndexes: ["slug"],
});
await createStorageIndexes(db, "my-plugin", "templates", [], {
uniqueIndexes: ["slug"],
});
const formsRepo = new PluginStorageRepository<{ slug: string }>(db, "my-plugin", "forms", [
"slug",
]);
const templatesRepo = new PluginStorageRepository<{ slug: string }>(
db,
"my-plugin",
"templates",
["slug"],
);
// Same slug in different collections should work (partial index scoped by collection)
await formsRepo.put("form-1", { slug: "contact" });
await templatesRepo.put("tmpl-1", { slug: "contact" });
expect(await formsRepo.get("form-1")).toEqual({ slug: "contact" });
expect(await templatesRepo.get("tmpl-1")).toEqual({ slug: "contact" });
});
it("should include unique index fields in queryable fields", async () => {
await createStorageIndexes(db, "my-plugin", "forms", ["status"], {
uniqueIndexes: ["slug"],
});
const repo = new PluginStorageRepository<{ slug: string; status: string }>(
db,
"my-plugin",
"forms",
["status", "slug"],
);
await repo.put("form-1", { slug: "contact", status: "active" });
await repo.put("form-2", { slug: "feedback", status: "active" });
// Query by unique field should work
const result = await repo.query({ where: { slug: "contact" } });
expect(result.items).toHaveLength(1);
expect(result.items[0].data.slug).toBe("contact");
});
});
});

View File

@@ -0,0 +1,293 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
PluginStorageRepository,
createPluginStorageAccessor,
deleteAllPluginStorage,
deletePluginCollection,
} from "../../../src/database/repositories/plugin-storage.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
/**
 * Shape of the analytics events persisted through plugin storage in these
 * integration tests. `eventType`, `userId`, and `timestamp` are the indexed
 * (queryable) fields; `metadata` is an opaque JSON payload.
 */
interface AnalyticsEvent {
  eventType: string;
  userId: string;
  timestamp: string;
  metadata: Record<string, unknown>;
}
describe("Plugin Storage Integration", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
describe("full storage flow", () => {
it("should support complete CRUD cycle", async () => {
const repo = new PluginStorageRepository<AnalyticsEvent>(db, "analytics-plugin", "events", [
"eventType",
"userId",
"timestamp",
]);
// Create
const event: AnalyticsEvent = {
eventType: "pageview",
userId: "user123",
timestamp: new Date().toISOString(),
metadata: { page: "/home", referrer: "google.com" },
};
await repo.put("event1", event);
// Read
const fetched = await repo.get("event1");
expect(fetched).toEqual(event);
// Update
const updatedEvent = {
...event,
metadata: { ...event.metadata, duration: 5000 },
};
await repo.put("event1", updatedEvent);
const refetched = await repo.get("event1");
expect(refetched?.metadata).toHaveProperty("duration", 5000);
// Delete
const deleted = await repo.delete("event1");
expect(deleted).toBe(true);
expect(await repo.get("event1")).toBeNull();
});
it("should support complex queries with JSON extraction", async () => {
const repo = new PluginStorageRepository<AnalyticsEvent>(db, "analytics-plugin", "events", [
"eventType",
"userId",
"timestamp",
]);
// Create events
await repo.putMany([
{
id: "e1",
data: {
eventType: "pageview",
userId: "user1",
timestamp: "2024-01-01T10:00:00Z",
metadata: {},
},
},
{
id: "e2",
data: {
eventType: "click",
userId: "user1",
timestamp: "2024-01-01T10:05:00Z",
metadata: {},
},
},
{
id: "e3",
data: {
eventType: "pageview",
userId: "user2",
timestamp: "2024-01-01T11:00:00Z",
metadata: {},
},
},
]);
// Query by eventType
const pageviews = await repo.query({ where: { eventType: "pageview" } });
expect(pageviews.items).toHaveLength(2);
// Query by userId
const user1Events = await repo.query({ where: { userId: "user1" } });
expect(user1Events.items).toHaveLength(2);
// Combined query
const user1Pageviews = await repo.query({
where: { eventType: "pageview", userId: "user1" },
});
expect(user1Pageviews.items).toHaveLength(1);
});
});
describe("createPluginStorageAccessor", () => {
it("should create accessor with multiple collections", async () => {
const accessor = createPluginStorageAccessor(db, "my-plugin", {
events: { indexes: ["eventType", "timestamp"] },
cache: { indexes: ["key", "expiresAt"] },
});
expect(accessor).toHaveProperty("events");
expect(accessor).toHaveProperty("cache");
// Use events collection
await accessor.events.put("e1", {
eventType: "test",
timestamp: new Date().toISOString(),
});
const event = await accessor.events.get("e1");
expect(event).toBeDefined();
// Use cache collection
await accessor.cache.put("c1", {
key: "test-key",
value: "test-value",
expiresAt: new Date().toISOString(),
});
const cached = await accessor.cache.get("c1");
expect(cached).toBeDefined();
});
it("should isolate collections from each other", async () => {
const accessor = createPluginStorageAccessor(db, "my-plugin", {
events: { indexes: ["eventType"] },
cache: { indexes: ["key"] },
});
await accessor.events.put("item1", { eventType: "test" });
await accessor.cache.put("item1", { key: "test" });
// Both should exist independently
expect(await accessor.events.get("item1")).toEqual({ eventType: "test" });
expect(await accessor.cache.get("item1")).toEqual({ key: "test" });
// Count should be separate
expect(
await (accessor.events as PluginStorageRepository<any>).count({
eventType: "test",
}),
).toBe(1);
expect(
await (accessor.cache as PluginStorageRepository<any>).count({
key: "test",
}),
).toBe(1);
});
});
describe("deleteAllPluginStorage", () => {
it("should delete all data for a plugin", async () => {
const accessor = createPluginStorageAccessor(db, "cleanup-plugin", {
events: { indexes: ["eventType"] },
cache: { indexes: ["key"] },
});
// Add data
await accessor.events.put("e1", { eventType: "test" });
await accessor.events.put("e2", { eventType: "test2" });
await accessor.cache.put("c1", { key: "test" });
// Delete all
const deleted = await deleteAllPluginStorage(db, "cleanup-plugin");
expect(deleted).toBe(3);
// Verify empty
expect(await accessor.events.get("e1")).toBeNull();
expect(await accessor.events.get("e2")).toBeNull();
expect(await accessor.cache.get("c1")).toBeNull();
});
it("should not affect other plugins", async () => {
const plugin1 = createPluginStorageAccessor(db, "plugin1", {
data: { indexes: ["key"] },
});
const plugin2 = createPluginStorageAccessor(db, "plugin2", {
data: { indexes: ["key"] },
});
await plugin1.data.put("item1", { key: "test" });
await plugin2.data.put("item1", { key: "test" });
await deleteAllPluginStorage(db, "plugin1");
expect(await plugin1.data.get("item1")).toBeNull();
expect(await plugin2.data.get("item1")).toEqual({ key: "test" });
});
});
describe("deletePluginCollection", () => {
it("should delete specific collection", async () => {
const accessor = createPluginStorageAccessor(db, "my-plugin", {
events: { indexes: ["eventType"] },
cache: { indexes: ["key"] },
});
await accessor.events.put("e1", { eventType: "test" });
await accessor.cache.put("c1", { key: "test" });
await deletePluginCollection(db, "my-plugin", "events");
expect(await accessor.events.get("e1")).toBeNull();
expect(await accessor.cache.get("c1")).toEqual({ key: "test" });
});
});
describe("pagination", () => {
it("should paginate through large datasets", async () => {
const repo = new PluginStorageRepository<{ index: number }>(
db,
"pagination-test",
"items",
[],
);
// Create 25 items
const items = Array.from({ length: 25 }, (_, i) => ({
id: `item-${String(i).padStart(3, "0")}`,
data: { index: i },
}));
await repo.putMany(items);
// Paginate with limit of 10
const pages: Array<Array<{ id: string; data: { index: number } }>> = [];
let cursor: string | undefined;
do {
const result = await repo.query({ limit: 10, cursor });
pages.push(result.items);
cursor = result.cursor;
} while (cursor);
expect(pages).toHaveLength(3);
expect(pages[0]).toHaveLength(10);
expect(pages[1]).toHaveLength(10);
expect(pages[2]).toHaveLength(5);
// Verify all items were retrieved
const allItems = pages.flat();
expect(allItems).toHaveLength(25);
expect(new Set(allItems.map((i) => i.id)).size).toBe(25);
});
});
describe("concurrent operations", () => {
it("should handle concurrent puts", async () => {
const repo = new PluginStorageRepository<{ value: number }>(
db,
"concurrent-test",
"items",
[],
);
// Concurrent puts
await Promise.all([
repo.put("item1", { value: 1 }),
repo.put("item2", { value: 2 }),
repo.put("item3", { value: 3 }),
repo.put("item4", { value: 4 }),
repo.put("item5", { value: 5 }),
]);
const count = await repo.count();
expect(count).toBe(5);
});
});
});

View File

@@ -0,0 +1,210 @@
/**
* Regression tests for the unbounded 404 logging DoS.
*
* `log404` was previously an unconditional INSERT for every 404 — an
* unauthenticated attacker could fill the database by hitting unique URLs.
*
* The hardened version:
* - Dedups by path: existing rows are bumped (`hits++`, `last_seen_at` refreshed)
* instead of inserting new rows.
* - Caps the table at MAX_404_LOG_ROWS rows; oldest entries (by `last_seen_at`)
* are evicted to make room for new paths.
* - Truncates referrer / user_agent to bounded lengths so a malicious client
* can't blow up storage by sending huge headers.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
MAX_404_LOG_ROWS,
REFERRER_MAX_LENGTH,
RedirectRepository,
USER_AGENT_MAX_LENGTH,
} from "../../../src/database/repositories/redirect.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
/**
 * Seed `_emdash_404_log` directly to MAX_404_LOG_ROWS, batching to stay
 * under SQLite's per-statement bind-parameter limit (~32k by default).
 *
 * Rows are staggered in `last_seen_at` so `seed-000000` is the oldest.
 */
async function seedToCapacity(db: Kysely<Database>): Promise<void> {
  const now = Date.now();
  const batchSize = 500;
  for (let offset = 0; offset < MAX_404_LOG_ROWS; offset += batchSize) {
    const upper = Math.min(offset + batchSize, MAX_404_LOG_ROWS);
    // Build one batch of rows; row i is (MAX_404_LOG_ROWS - i) seconds old,
    // so lower indices are older.
    const batch = Array.from({ length: upper - offset }, (_, j) => {
      const i = offset + j;
      const ts = new Date(now - (MAX_404_LOG_ROWS - i) * 1000).toISOString();
      return {
        id: `seed-${i.toString().padStart(6, "0")}`,
        path: `/seed-${i}`,
        referrer: null,
        user_agent: null,
        ip: null,
        hits: 1,
        last_seen_at: ts,
        created_at: ts,
      };
    });
    await db.insertInto("_emdash_404_log").values(batch).execute();
  }
}
describe("RedirectRepository.log404 — bounded logging", () => {
let db: Kysely<Database>;
let repo: RedirectRepository;
beforeEach(async () => {
db = await setupTestDatabase();
repo = new RedirectRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("dedups repeat hits by path instead of inserting new rows", async () => {
await repo.log404({ path: "/missing" });
await repo.log404({ path: "/missing" });
await repo.log404({ path: "/missing" });
const rows = await db
.selectFrom("_emdash_404_log")
.selectAll()
.where("path", "=", "/missing")
.execute();
expect(rows).toHaveLength(1);
expect(rows[0]!.hits).toBe(3);
expect(rows[0]!.last_seen_at).toBeTruthy();
});
it("truncates oversize referrer and user_agent on insert", async () => {
const bigReferrer = "https://evil.example.com/" + "a".repeat(10_000);
const bigUserAgent = "Mozilla/5.0 " + "b".repeat(10_000);
await repo.log404({
path: "/missing",
referrer: bigReferrer,
userAgent: bigUserAgent,
});
const row = await db
.selectFrom("_emdash_404_log")
.selectAll()
.where("path", "=", "/missing")
.executeTakeFirstOrThrow();
expect(row.referrer?.length).toBeLessThanOrEqual(REFERRER_MAX_LENGTH);
expect(row.user_agent?.length).toBeLessThanOrEqual(USER_AGENT_MAX_LENGTH);
// Confirm the truncation actually happened (sanity check on the constants).
expect(row.referrer!.length).toBe(REFERRER_MAX_LENGTH);
expect(row.user_agent!.length).toBe(USER_AGENT_MAX_LENGTH);
});
it("preserves null referrer / user_agent without coercing to empty string", async () => {
await repo.log404({ path: "/missing", referrer: null, userAgent: null });
const row = await db
.selectFrom("_emdash_404_log")
.selectAll()
.where("path", "=", "/missing")
.executeTakeFirstOrThrow();
expect(row.referrer).toBeNull();
expect(row.user_agent).toBeNull();
});
it("evicts the oldest entry when the table is at capacity", async () => {
// Stuffing the table to MAX_404_LOG_ROWS via the public API would be
// slow, so seed it directly. Batch the inserts to stay under SQLite's
// per-statement parameter limit.
await seedToCapacity(db);
// Sanity: at capacity.
const before = await db
.selectFrom("_emdash_404_log")
.select((eb) => eb.fn.countAll<number>().as("c"))
.executeTakeFirstOrThrow();
expect(Number(before.c)).toBe(MAX_404_LOG_ROWS);
// New unique path triggers eviction.
await repo.log404({ path: "/brand-new" });
const after = await db
.selectFrom("_emdash_404_log")
.select((eb) => eb.fn.countAll<number>().as("c"))
.executeTakeFirstOrThrow();
expect(Number(after.c)).toBe(MAX_404_LOG_ROWS);
// The oldest seed row is gone.
const oldest = await db
.selectFrom("_emdash_404_log")
.select("id")
.where("id", "=", "seed-000000")
.executeTakeFirst();
expect(oldest).toBeUndefined();
// The new path is present.
const fresh = await db
.selectFrom("_emdash_404_log")
.select("path")
.where("path", "=", "/brand-new")
.executeTakeFirst();
expect(fresh?.path).toBe("/brand-new");
});
it("does not evict when an existing path is hit again, even at capacity", async () => {
await seedToCapacity(db);
// Hit an existing path — should bump hits, not evict.
await repo.log404({ path: "/seed-500" });
const oldest = await db
.selectFrom("_emdash_404_log")
.select("id")
.where("id", "=", "seed-000000")
.executeTakeFirst();
expect(oldest?.id).toBe("seed-000000");
const bumped = await db
.selectFrom("_emdash_404_log")
.select(["hits"])
.where("path", "=", "/seed-500")
.executeTakeFirstOrThrow();
expect(bumped.hits).toBe(2);
});
it("handles concurrent inserts for the same new path atomically", async () => {
// Regression: `log404` used to be SELECT-then-INSERT/UPDATE, which
// races under concurrency — both callers could miss the SELECT and
// the second INSERT would fail with a uniqueness violation once a
// UNIQUE index on `path` was added. The fix uses a single atomic
// upsert (ON CONFLICT DO UPDATE).
//
// better-sqlite3 is synchronous, so Promise.all doesn't produce real
// parallelism; the test instead sends a batch of concurrent upserts
// and asserts the end state: exactly one row, with the full count
// reflected in `hits`. Any lost updates or uniqueness errors would
// cause this to fail.
const concurrency = 10;
const pending: Array<Promise<void>> = [];
for (let i = 0; i < concurrency; i++) {
pending.push(repo.log404({ path: "/race" }));
}
await Promise.all(pending);
const rows = await db
.selectFrom("_emdash_404_log")
.selectAll()
.where("path", "=", "/race")
.execute();
expect(rows).toHaveLength(1);
expect(rows[0]!.hits).toBe(concurrency);
});
});

View File

@@ -0,0 +1,137 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleRedirectCreate,
handleRedirectUpdate,
handleRedirectList,
} from "../../../src/api/handlers/redirects.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
describe("redirect handlers — loop detection", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
describe("handleRedirectCreate", () => {
it("rejects a redirect that would create a direct 2-node loop", async () => {
await handleRedirectCreate(db, { source: "/a", destination: "/b" });
const result = await handleRedirectCreate(db, {
source: "/b",
destination: "/a",
});
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.code).toBe("VALIDATION_ERROR");
expect(result.error.message).toContain("loop");
expect(result.error.message).toContain("/a");
expect(result.error.message).toContain("/b");
}
});
it("rejects a redirect that would create a 3-node loop", async () => {
await handleRedirectCreate(db, { source: "/one", destination: "/two" });
await handleRedirectCreate(db, { source: "/two", destination: "/three" });
const result = await handleRedirectCreate(db, {
source: "/three",
destination: "/one",
});
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.code).toBe("VALIDATION_ERROR");
expect(result.error.message).toContain("loop");
}
});
it("allows a redirect that does not create a loop", async () => {
await handleRedirectCreate(db, { source: "/a", destination: "/b" });
const result = await handleRedirectCreate(db, {
source: "/c",
destination: "/d",
});
expect(result.success).toBe(true);
});
it("allows a redirect that extends a chain without looping", async () => {
await handleRedirectCreate(db, { source: "/a", destination: "/b" });
const result = await handleRedirectCreate(db, {
source: "/b",
destination: "/c",
});
expect(result.success).toBe(true);
});
});
describe("handleRedirectUpdate", () => {
it("rejects an update that would create a loop", async () => {
const r1 = await handleRedirectCreate(db, {
source: "/a",
destination: "/b",
});
await handleRedirectCreate(db, { source: "/b", destination: "/c" });
if (!r1.success) throw new Error("setup failed");
const result = await handleRedirectUpdate(db, r1.data.id, {
destination: "/c",
});
// /a → /c, /b → /c — no loop (both point to /c)
// Actually this is fine, let me create a real loop scenario
expect(result.success).toBe(true);
});
it("rejects an update that creates a cycle", async () => {
await handleRedirectCreate(db, {
source: "/a",
destination: "/b",
});
await handleRedirectCreate(db, { source: "/b", destination: "/c" });
const r3 = await handleRedirectCreate(db, {
source: "/c",
destination: "/d",
});
if (!r3.success) throw new Error("setup failed");
// Update /c → /d to /c → /a, creating /a → /b → /c → /a
const result = await handleRedirectUpdate(db, r3.data.id, {
destination: "/a",
});
expect(result.success).toBe(false);
if (!result.success) {
expect(result.error.code).toBe("VALIDATION_ERROR");
expect(result.error.message).toContain("loop");
}
});
});
describe("handleRedirectList", () => {
it("does not include loopRedirectIds when no loops exist", async () => {
await handleRedirectCreate(db, { source: "/a", destination: "/b" });
const result = await handleRedirectList(db, {});
expect(result.success).toBe(true);
if (result.success) {
expect(result.data.loopRedirectIds).toBeUndefined();
}
});
});
});

View File

@@ -0,0 +1,537 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { RedirectRepository } from "../../../src/database/repositories/redirect.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
describe("RedirectRepository", () => {
let db: Kysely<Database>;
let repo: RedirectRepository;
beforeEach(async () => {
db = await setupTestDatabase();
repo = new RedirectRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
// --- CRUD ---------------------------------------------------------------
describe("create", () => {
it("creates a redirect with defaults", async () => {
const redirect = await repo.create({
source: "/old",
destination: "/new",
});
expect(redirect.source).toBe("/old");
expect(redirect.destination).toBe("/new");
expect(redirect.type).toBe(301);
expect(redirect.isPattern).toBe(false);
expect(redirect.enabled).toBe(true);
expect(redirect.hits).toBe(0);
expect(redirect.lastHitAt).toBeNull();
expect(redirect.auto).toBe(false);
expect(redirect.id).toBeTruthy();
});
it("creates a redirect with custom values", async () => {
const redirect = await repo.create({
source: "/temp",
destination: "/target",
type: 302,
enabled: false,
groupName: "Temporary",
auto: true,
});
expect(redirect.type).toBe(302);
expect(redirect.enabled).toBe(false);
expect(redirect.groupName).toBe("Temporary");
expect(redirect.auto).toBe(true);
});
it("auto-detects pattern sources", async () => {
const redirect = await repo.create({
source: "/old-blog/[...path]",
destination: "/blog/[...path]",
});
expect(redirect.isPattern).toBe(true);
});
it("respects explicit isPattern=false override", async () => {
const redirect = await repo.create({
source: "/literal-with-brackets",
destination: "/target",
isPattern: false,
});
expect(redirect.isPattern).toBe(false);
});
});
describe("findById", () => {
it("returns null for non-existent id", async () => {
expect(await repo.findById("nonexistent")).toBeNull();
});
it("finds a redirect by id", async () => {
const created = await repo.create({
source: "/a",
destination: "/b",
});
const found = await repo.findById(created.id);
expect(found?.source).toBe("/a");
});
});
describe("findBySource", () => {
it("returns null for non-existent source", async () => {
expect(await repo.findBySource("/nope")).toBeNull();
});
it("finds a redirect by source", async () => {
await repo.create({ source: "/old", destination: "/new" });
const found = await repo.findBySource("/old");
expect(found?.destination).toBe("/new");
});
});
describe("update", () => {
it("returns null for non-existent id", async () => {
expect(await repo.update("nonexistent", { destination: "/x" })).toBeNull();
});
it("updates destination", async () => {
const created = await repo.create({
source: "/a",
destination: "/b",
});
const updated = await repo.update(created.id, { destination: "/c" });
expect(updated?.destination).toBe("/c");
});
it("updates type and enabled", async () => {
const created = await repo.create({
source: "/a",
destination: "/b",
type: 301,
});
const updated = await repo.update(created.id, {
type: 302,
enabled: false,
});
expect(updated?.type).toBe(302);
expect(updated?.enabled).toBe(false);
});
it("auto-detects isPattern when source changes", async () => {
const created = await repo.create({
source: "/literal",
destination: "/target",
});
expect(created.isPattern).toBe(false);
const updated = await repo.update(created.id, {
source: "/[slug]",
});
expect(updated?.isPattern).toBe(true);
});
});
describe("delete", () => {
it("returns false for non-existent id", async () => {
expect(await repo.delete("nonexistent")).toBe(false);
});
it("deletes and returns true", async () => {
const created = await repo.create({
source: "/a",
destination: "/b",
});
expect(await repo.delete(created.id)).toBe(true);
expect(await repo.findById(created.id)).toBeNull();
});
});
describe("findMany", () => {
it("returns empty list when no redirects", async () => {
const result = await repo.findMany({});
expect(result.items).toEqual([]);
expect(result.nextCursor).toBeUndefined();
});
it("returns all redirects", async () => {
await repo.create({ source: "/a", destination: "/b" });
await repo.create({ source: "/c", destination: "/d" });
const result = await repo.findMany({});
expect(result.items).toHaveLength(2);
});
it("paginates with cursor", async () => {
for (let i = 0; i < 5; i++) {
await repo.create({ source: `/s${i}`, destination: `/d${i}` });
}
const page1 = await repo.findMany({ limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.findMany({ limit: 2, cursor: page1.nextCursor });
expect(page2.items).toHaveLength(2);
expect(page2.nextCursor).toBeTruthy();
// Ensure no overlap
const page1Ids = new Set(page1.items.map((r) => r.id));
for (const item of page2.items) {
expect(page1Ids.has(item.id)).toBe(false);
}
});
it("filters by search term", async () => {
await repo.create({ source: "/blog/hello", destination: "/new/hello" });
await repo.create({ source: "/about", destination: "/info" });
const result = await repo.findMany({ search: "blog" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.source).toBe("/blog/hello");
});
it("filters by enabled status", async () => {
await repo.create({ source: "/a", destination: "/b", enabled: true });
await repo.create({ source: "/c", destination: "/d", enabled: false });
const enabled = await repo.findMany({ enabled: true });
expect(enabled.items).toHaveLength(1);
expect(enabled.items[0]!.source).toBe("/a");
const disabled = await repo.findMany({ enabled: false });
expect(disabled.items).toHaveLength(1);
expect(disabled.items[0]!.source).toBe("/c");
});
it("filters by group", async () => {
await repo.create({
source: "/a",
destination: "/b",
groupName: "wp-import",
});
await repo.create({ source: "/c", destination: "/d" });
const result = await repo.findMany({ group: "wp-import" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.groupName).toBe("wp-import");
});
it("filters by auto flag", async () => {
await repo.create({ source: "/a", destination: "/b", auto: true });
await repo.create({ source: "/c", destination: "/d", auto: false });
const autoOnly = await repo.findMany({ auto: true });
expect(autoOnly.items).toHaveLength(1);
expect(autoOnly.items[0]!.auto).toBe(true);
});
it("clamps limit to 1-100", async () => {
for (let i = 0; i < 3; i++) {
await repo.create({ source: `/s${i}`, destination: `/d${i}` });
}
// limit=0 should clamp to 1
const min = await repo.findMany({ limit: 0 });
expect(min.items.length).toBeLessThanOrEqual(1);
// limit=200 should clamp to 100
const max = await repo.findMany({ limit: 200 });
expect(max.items).toHaveLength(3); // only 3 exist
});
});
// --- findAllEnabled -----------------------------------------------------
describe("findAllEnabled", () => {
it("returns only enabled redirects", async () => {
await repo.create({ source: "/a", destination: "/b", enabled: true });
await repo.create({ source: "/c", destination: "/d", enabled: false });
await repo.create({ source: "/e", destination: "/f", enabled: true });
const result = await repo.findAllEnabled();
expect(result).toHaveLength(2);
expect(result.every((r) => r.enabled)).toBe(true);
});
it("returns empty array when no enabled redirects", async () => {
await repo.create({ source: "/a", destination: "/b", enabled: false });
const result = await repo.findAllEnabled();
expect(result).toHaveLength(0);
});
});
// --- Matching -----------------------------------------------------------
describe("matchPath", () => {
it("returns null when no redirects exist", async () => {
expect(await repo.matchPath("/anything")).toBeNull();
});
it("matches exact paths", async () => {
await repo.create({ source: "/old", destination: "/new" });
const match = await repo.matchPath("/old");
expect(match).not.toBeNull();
expect(match!.resolvedDestination).toBe("/new");
});
it("does not match disabled redirects", async () => {
await repo.create({
source: "/old",
destination: "/new",
enabled: false,
});
expect(await repo.matchPath("/old")).toBeNull();
});
it("matches pattern redirects", async () => {
await repo.create({
source: "/old-blog/[...path]",
destination: "/blog/[...path]",
});
const match = await repo.matchPath("/old-blog/2024/01/post");
expect(match).not.toBeNull();
expect(match!.resolvedDestination).toBe("/blog/2024/01/post");
});
it("prefers exact match over pattern match", async () => {
await repo.create({
source: "/blog/[slug]",
destination: "/articles/[slug]",
});
await repo.create({
source: "/blog/special",
destination: "/special-page",
});
const match = await repo.matchPath("/blog/special");
expect(match!.resolvedDestination).toBe("/special-page");
});
it("matches [param] in single segment", async () => {
await repo.create({
source: "/category/[slug]",
destination: "/tags/[slug]",
});
const match = await repo.matchPath("/category/typescript");
expect(match!.resolvedDestination).toBe("/tags/typescript");
// Should not match multi-segment
expect(await repo.matchPath("/category/a/b")).toBeNull();
});
});
// --- Hit tracking -------------------------------------------------------
describe("recordHit", () => {
it("increments hit count and updates lastHitAt", async () => {
const redirect = await repo.create({
source: "/a",
destination: "/b",
});
expect(redirect.hits).toBe(0);
expect(redirect.lastHitAt).toBeNull();
await repo.recordHit(redirect.id);
const updated = await repo.findById(redirect.id);
expect(updated!.hits).toBe(1);
expect(updated!.lastHitAt).toBeTruthy();
await repo.recordHit(redirect.id);
const again = await repo.findById(redirect.id);
expect(again!.hits).toBe(2);
});
});
// --- Auto-redirects -----------------------------------------------------
describe("createAutoRedirect", () => {
// Auto-redirects resolve {slug} inside the collection's URL pattern
// and are tagged (auto=true, grouped) so the UI can distinguish them.
it("creates a redirect for slug change with url pattern", async () => {
const redirect = await repo.createAutoRedirect(
"posts",
"old-title",
"new-title",
"id1",
"/blog/{slug}",
);
expect(redirect.source).toBe("/blog/old-title");
expect(redirect.destination).toBe("/blog/new-title");
expect(redirect.auto).toBe(true);
expect(redirect.groupName).toBe("Auto: slug change");
expect(redirect.type).toBe(301);
});
// Without a pattern the paths fall back to /<collection>/<slug>.
it("uses fallback URL when no url pattern", async () => {
const redirect = await repo.createAutoRedirect("posts", "old-slug", "new-slug", "id1", null);
expect(redirect.source).toBe("/posts/old-slug");
expect(redirect.destination).toBe("/posts/new-slug");
});
// Renaming twice must not leave A -> B -> C chains; every stale
// redirect is re-pointed at the newest destination.
it("collapses existing chains", async () => {
// First rename: A -> B
await repo.createAutoRedirect("posts", "title-a", "title-b", "id1", "/blog/{slug}");
// Second rename: B -> C (should update A's destination to C)
await repo.createAutoRedirect("posts", "title-b", "title-c", "id1", "/blog/{slug}");
// Check that the A -> B redirect now points to C
const aRedirect = await repo.findBySource("/blog/title-a");
expect(aRedirect!.destination).toBe("/blog/title-c");
// And B -> C also exists
const bRedirect = await repo.findBySource("/blog/title-b");
expect(bRedirect!.destination).toBe("/blog/title-c");
});
it("updates existing redirect from same source instead of duplicating", async () => {
// Create A -> B
await repo.createAutoRedirect("posts", "a", "b", "id1", "/blog/{slug}");
// Create A -> C (same source /blog/a, different dest)
// This calls collapseChains first, which doesn't touch /blog/a since
// nothing points to /blog/a as destination.
// Then it finds existing source=/blog/a and updates its destination.
await repo.createAutoRedirect("posts", "a", "c", "id1", "/blog/{slug}");
const all = await repo.findMany({});
// Should only have one redirect from /blog/a
const fromA = all.items.filter((r) => r.source === "/blog/a");
expect(fromA).toHaveLength(1);
expect(fromA[0]!.destination).toBe("/blog/c");
});
});
// --- 404 log ------------------------------------------------------------
describe("log404", () => {
  // Bare-minimum entry: only the path is required.
  it("logs a 404 entry", async () => {
    await repo.log404({ path: "/missing" });
    const { items } = await repo.find404s({});
    expect(items).toHaveLength(1);
    expect(items[0]!.path).toBe("/missing");
  });

  // Optional request metadata (referrer / UA / IP) is persisted verbatim.
  it("logs with metadata", async () => {
    await repo.log404({
      path: "/missing",
      referrer: "https://google.com",
      userAgent: "Mozilla/5.0",
      ip: "1.2.3.4",
    });
    const { items } = await repo.find404s({});
    const row = items[0]!;
    expect(row.referrer).toBe("https://google.com");
    expect(row.userAgent).toBe("Mozilla/5.0");
    expect(row.ip).toBe("1.2.3.4");
  });
});
describe("find404s", () => {
// Substring search over the logged path.
it("filters by search", async () => {
await repo.log404({ path: "/missing-blog-post" });
await repo.log404({ path: "/about-us" });
const result = await repo.find404s({ search: "blog" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.path).toBe("/missing-blog-post");
});
// Cursor pagination: nextCursor from page 1 addresses page 2.
it("paginates", async () => {
for (let i = 0; i < 5; i++) {
await repo.log404({ path: `/missing-${i}` });
}
const page1 = await repo.find404s({ limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.find404s({ limit: 2, cursor: page1.nextCursor });
expect(page2.items).toHaveLength(2);
});
});
describe("get404Summary", () => {
// Summary aggregates per path, ordered by hit count descending.
it("groups by path and counts", async () => {
await repo.log404({ path: "/a" });
await repo.log404({ path: "/a" });
await repo.log404({ path: "/a" });
await repo.log404({ path: "/b" });
const summary = await repo.get404Summary();
expect(summary).toHaveLength(2);
// Ordered by count desc
expect(summary[0]!.path).toBe("/a");
expect(summary[0]!.count).toBe(3);
expect(summary[1]!.path).toBe("/b");
expect(summary[1]!.count).toBe(1);
});
it("includes the most recently seen referrer", async () => {
// 404 rows are now deduped by path, so the stored referrer is the
// most recent one seen for that path rather than the most frequent.
await repo.log404({ path: "/x", referrer: "https://google.com" });
await repo.log404({ path: "/x", referrer: "https://google.com" });
await repo.log404({ path: "/x", referrer: "https://bing.com" });
const summary = await repo.get404Summary();
expect(summary[0]!.topReferrer).toBe("https://bing.com");
});
});
describe("delete404", () => {
  // Deleting one logged entry must leave the other rows untouched.
  it("deletes a single 404 entry", async () => {
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/b" });
    const before = await repo.find404s({});
    expect(before.items).toHaveLength(2);
    await repo.delete404(before.items[0]!.id);
    const after = await repo.find404s({});
    expect(after.items).toHaveLength(1);
  });
});
describe("clear404s", () => {
// Bulk wipe; returns the number of rows removed.
it("removes all 404 entries", async () => {
await repo.log404({ path: "/a" });
await repo.log404({ path: "/b" });
const count = await repo.clear404s();
expect(count).toBe(2);
const result = await repo.find404s({});
expect(result.items).toHaveLength(0);
});
});
describe("prune404s", () => {
// prune404s takes an ISO cutoff and deletes rows seen before it,
// returning how many were removed.
it("removes entries older than cutoff", async () => {
await repo.log404({ path: "/old" });
// All entries were just created, so pruning with a future date should clear them
const count = await repo.prune404s("2099-01-01T00:00:00.000Z");
expect(count).toBe(1);
});
it("keeps entries newer than cutoff", async () => {
await repo.log404({ path: "/new" });
const count = await repo.prune404s("2000-01-01T00:00:00.000Z");
expect(count).toBe(0);
const result = await repo.find404s({});
expect(result.items).toHaveLength(1);
});
});
});

View File

@@ -0,0 +1,91 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import { searchWithDb } from "../../../src/search/query.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Verifies FTSManager's self-healing path: if the per-collection FTS
// table disappears while search stays enabled, verifyAndRepairAll()
// must recreate it and re-index existing content.
describe("FTS repair", () => {
let db: Kysely<Database>;
let registry: SchemaRegistry;
let repo: ContentRepository;
let ftsManager: FTSManager;
let gameId: string;
beforeEach(async () => {
// Fresh DB with one "game" collection, two searchable fields, and a
// single published entry, then FTS enabled over it.
db = await setupTestDatabase();
registry = new SchemaRegistry(db);
repo = new ContentRepository(db);
ftsManager = new FTSManager(db);
await registry.createCollection({
slug: "game",
label: "Games",
labelSingular: "Game",
supports: ["search"],
});
await registry.createField("game", {
slug: "title",
label: "Title",
type: "string",
searchable: true,
});
await registry.createField("game", {
slug: "blurb",
label: "Blurb",
type: "text",
searchable: true,
});
const created = await repo.create({
type: "game",
slug: "trail-of-cthulhu",
status: "published",
publishedAt: new Date().toISOString(),
data: {
title: "Trail of Cthulhu",
blurb: "Investigative horror in the Cthulhu mythos.",
},
});
gameId = created.id;
await ftsManager.enableSearch("game");
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("recreates a missing FTS table when search remains enabled", async () => {
expect(await ftsManager.ftsTableExists("game")).toBe(true);
// Simulate corruption: drop the FTS table behind the manager's back.
await ftsManager.dropFtsTable("game");
expect(await ftsManager.ftsTableExists("game")).toBe(false);
// With the table gone, search degrades to empty results, not a throw.
expect(
await searchWithDb(db, "cthulhu", {
collections: ["game"],
status: "published",
}),
).toEqual({ items: [] });
// Repair reports exactly one fixed collection and restores the table.
await expect(ftsManager.verifyAndRepairAll()).resolves.toBe(1);
expect(await ftsManager.ftsTableExists("game")).toBe(true);
const repaired = await searchWithDb(db, "cthulhu", {
collections: ["game"],
status: "published",
});
expect(repaired.items).toHaveLength(1);
expect(repaired.items[0]?.slug).toBe("trail-of-cthulhu");
});
it("keeps the FTS index in sync after soft delete", async () => {
// A delete that leaves the index consistent means repair finds 0 issues.
await expect(repo.delete("game", gameId)).resolves.toBe(true);
await expect(ftsManager.verifyAndRepairAll()).resolves.toBe(0);
});
});

View File

@@ -0,0 +1,158 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import { searchWithDb } from "../../../src/search/query.js";
import { createPostFixture } from "../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
/**
* Snippets returned by FTS5 splice literal `<mark>` markers around matched
* terms but never escape the surrounding text. If the indexed content
* contains characters that mean something in HTML (`<`, `>`, `&`, `"`,
* `'`) the resulting "snippet" is unsafe to render with set:html or
* innerHTML — both for visual integrity (broken markup, mojibake) and
* for security (a `<script>` literal in a title becomes executable).
*
* The shipped contract is "snippet is safe HTML containing only <mark>
* highlight tags." These tests pin that contract.
*/
describe("search snippet sanitization", () => {
let db: Kysely<Database>;
let repo: ContentRepository;
beforeEach(async () => {
// Stock collections plus a searchable title and FTS enabled on "post".
db = await setupTestDatabaseWithCollections();
repo = new ContentRepository(db);
const registry = new SchemaRegistry(db);
const ftsManager = new FTSManager(db);
await registry.updateField("post", "title", { searchable: true });
await ftsManager.enableSearch("post");
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("escapes `<` and `>` in matched content so a `<script>` title cannot execute", async () => {
// A title containing a literal script tag — exactly the payload
// that an attacker would aim at a poorly-escaped highlighter.
await repo.create(
createPostFixture({
slug: "xss-attempt",
status: "published",
data: { title: "Hello <script>alert(1)</script> world" },
}),
);
const { items } = await searchWithDb(db, "alert", {
collections: ["post"],
});
expect(items).toHaveLength(1);
const snippet = items[0]!.snippet ?? "";
// The dangerous `<script>` substring must be escaped. The result
// is allowed to contain `<mark>...</mark>` highlights, so we
// can't just assert "no `<` chars" — we assert the script tag
// itself cannot appear as live markup.
expect(snippet).not.toContain("<script>");
expect(snippet).not.toContain("</script>");
expect(snippet).toContain("&lt;script&gt;");
});
it("escapes ampersands so `<3` and `&amp;` round-trip correctly", async () => {
await repo.create(
createPostFixture({
slug: "ampersand",
status: "published",
data: { title: "Tom & Jerry: 2 < 3 forever" },
}),
);
const { items } = await searchWithDb(db, "Jerry", {
collections: ["post"],
});
expect(items).toHaveLength(1);
const snippet = items[0]!.snippet ?? "";
// Bare `&` must be escaped to `&amp;` — otherwise a downstream
// HTML parser may interpret `& Jerry` as the start of an entity.
// The negative lookahead permits only the five known-safe entities.
expect(snippet).toContain("&amp;");
expect(snippet).not.toMatch(/&(?!amp;|lt;|gt;|quot;|#39;)/);
// `<` from "2 < 3" must also be escaped, even though it's not
// adjacent to a tag-like structure.
expect(snippet).toContain("&lt;");
});
it("does not crash when the snippet column is NULL", async () => {
// FTS triggers insert raw column values with no COALESCE, so any
// row whose title (the column the snippet() call targets) is
// NULL produces a NULL snippet from SQLite — even when the row
// matched via a different searchable column. A regression that
// drops the null-guard throws "Cannot read properties of null
// (reading 'replace')" before these assertions can run.
const registry = new SchemaRegistry(db);
await registry.updateField("post", "content", { searchable: true });
const ftsManager = new FTSManager(db);
await ftsManager.enableSearch("post");
await repo.create(
createPostFixture({
slug: "no-title",
status: "published",
data: {
// Deliberately NULL title — matched via the content
// column so this row still surfaces in results.
title: null,
content: [
{
_type: "block",
style: "normal",
children: [{ _type: "span", text: "Quokka spotted today" }],
},
],
},
}),
);
const { items } = await searchWithDb(db, "Quokka", {
collections: ["post"],
});
expect(items).toHaveLength(1);
// Whether the snippet ends up as a string or undefined doesn't
// matter — the contract is "the search call must not throw".
expect(typeof items[0]!.snippet === "string" || items[0]!.snippet === undefined).toBe(true);
});
it("preserves `<mark>` highlight tags as live HTML", async () => {
// The whole point of returning a snippet is highlighting matches.
// Sanitization must not strip the markers we deliberately added.
await repo.create(
createPostFixture({
slug: "highlight",
status: "published",
data: { title: "The quick brown fox jumps" },
}),
);
const { items } = await searchWithDb(db, "fox", {
collections: ["post"],
});
expect(items).toHaveLength(1);
const snippet = items[0]!.snippet ?? "";
expect(snippet).toContain("<mark>");
expect(snippet).toContain("</mark>");
// And the highlighted token should be the matched word.
expect(snippet).toMatch(/<mark>fox<\/mark>/i);
});
});

View File

@@ -0,0 +1,57 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import { getSuggestions } from "../../../src/search/query.js";
import { createPostFixture } from "../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("getSuggestions (Integration)", () => {
  let db: Kysely<Database>;
  let repo: ContentRepository;

  beforeEach(async () => {
    // Fresh DB with the stock "post" collection, a searchable title,
    // FTS enabled, and exactly one published entry to suggest.
    db = await setupTestDatabaseWithCollections();
    repo = new ContentRepository(db);
    const registry = new SchemaRegistry(db);
    const ftsManager = new FTSManager(db);
    await registry.updateField("post", "title", { searchable: true });
    await ftsManager.enableSearch("post");
    await repo.create(
      createPostFixture({
        slug: "designing-things",
        status: "published",
        data: { title: "Designing things" },
      }),
    );
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  it("returns matching suggestions for a plain prefix query", async () => {
    // "des" is a prefix of "Designing" — the single entry should match.
    const hits = await getSuggestions(db, "des", { collections: ["post"] });
    expect(hits).toHaveLength(1);
    expect(hits[0]).toMatchObject({
      collection: "post",
      title: "Designing things",
    });
  });

  it("returns empty array for a non-matching query", async () => {
    const hits = await getSuggestions(db, "zzz", { collections: ["post"] });
    expect(hits).toEqual([]);
  });
});

View File

@@ -0,0 +1,127 @@
/**
* Tests that `applySeed()` creates a live revision for entries seeded with
* `status: "published"`.
*
* Regression for #650: seeded published content was missing `live_revision_id`,
* causing the admin UI to show "Save & Publish" instead of "Unpublish" for
* content that was already supposed to be live.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../src/database/types.js";
import { applySeed } from "../../src/seed/apply.js";
import type { SeedFile } from "../../src/seed/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../utils/test-db.js";
/**
 * Build the smallest seed file that exercises the publish path: one
 * "posts" collection with two fields and a single entry carrying the
 * requested status. Used to contrast "published" vs "draft" seeding.
 */
function seedWith(status: "draft" | "published"): SeedFile {
  const entry = {
    id: "post-1",
    slug: "hello-world",
    status,
    data: { title: "Hello World", body: "body" },
  };
  return {
    version: "1",
    collections: [
      {
        slug: "posts",
        label: "Posts",
        labelSingular: "Post",
        fields: [
          { slug: "title", label: "Title", type: "string" },
          { slug: "body", label: "Body", type: "text" },
        ],
      },
    ],
    content: { posts: [entry] },
  };
}
describe("applySeed creates live revisions for published content", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("populates live_revision_id when seed status is 'published'", async () => {
await applySeed(db, seedWith("published"), { includeContent: true });
// Dynamic content tables (ec_*) are not part of the static Database
// type, hence the `as any` table name and the row casts below.
const row = await db
.selectFrom("ec_posts" as any)
.selectAll()
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
const r = row as Record<string, unknown>;
expect(r.status).toBe("published");
expect(r.live_revision_id).toBeTruthy();
expect(r.draft_revision_id).toBeNull();
expect(r.published_at).toBeTruthy();
});
it("does not create a live revision when seed status is 'draft'", async () => {
await applySeed(db, seedWith("draft"), { includeContent: true });
const row = await db
.selectFrom("ec_posts" as any)
.selectAll()
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
const r = row as Record<string, unknown>;
expect(r.status).toBe("draft");
expect(r.live_revision_id).toBeNull();
});
it("populates live_revision_id when updating an existing entry to 'published' via onConflict: 'update'", async () => {
// First pass: create as draft
await applySeed(db, seedWith("draft"), { includeContent: true });
// Second pass: same slug, now published
await applySeed(db, seedWith("published"), {
includeContent: true,
onConflict: "update",
});
const row = await db
.selectFrom("ec_posts" as any)
.selectAll()
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
const r = row as Record<string, unknown>;
expect(r.status).toBe("published");
expect(r.live_revision_id).toBeTruthy();
});
it("writes a revision row to the revisions table", async () => {
await applySeed(db, seedWith("published"), { includeContent: true });
const row = await db
.selectFrom("ec_posts" as any)
.select(["id", "live_revision_id"] as never)
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
const r = row as { id: string; live_revision_id: string };
// The pointer stored on the content row must resolve to a real
// revision that references this collection and entry back.
const revision = await db
.selectFrom("revisions")
.selectAll()
.where("id", "=", r.live_revision_id)
.executeTakeFirstOrThrow();
expect(revision.collection).toBe("posts");
expect(revision.entry_id).toBe(r.id);
});
});

View File

@@ -0,0 +1,579 @@
/**
* Tests for seed --on-conflict modes: skip, update, error
*
* Verifies that applySeed() correctly handles conflicts when records
* already exist in the database.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../src/database/types.js";
import { applySeed } from "../../src/seed/apply.js";
import type { SeedFile } from "../../src/seed/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../utils/test-db.js";
/**
* Minimal seed file with one collection, one byline, one redirect, and one section
*/
/**
 * Minimal seed file with one collection, one byline, one redirect, and
 * one section. `overrides` is spread last, so callers can blank out or
 * replace any top-level key (e.g. `{ bylines: [] }`).
 */
function createTestSeed(overrides?: Partial<SeedFile>): SeedFile {
  const base: SeedFile = {
    version: "1",
    collections: [
      {
        slug: "posts",
        label: "Posts",
        labelSingular: "Post",
        fields: [
          { slug: "title", label: "Title", type: "string" },
          { slug: "body", label: "Body", type: "text" },
        ],
      },
    ],
    bylines: [
      {
        id: "byline-1",
        slug: "jane-doe",
        displayName: "Jane Doe",
        bio: "Original bio",
      },
    ],
    redirects: [
      {
        source: "/old-page",
        destination: "/new-page",
        type: 301,
      },
    ],
    sections: [
      {
        slug: "hero",
        title: "Hero Section",
        description: "Original description",
        content: [{ _type: "block", _key: "1" }],
      },
    ],
    content: {
      posts: [
        {
          id: "post-1",
          slug: "hello-world",
          status: "published",
          data: { title: "Hello World", body: "Original body" },
        },
      ],
    },
  };
  return { ...base, ...overrides };
}
/**
* Seed file with updated values for all entities
*/
/**
 * Seed file with updated values for every entity produced by
 * createTestSeed(): same slugs/ids/sources so each record conflicts,
 * but every mutable field changed so "update" mode is observable.
 */
function createUpdatedSeed(): SeedFile {
  const postsCollection = {
    slug: "posts",
    label: "Blog Posts",
    labelSingular: "Blog Post",
    fields: [
      { slug: "title", label: "Post Title", type: "string" },
      { slug: "body", label: "Post Body", type: "text" },
    ],
  };
  return {
    version: "1",
    collections: [postsCollection],
    bylines: [
      {
        id: "byline-1",
        slug: "jane-doe",
        displayName: "Jane Smith",
        bio: "Updated bio",
      },
    ],
    redirects: [
      {
        source: "/old-page",
        destination: "/newer-page",
        type: 302,
      },
    ],
    sections: [
      {
        slug: "hero",
        title: "Updated Hero",
        description: "Updated description",
        content: [{ _type: "block", _key: "2" }],
      },
    ],
    content: {
      posts: [
        {
          id: "post-1",
          slug: "hello-world",
          status: "published",
          data: { title: "Hello World Updated", body: "Updated body" },
        },
      ],
    },
  };
}
describe("applySeed onConflict modes", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
// Skip mode: a second apply of the same seed leaves everything alone
// and reports it as skipped, never updated or re-created.
describe("onConflict: skip (default)", () => {
it("skips existing collections", async () => {
const seed = createTestSeed();
// First apply
await applySeed(db, seed, { includeContent: true });
// Second apply with default (skip)
const result = await applySeed(db, seed, { includeContent: true });
expect(result.collections.created).toBe(0);
expect(result.collections.skipped).toBe(1);
expect(result.collections.updated).toBe(0);
});
it("skips existing bylines", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const result = await applySeed(db, seed, { includeContent: true });
expect(result.bylines.created).toBe(0);
expect(result.bylines.skipped).toBe(1);
expect(result.bylines.updated).toBe(0);
});
it("skips existing redirects", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const result = await applySeed(db, seed);
expect(result.redirects.created).toBe(0);
expect(result.redirects.skipped).toBe(1);
expect(result.redirects.updated).toBe(0);
});
it("skips existing sections", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const result = await applySeed(db, seed);
expect(result.sections.created).toBe(0);
expect(result.sections.skipped).toBe(1);
expect(result.sections.updated).toBe(0);
});
it("skips existing content", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const result = await applySeed(db, seed, { includeContent: true });
expect(result.content.created).toBe(0);
expect(result.content.skipped).toBe(1);
expect(result.content.updated).toBe(0);
});
it("defaults to skip when onConflict is not specified", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
// No onConflict specified -- should default to skip
const result = await applySeed(db, seed, { includeContent: true });
expect(result.collections.skipped).toBe(1);
expect(result.collections.created).toBe(0);
expect(result.collections.updated).toBe(0);
});
});
// Update mode: each re-applied entity is reported as updated AND the
// stored row actually reflects the new values (both are asserted).
describe("onConflict: update", () => {
it("updates existing collections and fields", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.collections.updated).toBe(1);
expect(result.collections.created).toBe(0);
expect(result.fields.updated).toBe(2);
// Verify the collection was actually updated
const row = await db
.selectFrom("_emdash_collections")
.selectAll()
.where("slug", "=", "posts")
.executeTakeFirst();
expect(row?.label).toBe("Blog Posts");
expect(row?.label_singular).toBe("Blog Post");
});
it("updates existing bylines", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.bylines.updated).toBe(1);
expect(result.bylines.created).toBe(0);
// Verify the byline was actually updated
const row = await db
.selectFrom("_emdash_bylines")
.selectAll()
.where("slug", "=", "jane-doe")
.executeTakeFirst();
expect(row?.display_name).toBe("Jane Smith");
expect(row?.bio).toBe("Updated bio");
});
it("updates existing redirects", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
onConflict: "update",
});
expect(result.redirects.updated).toBe(1);
expect(result.redirects.created).toBe(0);
// Verify the redirect was actually updated
const row = await db
.selectFrom("_emdash_redirects")
.selectAll()
.where("source", "=", "/old-page")
.executeTakeFirst();
expect(row?.destination).toBe("/newer-page");
expect(row?.type).toBe(302);
});
it("updates existing sections", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
onConflict: "update",
});
expect(result.sections.updated).toBe(1);
expect(result.sections.created).toBe(0);
// Verify the section was actually updated
const row = await db
.selectFrom("_emdash_sections")
.selectAll()
.where("slug", "=", "hero")
.executeTakeFirst();
expect(row?.title).toBe("Updated Hero");
expect(row?.description).toBe("Updated description");
});
it("updates existing content", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.content.updated).toBe(1);
expect(result.content.created).toBe(0);
// Verify the content was actually updated (dynamic ec_* table, so
// the table name and row are cast around the static Database type)
const row = await db
.selectFrom("ec_posts" as any)
.selectAll()
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
expect((row as Record<string, unknown>).title).toBe("Hello World Updated");
expect((row as Record<string, unknown>).body).toBe("Updated body");
});
});
// Error mode: the first conflicting entity aborts the apply. Because
// entities are processed in a fixed order (collections, bylines,
// redirects, sections, content), earlier entity kinds are blanked out
// of the seed so each test reaches the conflict it is targeting.
describe("onConflict: error", () => {
it("throws on existing collection", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
await expect(
applySeed(db, seed, {
includeContent: true,
onConflict: "error",
}),
).rejects.toThrow('Conflict: collection "posts" already exists');
});
it("throws on existing byline", async () => {
// Seed without collections to get past collections step
const seed = createTestSeed({ collections: [] });
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: byline "jane-doe" already exists',
);
});
it("throws on existing redirect", async () => {
const seed = createTestSeed({
collections: [],
bylines: [],
sections: [],
});
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: redirect "/old-page" already exists',
);
});
it("throws on existing section", async () => {
const seed = createTestSeed({
collections: [],
bylines: [],
redirects: [],
});
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: section "hero" already exists',
);
});
it("throws on existing content", async () => {
// First apply creates collections and content
const seed = createTestSeed({
bylines: [],
redirects: [],
sections: [],
});
await applySeed(db, seed, { includeContent: true });
// Second apply with only content (collections already exist, skip them)
const contentOnlySeed = createTestSeed({
collections: [],
bylines: [],
redirects: [],
sections: [],
});
await expect(
applySeed(db, contentOnlySeed, {
includeContent: true,
onConflict: "error",
}),
).rejects.toThrow('Conflict: content "hello-world" in "posts" already exists');
});
});
describe("mixed scenarios", () => {
// Update mode must be per-record: conflicting records update while
// genuinely new ones in the same seed are still created.
it("creates new records alongside existing ones in update mode", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
// Add a new content entry to the seed
const extendedSeed = createUpdatedSeed();
const posts = extendedSeed.content!["posts"];
if (!posts) throw new Error("posts missing from seed");
posts.push({
id: "post-2",
slug: "second-post",
status: "published",
data: { title: "Second Post", body: "New content" },
});
const result = await applySeed(db, extendedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.content.updated).toBe(1);
expect(result.content.created).toBe(1);
});
// Updating an entry must replace its taxonomy assignments with the
// seed's set, not merge with what was already attached.
it("clears taxonomy assignments on content update when seed removes them", async () => {
// Seed with a taxonomy and content that has taxonomy assignments
const seed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
taxonomies: [
{
name: "categories",
label: "Categories",
hierarchical: false,
collections: ["posts"],
terms: [
{ slug: "news", label: "News" },
{ slug: "tech", label: "Tech" },
],
},
],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello" },
taxonomies: { categories: ["news", "tech"] },
},
],
},
};
await applySeed(db, seed, { includeContent: true });
// Verify both terms are attached
const beforeRows = await db
.selectFrom("content_taxonomies")
.selectAll()
.where("collection", "=", "posts")
.execute();
expect(beforeRows).toHaveLength(2);
// Re-apply with only one taxonomy term
const updatedSeed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
taxonomies: [
{
name: "categories",
label: "Categories",
hierarchical: false,
collections: ["posts"],
terms: [
{ slug: "news", label: "News" },
{ slug: "tech", label: "Tech" },
],
},
],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello Updated" },
taxonomies: { categories: ["tech"] },
},
],
},
};
await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
// Should only have "tech" now, not both
const afterRows = await db
.selectFrom("content_taxonomies")
.selectAll()
.where("collection", "=", "posts")
.execute();
expect(afterRows).toHaveLength(1);
});
// Same replace-not-merge contract for byline assignments: an entry
// re-seeded without bylines ends up with none.
it("clears byline assignments on content update when seed removes them", async () => {
const seed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Doe" }],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello" },
bylines: [{ byline: "byline-1" }],
},
],
},
};
await applySeed(db, seed, { includeContent: true });
// Verify byline is attached
const beforeRows = await db
.selectFrom("_emdash_content_bylines")
.selectAll()
.where("collection_slug", "=", "posts")
.execute();
expect(beforeRows).toHaveLength(1);
// Re-apply without bylines on the content entry
const updatedSeed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Doe" }],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello Updated" },
// No bylines -- should clear existing
},
],
},
};
await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
// Should have no bylines now
const afterRows = await db
.selectFrom("_emdash_content_bylines")
.selectAll()
.where("collection_slug", "=", "posts")
.execute();
expect(afterRows).toHaveLength(0);
});
});
});

View File

@@ -0,0 +1,160 @@
/**
* Regression guard for issue #867 (and the related portfolio
* `featured_image` shape bug surfaced during review).
*
* The bug: PR #777 wired the existing `generateZodSchema()` into the
* runtime content-update path, so autosave now validates the body the
* admin re-sends on every keystroke. Several first-party templates ship
* seed content that didn't satisfy that schema (PT blocks missing
* `_key`, portfolio's `featured_image` as bare URL strings instead of
* media objects). The result: any user who scaffolded those templates
* couldn't save edits to seeded entries.
*
* This test does the smallest end-to-end thing that would have caught
* both regressions: for every shipped template seed, apply it to a
* fresh DB and re-validate every stored entry against the same
* validator the autosave endpoint uses (`validateContentData` with
* `partial: true`). If a template ever ships malformed seed data
* again, this fails before release.
*/
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { validateContentData } from "../../src/api/handlers/validation.js";
import type { Database } from "../../src/database/types.js";
import { applySeed } from "../../src/seed/apply.js";
import type { SeedFile } from "../../src/seed/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../utils/test-db.js";
// `tests/integration/` -> repo root is four levels up.
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../..");
// Every first-party seed checked by this suite, as workspace-root-relative
// paths. Node templates and their Cloudflare siblings are listed separately
// because each ships its own seed file.
const TEMPLATE_SEEDS = [
  "templates/blog/seed/seed.json",
  "templates/blog-cloudflare/seed/seed.json",
  "templates/portfolio/seed/seed.json",
  "templates/portfolio-cloudflare/seed/seed.json",
  "templates/starter/seed/seed.json",
  "templates/starter-cloudflare/seed/seed.json",
  "templates/marketing/seed/seed.json",
  "templates/marketing-cloudflare/seed/seed.json",
] as const;
/** Read and parse one seed file, given its path relative to the workspace root. */
function loadSeed(rel: string): SeedFile {
  const raw = readFileSync(resolve(WORKSPACE_ROOT, rel), "utf8");
  return JSON.parse(raw) as SeedFile;
}
/**
 * Walk a seed and return every collection slug that has at least one
 * entry, so the test can iterate dynamic `ec_*` tables without
 * hard-coding them. Slugs come back in seed order to keep failures
 * predictable.
 */
function collectionsWithContent(seed: SeedFile): string[] {
  const content = seed.content;
  if (!content) return [];
  return Object.entries(content)
    .filter(([, entries]) => Array.isArray(entries) && entries.length > 0)
    .map(([slug]) => slug);
}
describe("shipped template seeds survive the autosave validator (issue #867)", () => {
  let db: Kysely<Database>;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    await teardownTestDatabase(db);
  });
  // One `it` per seed file so a single malformed template fails in isolation.
  for (const rel of TEMPLATE_SEEDS) {
    it(`${rel}: every seeded entry round-trips through validateContentData`, async () => {
      const seed = loadSeed(rel);
      // `includeContent: true` is what `create-emdash` setup uses.
      // `skipMediaDownload: true` keeps the test offline -- we don't
      // care about the actual bytes here, only the validator-relevant
      // shape of stored entries.
      await applySeed(db, seed, {
        includeContent: true,
        skipMediaDownload: true,
      });
      const slugs = collectionsWithContent(seed);
      if (slugs.length === 0) {
        // Marketing has no content entries -- nothing to validate,
        // but exercising applySeed itself is still useful coverage.
        return;
      }
      for (const slug of slugs) {
        // Content for collection `foo` lives in the dynamic table `ec_foo`.
        const tableName = `ec_${slug}`;
        const rows = await db
          // biome-ignore lint/suspicious/noExplicitAny: dynamic content table
          .selectFrom(tableName as any)
          .selectAll()
          .where("deleted_at", "is", null)
          // biome-ignore lint/suspicious/noExplicitAny: dynamic content table
          .execute();
        expect(rows.length, `expected at least one row in ${tableName}`).toBeGreaterThan(0);
        for (const row of rows as Array<Record<string, unknown>>) {
          // Reconstruct the data shape the admin holds in memory:
          // system columns + the user's field columns. We strip
          // the obvious system columns so they don't get flagged
          // as "unknown field" by the validator.
          const data: Record<string, unknown> = {};
          for (const [k, v] of Object.entries(row)) {
            if (
              k === "id" ||
              k === "slug" ||
              k === "status" ||
              k === "author_id" ||
              k === "primary_byline_id" ||
              k === "created_at" ||
              k === "updated_at" ||
              k === "published_at" ||
              k === "scheduled_at" ||
              k === "deleted_at" ||
              k === "version" ||
              k === "live_revision_id" ||
              k === "draft_revision_id" ||
              k === "locale" ||
              k === "translation_group"
            ) {
              continue;
            }
            // JSON-shaped columns come back as strings; parse so
            // the validator sees the structure it expects.
            if (typeof v === "string" && (v.startsWith("[") || v.startsWith("{"))) {
              try {
                data[k] = JSON.parse(v);
                continue;
              } catch {
                // Fall through -- treat as plain string.
              }
            }
            data[k] = v;
          }
          // Same validator + `partial: true` mode the autosave endpoint uses.
          const result = await validateContentData(db, slug, data, { partial: true });
          if (!("ok" in result) || !result.ok) {
            // `result.ok` re-checked here only to satisfy narrowing; on this
            // branch a failing result carries the validator's error message.
            const message = result.ok ? "(unexpected)" : result.error.message;
            throw new Error(
              `${rel}: row in ${tableName} (slug=${row.slug as string}) failed validation: ${message}`,
            );
          }
        }
      }
    });
  }
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,373 @@
/**
* Integration test server helper.
*
* Bootstraps an isolated Astro dev server from a minimal fixture,
* runs setup, seeds test data, and creates auth tokens. Each test
* suite gets a fresh database and server process.
*
* Usage:
*
* const ctx = await createTestServer({ port: 4399 });
* // ctx.client — EmDashClient (devBypass auth)
* // ctx.token — PAT bearer token for CLI tests
* // ctx.baseUrl — http://localhost:4399
* // ctx.cwd — working directory of the running server
* await ctx.cleanup();
*/
import { execFile, spawn } from "node:child_process";
import { existsSync, mkdirSync, mkdtempSync, rmSync, symlinkSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { EmDashClient } from "../../src/client/index.js";
const execAsync = promisify(execFile);
// Test regex patterns
// Captures the `name=value` pair of a Set-Cookie header: everything before
// the first `;`, dropping attributes like Path/HttpOnly.
const SESSION_COOKIE_REGEX = /^([^;]+)/;
// ---------------------------------------------------------------------------
// Paths
// ---------------------------------------------------------------------------
const FIXTURE_DIR = resolve(import.meta.dirname, "fixture");
// Borrow node_modules from demos/simple — it has all the deps we need
// and is maintained by pnpm workspace resolution.
const DONOR_NODE_MODULES = resolve(import.meta.dirname, "../../../../demos/simple/node_modules");
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
export interface TestServerOptions {
port: number;
/** Server startup timeout in ms (default: 90_000) */
timeout?: number;
/** Seed test data after setup (default: true) */
seed?: boolean;
/** Additional environment variables to pass to the dev server */
env?: Record<string, string>;
}
export interface TestServerContext {
/** Base URL of the running server */
baseUrl: string;
/** Working directory containing the fixture */
cwd: string;
/** EmDashClient authenticated via dev-bypass session */
client: EmDashClient;
/** PAT bearer token with full scopes (for CLI / raw fetch tests) */
token: string;
/** Seeded collection slugs */
collections: string[];
/** Seeded content IDs keyed by collection */
contentIds: Record<string, string[]>;
/** Session cookie string for raw fetch calls needing session auth */
sessionCookie: string;
/** Stop the server and remove the temp directory */
cleanup: () => Promise<void>;
}
// ---------------------------------------------------------------------------
// Node.js version guard
// ---------------------------------------------------------------------------
/**
 * Astro requires Node.js >= 22.12.0. Call from a `beforeAll` to fail the
 * suite immediately when the environment is misconfigured rather than
 * silently skipping.
 *
 * @throws Error when the running Node version is below 22.12.0.
 */
export function assertNodeVersion(): void {
  const parts = process.versions.node.split(".").map(Number);
  const major = parts[0] ?? 0;
  const minor = parts[1] ?? 0;
  const supported = major > 22 || (major === 22 && minor >= 12);
  if (supported) return;
  throw new Error(
    `Integration tests require Node.js >= 22.12.0 (running ${process.versions.node}). ` +
      `Update your Node version instead of skipping tests.`,
  );
}
// ---------------------------------------------------------------------------
// Build guard
// ---------------------------------------------------------------------------
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../..");
const CLI_BINARY = resolve(import.meta.dirname, "../../dist/cli/index.mjs");
let buildPromise: Promise<void> | null = null;
/**
 * Ensure the workspace is built before starting integration tests.
 * Runs `pnpm build` once (cached across test suites via module-level promise).
 * Skips if the CLI binary already exists.
 */
export function ensureBuilt(): Promise<void> {
  // First caller kicks off the build; everyone else awaits the same promise.
  if (buildPromise === null) {
    buildPromise = doBuild();
  }
  return buildPromise;
}
/** Run `pnpm build` at the workspace root unless the CLI binary already exists. */
async function doBuild(): Promise<void> {
  // A prior build leaves the CLI entry point on disk; treat that as up to date.
  if (existsSync(CLI_BINARY)) {
    return;
  }
  console.log("[integration] Built artifacts missing — running pnpm build...");
  const buildOptions = { cwd: WORKSPACE_ROOT, timeout: 120_000 };
  await execAsync("pnpm", ["build"], buildOptions);
  console.log("[integration] Build complete.");
}
// ---------------------------------------------------------------------------
// Server lifecycle
// ---------------------------------------------------------------------------
/**
 * Poll `url` until any HTTP response arrives, retrying every 500ms.
 * Any status (even a 500) counts as "server is up"; only connection-level
 * failures keep the loop waiting. Throws once `timeoutMs` elapses.
 */
async function waitForServer(url: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url, { signal: AbortSignal.timeout(2000) });
      if (res.status > 0) return;
    } catch {
      // Server not ready yet — connection refused / timeout.
    }
    await new Promise((wake) => setTimeout(wake, 500));
  }
  throw new Error(`Server at ${url} did not start within ${timeoutMs}ms`);
}
/**
 * Create an Astro dev server for integration testing.
 *
 * Runs the fixture in-place to avoid Astro virtual module resolution
 * issues with symlinked temp dirs. Uses a temp directory only for the
 * database file — source files stay at their real paths.
 *
 * On any failure during startup the spawned process is killed, the temp
 * dir removed, and the error is re-thrown with the captured server log
 * appended. On success the caller owns shutdown via `ctx.cleanup()`.
 */
export async function createTestServer(options: TestServerOptions): Promise<TestServerContext> {
  const { port, timeout = 90_000, seed = true } = options;
  const baseUrl = `http://localhost:${port}`;
  // --- 0. Ensure workspace is built ---
  await ensureBuilt();
  // --- 1. Run fixture in-place, temp dir only for DB ---
  const workDir = FIXTURE_DIR;
  const tempDataDir = mkdtempSync(join(tmpdir(), "emdash-integration-"));
  const dbPath = join(tempDataDir, "test.db");
  const uploadsDir = join(tempDataDir, "uploads");
  mkdirSync(uploadsDir, { recursive: true });
  // Ensure node_modules symlink exists in the fixture dir.
  // Multiple test suites may race to create this — handle EEXIST gracefully.
  // The symlink is intentionally never removed: it's shared across concurrent
  // test suites and gitignored, so cleanup of one suite must not break others.
  const fixtureNodeModules = join(FIXTURE_DIR, "node_modules");
  if (!existsSync(fixtureNodeModules)) {
    try {
      symlinkSync(DONOR_NODE_MODULES, fixtureNodeModules);
    } catch (err: unknown) {
      if ((err as NodeJS.ErrnoException).code !== "EEXIST") throw err;
    }
  }
  // --- 2. Start dev server ---
  const astroBin = join(fixtureNodeModules, ".bin", "astro");
  const server = spawn(astroBin, ["dev", "--port", String(port)], {
    cwd: workDir,
    env: {
      ...process.env,
      // Presumably consumed by the fixture's emdash config to point the DB
      // and uploads at the per-suite temp dir — confirm against fixture/.
      EMDASH_TEST_DB: `file:${dbPath}`,
      EMDASH_TEST_UPLOADS: uploadsDir,
      ...options.env,
    },
    stdio: "pipe",
  });
  // Always capture server output. Forward to stderr when DEBUG is set,
  // and always keep a ring buffer of the last 5 KB for error reporting.
  let serverOutput = "";
  const MAX_OUTPUT = 5000;
  function appendOutput(chunk: string): void {
    if (process.env.DEBUG) process.stderr.write(`[integration:${port}] ${chunk}`);
    serverOutput += chunk;
    // Trim lazily at 2x the cap so we slice occasionally, not per chunk.
    if (serverOutput.length > MAX_OUTPUT * 2) {
      serverOutput = serverOutput.slice(-MAX_OUTPUT);
    }
  }
  server.stdout?.on("data", (data: Buffer) => appendOutput(data.toString()));
  server.stderr?.on("data", (data: Buffer) => appendOutput(data.toString()));
  // Track for cleanup
  let stopped = false;
  // Idempotent: invoked from both the error path below and by the caller.
  async function cleanup(): Promise<void> {
    if (stopped) return;
    stopped = true;
    server.kill("SIGTERM");
    await new Promise((r) => setTimeout(r, 1000));
    // Force kill if still alive
    if (!server.killed) {
      server.kill("SIGKILL");
      await new Promise((r) => setTimeout(r, 500));
    }
    // Remove temp data directory
    rmSync(tempDataDir, { recursive: true, force: true });
  }
  try {
    // --- 3. Wait for server to be ready ---
    await waitForServer(`${baseUrl}/_emdash/api/setup/dev-bypass`, timeout);
    // --- 4. Run setup + create PAT in one request ---
    // The ?token query param tells the dev-bypass endpoint to also
    // create a PAT with full scopes and return it in the response.
    const setupRes = await fetch(`${baseUrl}/_emdash/api/setup/dev-bypass?token=1`);
    if (!setupRes.ok) {
      const body = await setupRes.text().catch(() => "");
      throw new Error(`Setup bypass failed (${setupRes.status}): ${body}`);
    }
    const setupJson = (await setupRes.json()) as {
      data: { user: { id: string; email: string }; token?: string };
    };
    const setupData = setupJson.data;
    const token = setupData.token;
    if (!token) {
      throw new Error("Setup bypass did not return a PAT token");
    }
    // Extract session cookie for raw fetch calls that need session auth
    const setCookie = setupRes.headers.get("set-cookie");
    let sessionCookie = "";
    if (setCookie) {
      const match = setCookie.match(SESSION_COOKIE_REGEX);
      if (match) sessionCookie = match[1]!;
    }
    // --- 5. Create client authenticated via PAT ---
    const client = new EmDashClient({
      baseUrl,
      token,
    });
    // --- 8. Seed test data ---
    // NOTE(review): step numbers jump 5 -> 8; looks historical — confirm.
    const collections: string[] = [];
    const contentIds: Record<string, string[]> = {};
    if (seed) {
      await seedTestData(client, collections, contentIds);
    }
    return {
      baseUrl,
      cwd: workDir,
      client,
      token,
      collections,
      contentIds,
      sessionCookie,
      cleanup,
    };
  } catch (error) {
    // Include server output in error for CI debugging
    const msg = error instanceof Error ? error.message : String(error);
    await cleanup();
    throw new Error(
      `${msg}\n\nServer output (last ${MAX_OUTPUT} chars):\n${serverOutput.slice(-MAX_OUTPUT)}`,
      {
        cause: error,
      },
    );
  }
}
// ---------------------------------------------------------------------------
// Seed data
// ---------------------------------------------------------------------------
/**
 * Seeds sample content into the test server.
 *
 * Collections and fields are created by the seed file
 * (fixture/.emdash/seed.json) during dev-bypass setup.
 * This function only creates content entries.
 *
 * Content:
 * - posts: 3 items (2 published, 1 draft)
 * - pages: 2 items (1 published, 1 draft)
 *
 * `collections` and `contentIds` are mutated in place so the caller's
 * context object picks up the seeded slugs and IDs.
 */
async function seedTestData(
  client: EmDashClient,
  collections: string[],
  contentIds: Record<string, string[]>,
): Promise<void> {
  // Fixture data, in creation order; `publish` marks which entries get
  // promoted from draft after creation.
  const fixtures: Array<{
    collection: string;
    entries: Array<{ slug: string; publish: boolean; data: Record<string, string> }>;
  }> = [
    {
      collection: "posts",
      entries: [
        {
          slug: "first-post",
          publish: true,
          data: {
            title: "First Post",
            body: "Hello **world**. This is the first post.",
            excerpt: "The very first post",
          },
        },
        {
          slug: "second-post",
          publish: true,
          data: {
            title: "Second Post",
            body: "A second post with a [link](https://example.com).",
            excerpt: "Another post",
          },
        },
        {
          slug: "draft-post",
          publish: false,
          data: {
            title: "Draft Post",
            body: "This post is still a draft.",
            excerpt: "Not published yet",
          },
        },
      ],
    },
    {
      collection: "pages",
      entries: [
        {
          slug: "about",
          publish: true,
          data: {
            title: "About",
            body: "# About Us\n\nWe are a **test** fixture.",
          },
        },
        {
          slug: "contact",
          publish: false,
          data: {
            title: "Contact",
            body: "Get in touch.",
          },
        },
      ],
    },
  ];
  // Register all collection slugs up front (matches the original ordering).
  for (const { collection } of fixtures) {
    collections.push(collection);
  }
  for (const { collection, entries } of fixtures) {
    const ids: string[] = [];
    for (const entry of entries) {
      const created = await client.create(collection, {
        data: entry.data,
        slug: entry.slug,
      });
      ids.push(created.id);
      if (entry.publish) {
        await client.publish(collection, created.id);
      }
    }
    contentIds[collection] = ids;
  }
}

View File

@@ -0,0 +1,424 @@
import { execFile, spawn } from "node:child_process";
import { rmSync } from "node:fs";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { describe, expect, it } from "vitest";
import { ensureBuilt } from "../server.js";
/** One template/demo site to boot and smoke-test. */
interface SiteCase {
  /** Human-readable name used in test titles and failure messages. */
  name: string;
  /** Absolute path to the site's directory. */
  dir: string;
  /** Dedicated dev-server port for this site. */
  port: number;
  /** How long to wait for `astro dev` to answer its first request. */
  startupTimeoutMs: number;
  /** Path polled while booting (default in bootSite: /_emdash/admin/). */
  waitPath?: string;
  /** Setup endpoint hit before checks (default: dev-bypass with redirect). */
  setupPath?: string | null;
  /** Frontend path to smoke-test (default: /). */
  frontendPath?: string;
  /** Acceptable frontend status codes (default: 200, 302, 307, 308). */
  frontendStatuses?: number[];
  /** Whether the frontend body must contain `<!DOCTYPE html>` (default: true). */
  requireDoctype?: boolean;
}
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
const execAsync = promisify(execFile);
// Sites exercised by the runtime-verification suite. Each entry pins a
// distinct port and a startup budget; Cloudflare variants get 120s.
// NOTE(review): templates/starter (non-Cloudflare) is absent from the
// matrix while starter-cloudflare is present — confirm that's intentional.
const SITE_MATRIX: SiteCase[] = [
  {
    name: "demos/playground",
    dir: resolve(WORKSPACE_ROOT, "demos/playground"),
    port: 4603,
    startupTimeoutMs: 120_000,
    waitPath: "/playground",
    frontendPath: "/playground",
    requireDoctype: false,
  },
  // Templates
  {
    name: "templates/blog",
    dir: resolve(WORKSPACE_ROOT, "templates/blog"),
    port: 4612,
    startupTimeoutMs: 60_000,
  },
  {
    name: "templates/blog-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/blog-cloudflare"),
    port: 4613,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/marketing",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing"),
    port: 4614,
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/marketing-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing-cloudflare"),
    port: 4615,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/portfolio",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio"),
    port: 4616,
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/portfolio-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio-cloudflare"),
    port: 4617,
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/starter-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/starter-cloudflare"),
    port: 4618,
    startupTimeoutMs: 120_000,
  },
];
/**
 * Poll `url` every 500ms until any HTTP status comes back (redirects are
 * not followed); throw once `timeoutMs` elapses with only network errors.
 */
async function waitForServer(url: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(3000),
      });
      // Any response at all — even an error status — means the server is up.
      if (res.status > 0) return;
    } catch {
      // retry
    }
    await new Promise((wake) => setTimeout(wake, 500));
  }
  throw new Error(`Server at ${url} did not start within ${timeoutMs}ms`);
}
/**
 * GET `url` (redirects unfollowed), retrying on network errors and 5xx
 * responses with `delayMs` between attempts. Returns the first response
 * whose status is < 500; after `retries` extra attempts, throws the last
 * failure (wrapped in an Error if it wasn't one).
 */
async function fetchWithRetry(url: string, retries = 10, delayMs = 1500): Promise<Response> {
  let lastFailure: unknown;
  const attempts = retries + 1;
  for (let i = 0; i < attempts; i++) {
    try {
      const res = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(15_000),
      });
      if (res.status < 500) return res;
      lastFailure = new Error(`${url} returned ${res.status}`);
    } catch (error) {
      lastFailure = error;
    }
    // No delay after the final attempt — fail fast.
    if (i < attempts - 1) {
      await new Promise((wake) => setTimeout(wake, delayMs));
    }
  }
  if (lastFailure instanceof Error) throw lastFailure;
  throw new Error(`Request failed for ${url}`);
}
// ---------------------------------------------------------------------------
// Build verification — runs a single recursive `pnpm build` across templates
// and the playground demo in parallel.
// ---------------------------------------------------------------------------
describe("Site build verification", () => {
  it("all templates and playground build successfully", { timeout: 300_000 }, async () => {
    // The library must be built first; the sites build against its output.
    await ensureBuilt();
    try {
      // One recursive pnpm run builds all templates plus the playground
      // demo, with CI=true in the environment.
      await execAsync(
        "pnpm",
        [
          "run",
          "--recursive",
          "--filter",
          "{./templates/*}",
          "--filter",
          "@emdash-cms/playground",
          "build",
        ],
        {
          cwd: WORKSPACE_ROOT,
          timeout: 240_000,
        env: {
            ...process.env,
            CI: "true",
          },
        },
      );
    } catch (error) {
      // execFile rejections carry stdout/stderr; surface a truncated capture
      // so the failing package is visible in the test report.
      const stderr =
        error instanceof Error && "stderr" in error ? (error as { stderr: string }).stderr : "";
      const stdout =
        error instanceof Error && "stdout" in error ? (error as { stdout: string }).stdout : "";
      throw new Error(`Site builds failed:\n\n${stderr || stdout}`.slice(0, 5000), {
        cause: error,
      });
    }
  });
});
// ---------------------------------------------------------------------------
// Helpers — shared server lifecycle for runtime tests
// ---------------------------------------------------------------------------
interface BootedServer {
baseUrl: string;
process: ReturnType<typeof spawn>;
output: string;
}
/**
 * Start `astro dev` for one site, wait until it responds, and return a
 * handle whose `output` getter exposes the live stdout/stderr capture
 * (it keeps growing after this function returns).
 */
async function bootSite(site: SiteCase): Promise<BootedServer> {
  await ensureBuilt();
  // Remove stale SQLite files (db plus WAL/SHM sidecars) so each run starts fresh.
  const staleDbFiles = ["data.db", "data.db-wal", "data.db-shm"];
  for (const file of staleDbFiles) {
    rmSync(join(site.dir, file), { force: true });
  }
  const baseUrl = `http://localhost:${site.port}`;
  const devArgs = ["exec", "astro", "dev", "--port", String(site.port)];
  const serverProcess = spawn("pnpm", devArgs, {
    cwd: site.dir,
    env: {
      ...process.env,
      CI: "true",
    },
    stdio: "pipe",
  });
  // Accumulate both streams into one buffer for failure diagnostics.
  let output = "";
  const capture = (data: Buffer): void => {
    output += data.toString();
  };
  serverProcess.stdout?.on("data", capture);
  serverProcess.stderr?.on("data", capture);
  const waitPath = site.waitPath ?? "/_emdash/admin/";
  await waitForServer(`${baseUrl}${waitPath}`, site.startupTimeoutMs);
  return {
    baseUrl,
    process: serverProcess,
    // Getter rather than a snapshot: callers see output captured after boot.
    get output() {
      return output;
    },
  };
}
/**
 * Politely stop a dev server: SIGTERM now, escalate to SIGKILL after 1.2s
 * if the process hasn't acknowledged the kill, then settle for 500ms more
 * before resolving.
 */
function killServer(serverProcess: ReturnType<typeof spawn>): Promise<void> {
  serverProcess.kill("SIGTERM");
  return new Promise((resolveDone) => {
    const escalate = (): void => {
      if (!serverProcess.killed) {
        serverProcess.kill("SIGKILL");
      }
      setTimeout(resolveDone, 500);
    };
    setTimeout(escalate, 1200);
  });
}
// ---------------------------------------------------------------------------
// Runtime verification — boots each site with `astro dev` and checks that
// admin + frontend respond.
// ---------------------------------------------------------------------------
describe.sequential("Site runtime verification", () => {
  for (const site of SITE_MATRIX) {
    // Per-site defaults; individual matrix entries can override any of them.
    const setupPath = site.setupPath ?? "/_emdash/api/setup/dev-bypass?redirect=/";
    const frontendPath = site.frontendPath ?? "/";
    const frontendStatuses = site.frontendStatuses ?? [200, 302, 307, 308];
    const requireDoctype = site.requireDoctype ?? true;
    it(
      `${site.name} boots and serves admin + frontend`,
      { timeout: site.startupTimeoutMs + 120_000 },
      async () => {
        const server = await bootSite(site);
        try {
          // Setup first: dev-bypass provisions the site before the checks.
          if (setupPath) {
            const setupRes = await fetchWithRetry(`${server.baseUrl}${setupPath}`);
            expect(setupRes.status).toBeLessThan(500);
          }
          const adminRes = await fetchWithRetry(`${server.baseUrl}/_emdash/admin/`);
          expect(adminRes.status).toBeLessThan(500);
          const frontendRes = await fetchWithRetry(`${server.baseUrl}${frontendPath}`);
          expect(frontendStatuses).toContain(frontendRes.status);
          const body = await frontendRes.text();
          if (requireDoctype) {
            expect(body).toContain("<!DOCTYPE html>");
          }
        } catch (error) {
          // Attach the tail of the server log so CI failures are debuggable.
          throw new Error(
            `${site.name} smoke failed: ${error instanceof Error ? error.message : String(error)}\n\n` +
              server.output.slice(-3000),
            { cause: error },
          );
        } finally {
          await killServer(server.process);
        }
      },
    );
  }
});
// ---------------------------------------------------------------------------
// MCP endpoint verification — boots one Node and one Cloudflare site, gets a
// bearer token, and verifies the MCP server responds to tools/list.
// ---------------------------------------------------------------------------
// One Node-runtime site and one Cloudflare-runtime site, so both deploy
// targets get MCP coverage.
const MCP_SITES: SiteCase[] = SITE_MATRIX.filter(
  (s) => s.name === "templates/blog" || s.name === "templates/starter-cloudflare",
);
describe.sequential("MCP endpoint verification", () => {
  for (const site of MCP_SITES) {
    it(
      `${site.name} MCP tools/list responds with tools`,
      { timeout: site.startupTimeoutMs + 120_000 },
      async () => {
        const server = await bootSite(site);
        try {
          // Run dev-bypass with ?token=1 to get a bearer token
          const setupRes = await fetchWithRetry(
            `${server.baseUrl}/_emdash/api/setup/dev-bypass?token=1`,
          );
          expect(setupRes.status).toBeLessThan(500);
          const setupBody = (await setupRes.json()) as {
            data?: { token?: string };
          };
          const token = setupBody.data?.token;
          expect(token).toBeTruthy();
          // Send MCP initialize
          const initRes = await fetch(`${server.baseUrl}/_emdash/api/mcp`, {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
              Accept: "application/json, text/event-stream",
              Authorization: `Bearer ${token}`,
            },
            body: JSON.stringify({
              jsonrpc: "2.0",
              method: "initialize",
              params: {
                protocolVersion: "2025-03-26",
                capabilities: {},
                clientInfo: { name: "smoke-test", version: "1.0" },
              },
              id: 1,
            }),
          });
          expect(initRes.status).toBe(200);
          // Parse SSE response to extract JSON
          const initText = await initRes.text();
          const initData = parseSSE(initText);
          expect(initData).toHaveProperty("result.serverInfo.name", "emdash");
          // Send initialized notification + tools/list in one request
          // (stateless mode — each request is independent, so we send
          // the full sequence: notifications/initialized then tools/list)
          const listRes = await fetch(`${server.baseUrl}/_emdash/api/mcp`, {
            method: "POST",
            headers: {
              "Content-Type": "application/json",
              Accept: "application/json, text/event-stream",
              Authorization: `Bearer ${token}`,
            },
            body: JSON.stringify([
              {
                jsonrpc: "2.0",
                method: "notifications/initialized",
              },
              {
                jsonrpc: "2.0",
                method: "tools/list",
                params: {},
                id: 2,
              },
            ]),
          });
          expect(listRes.status).toBe(200);
          const listText = await listRes.text();
          const listData = parseSSE(listText);
          expect(listData).toHaveProperty("result.tools");
          const tools = (listData as { result: { tools: unknown[] } }).result.tools;
          expect(tools.length).toBeGreaterThan(0);
          // Verify some expected tools exist
          const toolNames = tools.map((t: unknown) => (t as { name: string }).name);
          expect(toolNames).toContain("content_list");
          expect(toolNames).toContain("schema_list_collections");
          // Send 14 concurrent tools/list calls and verify all succeed —
          // guards against an auth-middleware race observed in production
          // where parallel requests on the same authenticated session
          // occasionally returned spurious 401s. The InMemoryTransport
          // integration test cannot reach this code path; only a live
          // HTTP server exercises the auth middleware that's racy.
          const concurrentResponses = await Promise.all(
            Array.from({ length: 14 }, (_, i) =>
              fetch(`${server.baseUrl}/_emdash/api/mcp`, {
                method: "POST",
                headers: {
                  "Content-Type": "application/json",
                  Accept: "application/json, text/event-stream",
                  Authorization: `Bearer ${token}`,
                },
                body: JSON.stringify([
                  { jsonrpc: "2.0", method: "notifications/initialized" },
                  {
                    jsonrpc: "2.0",
                    method: "tools/list",
                    params: {},
                    // Distinct ids (100+) keep these apart from earlier calls.
                    id: 100 + i,
                  },
                ]),
              }),
            ),
          );
          const statusCodes = concurrentResponses.map((r) => r.status);
          const failedStatuses = statusCodes.filter((s) => s !== 200);
          expect(
            failedStatuses,
            `expected all 14 concurrent calls to return 200; got: ${statusCodes.join(",")}`,
          ).toEqual([]);
        } catch (error) {
          // Attach the tail of the server log so CI failures are debuggable.
          throw new Error(
            `${site.name} MCP smoke failed: ${error instanceof Error ? error.message : String(error)}\n\n` +
              server.output.slice(-3000),
            { cause: error },
          );
        } finally {
          await killServer(server.process);
        }
      },
    );
  }
});
/**
 * Parse the first JSON-RPC message from an SSE text response.
 * MCP stateless mode returns `event: message\ndata: {...}\n\n`.
 */
function parseSSE(text: string): unknown {
  const dataLine = text.split("\n").find((line) => line.startsWith("data: "));
  if (dataLine !== undefined) {
    return JSON.parse(dataLine.slice("data: ".length));
  }
  // Non-SSE body — treat the whole response as plain JSON.
  return JSON.parse(text);
}

View File

@@ -0,0 +1,401 @@
/**
* Smoke tests for template/demo seed fixtures.
*
* Validates that all seed files are well-formed, can be applied
* to a fresh database, and that the resulting database passes
* doctor checks. Does NOT start a dev server — these are fast,
* programmatic tests that exercise the seed/validate/apply/doctor
* pipeline directly.
*
* Also shells out to the CLI binary for seed --validate and doctor
* commands to ensure the CLI interface works correctly.
*/
import { execFile } from "node:child_process";
import { existsSync, readFileSync, readdirSync, mkdtempSync, rmSync, mkdirSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import { applySeed } from "../../../src/seed/apply.js";
import type { SeedFile } from "../../../src/seed/types.js";
import { validateSeed } from "../../../src/seed/validate.js";
import { LocalStorage } from "../../../src/storage/local.js";
import { ensureBuilt } from "../server.js";
const exec = promisify(execFile);
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
const CLI_BIN = resolve(import.meta.dirname, "../../../dist/cli/index.mjs");
const VALIDATION_FAILED_RE = /validation failed/i;
// ---------------------------------------------------------------------------
// Discover all templates and demos with seed files
// ---------------------------------------------------------------------------
interface SiteFixture {
/** Human-readable name for test output */
name: string;
/** Absolute path to the template/theme directory */
dir: string;
/** Absolute path to the seed file */
seedPath: string;
/** Parsed seed file contents */
seed: SeedFile;
}
/**
 * Scan `templates/` and `demos/` for sites that ship a seed file.
 * The seed path defaults to `.emdash/seed.json`, overridable via the
 * `emdash.seed` key in the site's package.json.
 */
function discoverFixtures(): SiteFixture[] {
  const found: SiteFixture[] = [];
  const roots = [
    { prefix: "templates", path: resolve(WORKSPACE_ROOT, "templates") },
    { prefix: "demos", path: resolve(WORKSPACE_ROOT, "demos") },
  ];
  for (const { prefix, path: parentDir } of roots) {
    if (!existsSync(parentDir)) continue;
    for (const entry of readdirSync(parentDir)) {
      const dir = join(parentDir, entry);
      // Default location unless package.json's emdash.seed points elsewhere.
      let seedPath = join(dir, ".emdash", "seed.json");
      const pkgPath = join(dir, "package.json");
      if (existsSync(pkgPath)) {
        try {
          const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
          if (pkg.emdash?.seed) {
            seedPath = join(dir, pkg.emdash.seed);
          }
        } catch {
          // Malformed package.json — fall back to the default seed path.
        }
      }
      if (!existsSync(seedPath)) continue;
      const seed = JSON.parse(readFileSync(seedPath, "utf-8")) as SeedFile;
      found.push({
        name: `${prefix}/${entry}`,
        dir,
        seedPath,
        seed,
      });
    }
  }
  return found;
}
const fixtures = discoverFixtures();
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe("Seed Fixture Smoke Tests", () => {
let tempDirs: string[] = [];
beforeAll(async () => {
// Ensure CLI binary is built for CLI-based tests
await ensureBuilt();
}, 120_000);
afterEach(() => {
// Clean up any temp directories created during tests
for (const dir of tempDirs) {
rmSync(dir, { recursive: true, force: true });
}
tempDirs = [];
});
function createTempDir(): string {
const dir = mkdtempSync(join(tmpdir(), "emdash-smoke-"));
tempDirs.push(dir);
return dir;
}
// Sanity check: we actually found fixtures to test
it("discovers at least one template/demo with a seed file", () => {
expect(fixtures.length).toBeGreaterThanOrEqual(1);
const names = fixtures.map((f) => f.name);
// At minimum the blog template should always be present.
expect(names).toContain("templates/blog");
});
// -----------------------------------------------------------------------
// Per-fixture tests
// -----------------------------------------------------------------------
for (const fixture of fixtures) {
  describe(fixture.name, () => {
    // --- Seed file is valid JSON with correct structure ---
    it("has a valid seed.json that parses as JSON", () => {
      expect(fixture.seed).toBeDefined();
      // Only seed-format version "1" exists today; anything else means the
      // fixture targets an incompatible format.
      expect(fixture.seed.version).toBe("1");
    });
    // --- Programmatic validation ---
    it("passes programmatic seed validation", () => {
      const result = validateSeed(fixture.seed);
      if (!result.valid) {
        // Include errors in failure message for debuggability
        expect.fail(`Seed validation failed:\n${result.errors.join("\n")}`);
      }
      expect(result.valid).toBe(true);
    });
    // --- CLI --validate ---
    it("passes CLI seed --validate", async () => {
      const { stdout, stderr } = await exec(
        "node",
        [CLI_BIN, "seed", fixture.seedPath, "--validate"],
        {
          cwd: fixture.dir,
          timeout: 15_000,
        },
      );
      // The validate command should succeed (exit 0) — if it throws,
      // the test will fail with the error message
      expect(stdout + stderr).not.toMatch(VALIDATION_FAILED_RE);
    });
    // --- Seed applies to fresh database ---
    it("applies seed to a fresh database without errors", { timeout: 30_000 }, async () => {
      const tempDir = createTempDir();
      const dbPath = join(tempDir, "test.db");
      const uploadsDir = join(tempDir, "uploads");
      mkdirSync(uploadsDir, { recursive: true });
      // Create database and run migrations
      const db = createDatabase({ url: `file:${dbPath}` });
      try {
        const { applied } = await runMigrations(db);
        expect(applied.length).toBeGreaterThan(0);
        // Set up local storage for media resolution
        const storage = new LocalStorage({
          directory: uploadsDir,
          baseUrl: "/_emdash/api/media/file",
        });
        // Apply seed
        const result = await applySeed(db, fixture.seed, {
          includeContent: true,
          onConflict: "skip",
          storage,
          mediaBasePath: join(fixture.dir, ".emdash"),
        });
        // Verify collections were created
        if (fixture.seed.collections && fixture.seed.collections.length > 0) {
          expect(result.collections.created).toBeGreaterThan(0);
        }
        // Verify fields were created
        const totalFields =
          fixture.seed.collections?.reduce((sum, c) => sum + (c.fields?.length ?? 0), 0) ?? 0;
        if (totalFields > 0) {
          expect(result.fields.created).toBeGreaterThan(0);
        }
        // Verify content was created if seed has content
        if (fixture.seed.content) {
          const totalEntries = Object.values(fixture.seed.content).reduce(
            (sum, entries) => sum + (Array.isArray(entries) ? entries.length : 0),
            0,
          );
          if (totalEntries > 0) {
            expect(result.content.created).toBeGreaterThan(0);
          }
        }
        // Verify taxonomy processing completed (some may be pre-seeded by migrations)
        if (fixture.seed.taxonomies && fixture.seed.taxonomies.length > 0) {
          // Taxonomies either created or already existed — just verify no crash
          expect(result.taxonomies.created + result.taxonomies.terms).toBeGreaterThanOrEqual(0);
        }
        // Verify menus if present
        if (fixture.seed.menus && fixture.seed.menus.length > 0) {
          expect(result.menus.created).toBeGreaterThan(0);
        }
      } finally {
        // Always release the SQLite handle so the temp dir can be cleaned up.
        await db.destroy();
      }
    });
    // --- CLI seed apply + doctor ---
    it("passes CLI doctor after seed apply", { timeout: 30_000 }, async () => {
      const tempDir = createTempDir();
      const dbPath = join(tempDir, "test.db");
      // Apply seed via CLI (this also runs migrations)
      await exec("node", [CLI_BIN, "seed", fixture.seedPath, "--database", dbPath], {
        cwd: fixture.dir,
        timeout: 30_000,
      });
      // Run doctor and verify all checks pass
      const { stdout } = await exec("node", [CLI_BIN, "doctor", "--database", dbPath, "--json"], {
        cwd: fixture.dir,
        timeout: 15_000,
      });
      // `doctor --json` prints an array of check results on stdout.
      const checks = JSON.parse(stdout) as Array<{
        name: string;
        status: "pass" | "warn" | "fail";
        message: string;
      }>;
      // No failures allowed
      const failures = checks.filter((c) => c.status === "fail");
      if (failures.length > 0) {
        expect.fail(
          `Doctor failures:\n${failures.map((f) => ` ${f.name}: ${f.message}`).join("\n")}`,
        );
      }
      // Database, migrations, and collections should all pass
      const dbCheck = checks.find((c) => c.name === "database");
      expect(dbCheck?.status).toBe("pass");
      const migrationsCheck = checks.find((c) => c.name === "migrations");
      expect(migrationsCheck?.status).toBe("pass");
      const collectionsCheck = checks.find((c) => c.name === "collections");
      expect(collectionsCheck?.status).toBe("pass");
    });
    // --- Idempotent re-apply ---
    it(
      "can re-apply seed with on-conflict=skip without errors",
      { timeout: 30_000 },
      async () => {
        const tempDir = createTempDir();
        const dbPath = join(tempDir, "test.db");
        const uploadsDir = join(tempDir, "uploads");
        mkdirSync(uploadsDir, { recursive: true });
        const db = createDatabase({ url: `file:${dbPath}` });
        try {
          await runMigrations(db);
          const storage = new LocalStorage({
            directory: uploadsDir,
            baseUrl: "/_emdash/api/media/file",
          });
          // NOTE(review): the first-apply test passes this path as
          // `mediaBasePath` while this one uses `seedDir` — confirm applySeed
          // accepts both option names, or unify on one.
          const seedOpts = {
            includeContent: true,
            onConflict: "skip" as const,
            storage,
            seedDir: join(fixture.dir, ".emdash"),
          };
          // First apply
          await applySeed(db, fixture.seed, seedOpts);
          // Second apply — should not throw
          const result2 = await applySeed(db, fixture.seed, seedOpts);
          // Everything should be skipped on second apply
          expect(result2.collections.created).toBe(0);
        } finally {
          await db.destroy();
        }
      },
    );
    // --- package.json has emdash.seed pointing to seed file ---
    it("has package.json with emdash.seed pointing to the seed file", () => {
      const pkgPath = join(fixture.dir, "package.json");
      if (!existsSync(pkgPath)) return; // blank template has no seed, already filtered
      const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
      // Either emdash.seed is set, or we rely on the .emdash/seed.json convention
      const seedRef = pkg.emdash?.seed;
      if (seedRef) {
        // An explicit pointer must resolve to an existing file.
        const resolvedSeedPath = resolve(fixture.dir, seedRef);
        expect(existsSync(resolvedSeedPath)).toBe(true);
      } else {
        // Convention: .emdash/seed.json exists (which it does since we're iterating fixtures)
        expect(existsSync(fixture.seedPath)).toBe(true);
      }
    });
  });
}
// -----------------------------------------------------------------------
// Cross-cutting: all templates/demos have required files
// -----------------------------------------------------------------------
describe("Required files", () => {
const roots = [
{ prefix: "templates", dir: resolve(WORKSPACE_ROOT, "templates") },
{ prefix: "demos", dir: resolve(WORKSPACE_ROOT, "demos") },
].filter((root) => existsSync(root.dir));
const allDirs = roots
.flatMap((root) =>
readdirSync(root.dir).map((entry) => ({
name: `${root.prefix}/${entry}`,
dir: join(root.dir, entry),
})),
)
.filter((d) => existsSync(join(d.dir, "package.json")));
for (const { name, dir } of allDirs) {
describe(name, () => {
it("has astro.config.mjs", () => {
expect(existsSync(join(dir, "astro.config.mjs"))).toBe(true);
});
it("has tsconfig.json", () => {
expect(existsSync(join(dir, "tsconfig.json"))).toBe(true);
});
it("has live.config.ts with emdashLoader", () => {
const liveConfig = join(dir, "src", "live.config.ts");
expect(existsSync(liveConfig)).toBe(true);
const content = readFileSync(liveConfig, "utf-8");
expect(content).toContain("emdashLoader");
expect(content).toContain("defineLiveCollection");
});
it("has typecheck script in package.json", () => {
const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
expect(pkg.scripts?.typecheck || pkg.scripts?.check).toBeDefined();
});
it("uses workspace:* for emdash dependency", () => {
const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
expect(pkg.dependencies?.emdash).toBe("workspace:*");
});
it("uses catalog: for astro dependency", () => {
const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
const astroVersion = pkg.dependencies?.astro;
expect(astroVersion).toBe("catalog:");
});
});
}
});
});

View File

@@ -0,0 +1,169 @@
/**
* Integration test for the full preview snapshot auth flow.
*
* Tests the complete chain that would have caught bug #3:
* signPreviewUrl → middleware builds header → snapshot endpoint parses and verifies
*
* The signing side (signPreviewUrl) lives in @emdash-cms/cloudflare, but we
* inline the same HMAC logic here to test the format contract without
* cross-package imports.
*/
import { sql } from "kysely";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
generateSnapshot,
parsePreviewSignatureHeader,
verifyPreviewSignature,
} from "../../../src/api/handlers/snapshot.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
const SECRET = "test-preview-secret";
/**
* Sign a preview URL using the same HMAC-SHA256 logic as
* @emdash-cms/cloudflare signPreviewUrl(). Inlined here so we test
* the format contract without cross-package deps.
*/
async function signPreview(
source: string,
ttl = 3600,
): Promise<{ source: string; exp: number; sig: string }> {
const exp = Math.floor(Date.now() / 1000) + ttl;
const encoder = new TextEncoder();
const key = await crypto.subtle.importKey(
"raw",
encoder.encode(SECRET),
{ name: "HMAC", hash: "SHA-256" },
false,
["sign"],
);
const buffer = await crypto.subtle.sign("HMAC", key, encoder.encode(`${source}:${exp}`));
const sig = Array.from(new Uint8Array(buffer), (b) => b.toString(16).padStart(2, "0")).join("");
return { source, exp, sig };
}
/**
 * Assemble the X-Preview-Signature header value exactly as the preview
 * middleware does: the three parts joined by ":" into "source:exp:sig".
 * The source URL may itself contain ":" — parsing that back out is the
 * parser's job, not this builder's.
 */
function buildSignatureHeader(parts: { source: string; exp: number; sig: string }): string {
  return [parts.source, parts.exp, parts.sig].join(":");
}
describe("preview snapshot auth flow", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
await db.destroy();
});
it("end-to-end: signed preview URL → header → snapshot access", async () => {
// 1. Insert some content so snapshot has data
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('p1', 'test-post', 'published', 'Test', 'Body', datetime('now'), datetime('now'), 1)
`.execute(db);
// 2. Sign a preview URL (same logic as @emdash-cms/cloudflare signPreviewUrl)
const signed = await signPreview("https://mysite.com");
// 3. Build the header the way the preview middleware does
const headerValue = buildSignatureHeader(signed);
// 4. Parse the header the way the snapshot endpoint does
const parsed = parsePreviewSignatureHeader(headerValue);
expect(parsed).not.toBeNull();
expect(parsed!.source).toBe("https://mysite.com");
expect(parsed!.exp).toBe(signed.exp);
expect(parsed!.sig).toBe(signed.sig);
// 5. Verify the signature the way the snapshot endpoint does
const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, parsed!.sig, SECRET);
expect(valid).toBe(true);
// 6. Actually generate the snapshot (proves auth would grant access)
const snapshot = await generateSnapshot(db);
expect(snapshot.tables.ec_post).toHaveLength(1);
expect(snapshot.tables.ec_post[0]!.slug).toBe("test-post");
});
it("rejects tampered signature", async () => {
const signed = await signPreview("https://mysite.com");
const headerValue = buildSignatureHeader(signed);
const parsed = parsePreviewSignatureHeader(headerValue);
expect(parsed).not.toBeNull();
// Tamper with the signature
const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, "a".repeat(64), SECRET);
expect(valid).toBe(false);
});
it("rejects wrong secret", async () => {
const signed = await signPreview("https://mysite.com");
const headerValue = buildSignatureHeader(signed);
const parsed = parsePreviewSignatureHeader(headerValue);
expect(parsed).not.toBeNull();
const valid = await verifyPreviewSignature(
parsed!.source,
parsed!.exp,
parsed!.sig,
"wrong-secret",
);
expect(valid).toBe(false);
});
it("rejects expired signature", async () => {
// Sign with TTL of -1 (already expired)
const signed = await signPreview("https://mysite.com", -1);
const headerValue = buildSignatureHeader(signed);
const parsed = parsePreviewSignatureHeader(headerValue);
expect(parsed).not.toBeNull();
const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, parsed!.sig, SECRET);
expect(valid).toBe(false);
});
});
describe("parsePreviewSignatureHeader", () => {
it("parses source URLs with colons correctly", async () => {
const signed = await signPreview("https://mysite.com:8080");
const header = buildSignatureHeader(signed);
const parsed = parsePreviewSignatureHeader(header);
expect(parsed).not.toBeNull();
expect(parsed!.source).toBe("https://mysite.com:8080");
expect(parsed!.exp).toBe(signed.exp);
expect(parsed!.sig).toBe(signed.sig);
});
it("rejects empty string", () => {
expect(parsePreviewSignatureHeader("")).toBeNull();
});
it("rejects header with no colons", () => {
expect(parsePreviewSignatureHeader("noseparators")).toBeNull();
});
it("rejects header with sig wrong length", () => {
expect(parsePreviewSignatureHeader("https://x.com:12345:tooshort")).toBeNull();
});
it("rejects header with non-numeric exp", () => {
expect(parsePreviewSignatureHeader(`https://x.com:notanumber:${"a".repeat(64)}`)).toBeNull();
});
});

View File

@@ -0,0 +1,217 @@
import { sql } from "kysely";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Snapshot } from "../../../src/api/handlers/snapshot.js";
import { generateSnapshot } from "../../../src/api/handlers/snapshot.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
describe("generateSnapshot", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
await db.destroy();
});
it("returns empty tables when no content exists", async () => {
const snapshot = await generateSnapshot(db);
expect(snapshot.generatedAt).toBeTruthy();
expect(typeof snapshot.generatedAt).toBe("string");
// Schema should include ec_post and ec_page (even with no rows)
expect(snapshot.schema).toHaveProperty("ec_post");
expect(snapshot.schema).toHaveProperty("ec_page");
expect(snapshot.schema.ec_post.columns).toContain("id");
expect(snapshot.schema.ec_post.columns).toContain("title");
expect(snapshot.schema.ec_post.columns).toContain("slug");
expect(snapshot.schema.ec_post.columns).toContain("status");
// System tables with data should appear
expect(snapshot.schema).toHaveProperty("_emdash_collections");
expect(snapshot.schema).toHaveProperty("_emdash_fields");
// _emdash_collections should have 2 rows (post + page)
expect(snapshot.tables._emdash_collections).toHaveLength(2);
});
it("includes published content and excludes drafts by default", async () => {
// Insert a published post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('pub1', 'hello-world', 'published', 'Hello World', 'Content here', datetime('now'), datetime('now'), 1)
`.execute(db);
// Insert a draft post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('draft1', 'draft-post', 'draft', 'Draft Post', 'Draft content', datetime('now'), datetime('now'), 1)
`.execute(db);
const snapshot = await generateSnapshot(db);
// Only published content should appear
expect(snapshot.tables.ec_post).toHaveLength(1);
expect(snapshot.tables.ec_post[0].slug).toBe("hello-world");
});
it("includes drafts when includeDrafts is true", async () => {
// Insert a published post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('pub1', 'hello-world', 'published', 'Hello World', 'Content', datetime('now'), datetime('now'), 1)
`.execute(db);
// Insert a draft post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('draft1', 'draft-post', 'draft', 'Draft Post', 'Draft', datetime('now'), datetime('now'), 1)
`.execute(db);
const snapshot = await generateSnapshot(db, { includeDrafts: true });
// Both should appear
expect(snapshot.tables.ec_post).toHaveLength(2);
});
it("excludes soft-deleted content", async () => {
// Insert a published post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('pub1', 'live-post', 'published', 'Live', 'Content', datetime('now'), datetime('now'), 1)
`.execute(db);
// Insert a soft-deleted post
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, deleted_at, version)
VALUES ('del1', 'deleted-post', 'published', 'Deleted', 'Gone', datetime('now'), datetime('now'), datetime('now'), 1)
`.execute(db);
const snapshot = await generateSnapshot(db);
expect(snapshot.tables.ec_post).toHaveLength(1);
expect(snapshot.tables.ec_post[0].slug).toBe("live-post");
});
it("excludes auth and security tables", async () => {
const snapshot = await generateSnapshot(db);
// These should not appear in schema or tables
expect(snapshot.schema).not.toHaveProperty("users");
expect(snapshot.schema).not.toHaveProperty("sessions");
expect(snapshot.schema).not.toHaveProperty("credentials");
expect(snapshot.schema).not.toHaveProperty("challenges");
expect(snapshot.schema).not.toHaveProperty("_emdash_api_tokens");
expect(snapshot.schema).not.toHaveProperty("_emdash_oauth_tokens");
});
it("includes system tables needed for rendering", async () => {
const snapshot = await generateSnapshot(db);
// These system tables should have schema entries
expect(snapshot.schema).toHaveProperty("_emdash_collections");
expect(snapshot.schema).toHaveProperty("_emdash_fields");
expect(snapshot.schema).toHaveProperty("_emdash_migrations");
expect(snapshot.schema).toHaveProperty("options");
});
it("includes column type info in schema", async () => {
const snapshot = await generateSnapshot(db);
const postSchema = snapshot.schema.ec_post;
expect(postSchema).toBeDefined();
expect(postSchema.types).toBeDefined();
// PRAGMA table_info returns types as declared (case-sensitive)
// Kysely creates tables with lowercase types
expect(postSchema.types!.id.toLowerCase()).toBe("text");
expect(postSchema.types!.version.toLowerCase()).toBe("integer");
});
it("snapshot shape matches DO expectation", async () => {
await sql`
INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
VALUES ('p1', 'test', 'published', 'Test', 'Body', datetime('now'), datetime('now'), 1)
`.execute(db);
const snapshot: Snapshot = await generateSnapshot(db);
// Verify shape matches what EmDashPreviewDB.applySnapshot expects
expect(snapshot).toHaveProperty("tables");
expect(snapshot).toHaveProperty("schema");
expect(snapshot).toHaveProperty("generatedAt");
expect(typeof snapshot.generatedAt).toBe("string");
// Tables are Record<string, Record<string, unknown>[]>
for (const [tableName, rows] of Object.entries(snapshot.tables)) {
expect(typeof tableName).toBe("string");
expect(Array.isArray(rows)).toBe(true);
for (const row of rows) {
expect(typeof row).toBe("object");
}
}
// Schema has columns and types
for (const [tableName, info] of Object.entries(snapshot.schema)) {
expect(typeof tableName).toBe("string");
expect(Array.isArray(info.columns)).toBe(true);
if (info.types) {
expect(typeof info.types).toBe("object");
}
}
});
it("filters options table to safe rendering prefixes only", async () => {
// Insert site settings (safe — should be included)
await sql`INSERT INTO options (name, value) VALUES ('site:title', '"My Site"')`.execute(db);
await sql`INSERT INTO options (name, value) VALUES ('site:tagline', '"Welcome"')`.execute(db);
// Insert plugin secrets (unsafe — should be excluded)
await sql`INSERT INTO options (name, value) VALUES ('plugin:smtp:api_key', '"sk-secret-123"')`.execute(
db,
);
await sql`INSERT INTO options (name, value) VALUES ('plugin:seo:license', '"lic-456"')`.execute(
db,
);
// Insert setup/auth data (unsafe — should be excluded)
await sql`INSERT INTO options (name, value) VALUES ('emdash:setup_complete', 'true')`.execute(
db,
);
await sql`INSERT INTO options (name, value) VALUES ('emdash:passkey_pending:user1', '{"challenge":"abc"}')`.execute(
db,
);
const snapshot = await generateSnapshot(db);
const optionsRows = snapshot.tables.options;
expect(optionsRows).toBeDefined();
expect(optionsRows).toHaveLength(2);
const names = optionsRows.map((r) => r.name);
expect(names).toContain("site:title");
expect(names).toContain("site:tagline");
expect(names).not.toContain("plugin:smtp:api_key");
expect(names).not.toContain("plugin:seo:license");
expect(names).not.toContain("emdash:setup_complete");
expect(names).not.toContain("emdash:passkey_pending:user1");
});
it("discovers content tables dynamically", async () => {
// The test setup creates ec_post and ec_page
const snapshot = await generateSnapshot(db);
expect(snapshot.schema).toHaveProperty("ec_post");
expect(snapshot.schema).toHaveProperty("ec_page");
// Verify column discovery matches what we created
expect(snapshot.schema.ec_post.columns).toContain("title");
expect(snapshot.schema.ec_post.columns).toContain("content");
expect(snapshot.schema.ec_page.columns).toContain("title");
expect(snapshot.schema.ec_page.columns).toContain("content");
});
});

View File

@@ -0,0 +1,170 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
  Minimal WXR fixture to reproduce emdash-cms/emdash#79:
  WordPress import crashes on custom post types with hyphens.
  Hyphenated post types modeled on popular plugins:
  - Elementor Header Footer Builder: elementor-hf
  - WooCommerce-style: shop-order, shop-coupon (core WooCommerce actually
    registers shop_order/shop_coupon with underscores; the hyphenated
    variants here deliberately exercise the hyphen code path)
  - ACF: acf-field-group
-->
<rss version="2.0"
xmlns:excerpt="http://wordpress.org/export/1.2/excerpt/"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:wfw="http://wellformedweb.org/CommentAPI/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:wp="http://wordpress.org/export/1.2/"
>
<channel>
<title>Plugin-Heavy WP Site</title>
<link>https://example.com</link>
<description>WordPress site with plugins that use hyphenated post types</description>
<pubDate>Mon, 07 Apr 2026 12:00:00 +0000</pubDate>
<language>en-US</language>
<wp:wxr_version>1.2</wp:wxr_version>
<wp:base_site_url>https://example.com</wp:base_site_url>
<wp:base_blog_url>https://example.com</wp:base_blog_url>
<wp:author>
<wp:author_id>1</wp:author_id>
<wp:author_login><![CDATA[admin]]></wp:author_login>
<wp:author_email><![CDATA[admin@example.com]]></wp:author_email>
<wp:author_display_name><![CDATA[Admin]]></wp:author_display_name>
</wp:author>
<!-- Standard post (control — should import normally) -->
<item>
<title>Hello World</title>
<link>https://example.com/hello-world/</link>
<pubDate>Mon, 15 Jan 2025 10:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=1</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>A normal blog post.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>1</wp:post_id>
<wp:post_date><![CDATA[2025-01-15 10:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-15 10:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-15 10:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-15 10:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[hello-world]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Elementor header/footer template (hyphenated post type: elementor-hf) -->
<item>
<title>Site Header</title>
<link>https://example.com/?p=200</link>
<pubDate>Tue, 16 Jan 2025 08:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=200</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Elementor header template content.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>200</wp:post_id>
<wp:post_date><![CDATA[2025-01-16 08:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-16 08:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-16 08:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-16 08:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[site-header]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[elementor-hf]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- WooCommerce order (hyphenated post type: shop-order) -->
<item>
<title>Order #1001</title>
<link>https://example.com/?p=300</link>
<pubDate>Wed, 17 Jan 2025 14:30:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=300</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>300</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 14:30:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 14:30:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 14:30:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 14:30:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[order-1001]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[shop-order]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- WooCommerce coupon (hyphenated post type: shop-coupon) -->
<item>
<title>SUMMER20</title>
<link>https://example.com/?p=301</link>
<pubDate>Wed, 17 Jan 2025 15:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=301</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>301</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 15:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 15:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 15:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 15:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[summer20]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[shop-coupon]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- ACF field group (hyphenated post type: acf-field-group) -->
<item>
<title>Product Details</title>
<link>https://example.com/?p=400</link>
<pubDate>Thu, 18 Jan 2025 09:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=400</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>400</wp:post_id>
<wp:post_date><![CDATA[2025-01-18 09:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-18 09:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-18 09:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-18 09:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[product-details]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[acf-field-group]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
</channel>
</rss>

View File

@@ -0,0 +1,337 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!-- WordPress WXR fixture for e2e tests -->
<rss version="2.0"
xmlns:excerpt="http://wordpress.org/export/1.2/excerpt/"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:wfw="http://wellformedweb.org/CommentAPI/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:wp="http://wordpress.org/export/1.2/"
>
<channel>
<title>Test Blog</title>
<link>https://example.com</link>
<description>A test WordPress site</description>
<pubDate>Sun, 19 Jan 2025 12:00:00 +0000</pubDate>
<language>en-US</language>
<wp:wxr_version>1.2</wp:wxr_version>
<wp:base_site_url>https://example.com</wp:base_site_url>
<wp:base_blog_url>https://example.com</wp:base_blog_url>
<wp:author>
<wp:author_id>1</wp:author_id>
<wp:author_login><![CDATA[admin]]></wp:author_login>
<wp:author_email><![CDATA[admin@example.com]]></wp:author_email>
<wp:author_display_name><![CDATA[Site Admin]]></wp:author_display_name>
</wp:author>
<wp:category>
<wp:term_id>2</wp:term_id>
<wp:category_nicename><![CDATA[tutorials]]></wp:category_nicename>
<wp:category_parent></wp:category_parent>
<wp:cat_name><![CDATA[Tutorials]]></wp:cat_name>
</wp:category>
<wp:category>
<wp:term_id>3</wp:term_id>
<wp:category_nicename><![CDATA[news]]></wp:category_nicename>
<wp:category_parent></wp:category_parent>
<wp:cat_name><![CDATA[News]]></wp:cat_name>
</wp:category>
<wp:tag>
<wp:term_id>4</wp:term_id>
<wp:tag_slug><![CDATA[featured]]></wp:tag_slug>
<wp:tag_name><![CDATA[Featured]]></wp:tag_name>
</wp:tag>
<!-- Post 1: Simple Gutenberg content -->
<item>
<title>Hello World</title>
<link>https://example.com/2025/01/hello-world/</link>
<pubDate>Mon, 15 Jan 2025 10:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=1</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Welcome to our new blog! This is a <strong>test post</strong> with some <em>formatting</em>.</p>
<!-- /wp:paragraph -->
<!-- wp:heading -->
<h2>Getting Started</h2>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>Here's how to get started with our platform.</p>
<!-- /wp:paragraph -->
<!-- wp:list -->
<ul>
<li>Step one</li>
<li>Step two</li>
<li>Step three</li>
</ul>
<!-- /wp:list -->]]></content:encoded>
<excerpt:encoded><![CDATA[Welcome to our new blog!]]></excerpt:encoded>
<wp:post_id>1</wp:post_id>
<wp:post_date><![CDATA[2025-01-15 10:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-15 10:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-15 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-15 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[hello-world]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<category domain="category" nicename="tutorials"><![CDATA[Tutorials]]></category>
<category domain="post_tag" nicename="featured"><![CDATA[Featured]]></category>
<wp:postmeta>
<wp:meta_key><![CDATA[_edit_last]]></wp:meta_key>
<wp:meta_value><![CDATA[1]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[_yoast_wpseo_title]]></wp:meta_key>
<wp:meta_value><![CDATA[Hello World - Welcome Post]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[_yoast_wpseo_metadesc]]></wp:meta_key>
<wp:meta_value><![CDATA[Our first blog post welcoming visitors.]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[custom_field]]></wp:meta_key>
<wp:meta_value><![CDATA[custom value]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Post 2: With image and quote -->
<item>
<title>Advanced Features</title>
<link>https://example.com/2025/01/advanced-features/</link>
<pubDate>Wed, 17 Jan 2025 14:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=2</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Let's explore some advanced features.</p>
<!-- /wp:paragraph -->
<!-- wp:image {"id":100,"sizeSlug":"large"} -->
<figure class="wp-block-image size-large"><img src="https://example.com/wp-content/uploads/2025/01/hero.jpg" alt="Hero image" class="wp-image-100"/><figcaption>Our hero image</figcaption></figure>
<!-- /wp:image -->
<!-- wp:quote -->
<blockquote class="wp-block-quote"><p>This is an inspiring quote about technology.</p><cite>Famous Person</cite></blockquote>
<!-- /wp:quote -->
<!-- wp:code -->
<pre class="wp-block-code"><code>const hello = "world";
console.log(hello);</code></pre>
<!-- /wp:code -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>2</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 14:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 14:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 14:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 14:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[advanced-features]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<category domain="category" nicename="tutorials"><![CDATA[Tutorials]]></category>
<wp:postmeta>
<wp:meta_key><![CDATA[_thumbnail_id]]></wp:meta_key>
<wp:meta_value><![CDATA[100]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Post 3: Draft post -->
<item>
<title>Work in Progress</title>
<link>https://example.com/?p=3</link>
<pubDate>Thu, 18 Jan 2025 09:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=3</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>This post is still being written.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>3</wp:post_id>
<wp:post_date><![CDATA[2025-01-18 09:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-18 09:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-18 10:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-18 10:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[work-in-progress]]></wp:post_name>
<wp:status><![CDATA[draft]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Page 1: About page -->
<item>
<title>About Us</title>
<link>https://example.com/about/</link>
<pubDate>Sat, 01 Jan 2025 12:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?page_id=10</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Welcome to our About page. We are a team of passionate developers.</p>
<!-- /wp:paragraph -->
<!-- wp:heading {"level":3} -->
<h3>Our Mission</h3>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>To build great software that helps people.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>10</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-10 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-10 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[about]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[page]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Page 2: Contact page (child of About) -->
<item>
<title>Contact</title>
<link>https://example.com/about/contact/</link>
<pubDate>Sat, 01 Jan 2025 12:30:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?page_id=11</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Get in touch with us at <a href="mailto:hello@example.com">hello@example.com</a>.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>11</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:30:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:30:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-01 12:30:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-01 12:30:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[contact]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>10</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[page]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Attachment -->
<item>
<title>hero</title>
<link>https://example.com/hero/</link>
<pubDate>Wed, 17 Jan 2025 13:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/wp-content/uploads/2025/01/hero.jpg</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[Hero image for the site]]></excerpt:encoded>
<wp:post_id>100</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 13:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 13:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 13:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 13:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[hero]]></wp:post_name>
<wp:status><![CDATA[inherit]]></wp:status>
<wp:post_parent>2</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[attachment]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<wp:attachment_url><![CDATA[https://example.com/wp-content/uploads/2025/01/hero.jpg]]></wp:attachment_url>
<wp:postmeta>
<wp:meta_key><![CDATA[_wp_attached_file]]></wp:meta_key>
<wp:meta_value><![CDATA[2025/01/hero.jpg]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Nav menu item (should be skipped) -->
<item>
<title>Home</title>
<link>https://example.com/?p=50</link>
<pubDate>Sat, 01 Jan 2025 12:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=50</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>50</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-01 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[home]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>1</wp:menu_order>
<wp:post_type><![CDATA[nav_menu_item]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Reusable Block (wp_block) - should be imported as section. NOTE(review): wp:post_id 100 duplicates the attachment item's id above; confirm the importer keys items by slug/type rather than post_id. -->
<item>
<title>Newsletter CTA</title>
<link>https://example.com/?p=100</link>
<pubDate>Mon, 20 Jan 2025 10:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=100</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:heading {"level":3} -->
<h3>Subscribe to Our Newsletter</h3>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>Get the latest updates delivered to your inbox.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>100</wp:post_id>
<wp:post_date><![CDATA[2025-01-20 10:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-20 10:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-20 10:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-20 10:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[newsletter-cta]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[wp_block]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
</channel>
</rss>

View File

@@ -0,0 +1,508 @@
/**
* E2E tests for WordPress import CLI
*
* Tests the full two-phase import flow:
* - Phase 1: Prepare (analyze WXR, generate config)
* - Phase 2: Execute (import content using config)
*
* Also tests: --dry-run, --resume, --json flags
*/
import { mkdtemp, rm, readFile, writeFile, readdir } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
prepareWordPressImport,
executeWordPressImport,
type MigrationConfig,
type ImportProgress,
} from "../../../src/cli/commands/import/wordpress.js";
// WXR fixture shared by every test: per the count comments below it holds
// 3 posts, 2 pages, 1 attachment, 1 wp_block, and 1 nav_menu_item.
const FIXTURE_PATH = join(import.meta.dirname, "fixtures", "sample-export.xml");
describe("WordPress Import Integration", () => {
  // Each test runs in an isolated temp directory, created fresh in
  // beforeEach and removed in afterEach so tests cannot leak state.
  let testDir: string;
  beforeEach(async () => {
    testDir = await mkdtemp(join(tmpdir(), "emdash-wp-import-"));
  });
  afterEach(async () => {
    await rm(testDir, { recursive: true, force: true });
  });
  // Phase 1 ("prepare") analyzes the WXR export and writes the migration
  // config (.wp-migration.json) plus a suggested-live.config.ts; it does
  // not import any content.
  describe("Phase 1: Prepare", () => {
    it("analyzes WXR and generates migration config", async () => {
      const configPath = join(testDir, ".wp-migration.json");
      await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: false,
        json: false,
      });
      // Check config was created
      const configContent = await readFile(configPath, "utf-8");
      const config: MigrationConfig = JSON.parse(configContent);
      // Verify site info
      expect(config.site.title).toBe("Test Blog");
      expect(config.site.url).toBe("https://example.com");
      // Verify collections discovered
      expect(config.collections.post).toEqual({
        collection: "posts",
        enabled: true,
        count: 3,
      });
      expect(config.collections.page).toEqual({
        collection: "pages",
        enabled: true,
        count: 2,
      });
      // nav_menu_item should be disabled (if it exists in the export)
      if (config.collections.nav_menu_item) {
        expect(config.collections.nav_menu_item.enabled).toBe(false);
      }
      // Verify custom fields discovered
      expect(config.fields._yoast_wpseo_title).toEqual({
        field: "seo.title",
        type: "string",
        enabled: true,
        count: 1,
        samples: expect.any(Array),
      });
      expect(config.fields._yoast_wpseo_metadesc?.field).toBe("seo.description");
      expect(config.fields._thumbnail_id?.field).toBe("featuredImage");
      expect(config.fields.custom_field?.enabled).toBe(true);
      // Internal fields should be disabled
      expect(config.fields._edit_last?.enabled).toBe(false);
    });
    it("generates suggested live.config.ts", async () => {
      const configPath = join(testDir, ".wp-migration.json");
      await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: false,
        json: false,
      });
      const liveConfigPath = join(testDir, "suggested-live.config.ts");
      const liveConfig = await readFile(liveConfigPath, "utf-8");
      // Collections are now created via Admin UI, so this generates helpful comments
      expect(liveConfig).toContain("Suggested EmDash collections");
      expect(liveConfig).toContain("/_emdash/admin/content-types");
      expect(liveConfig).toContain('post → "posts"');
      expect(liveConfig).toContain('page → "pages"');
      expect(liveConfig).toContain("portableText");
    });
    it("dry-run does not create files", async () => {
      const configPath = join(testDir, ".wp-migration.json");
      const result = await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: true,
        json: false,
      });
      // Result should indicate dry run
      expect(result.dryRun).toBe(true);
      expect(result.files).toContainEqual({
        path: configPath,
        action: "would_create",
      });
      // Files should NOT exist
      await expect(readFile(configPath)).rejects.toThrow();
      await expect(readFile(join(testDir, "suggested-live.config.ts"))).rejects.toThrow();
    });
    it("returns structured JSON result", async () => {
      const configPath = join(testDir, ".wp-migration.json");
      const result = await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: false,
        json: true,
      });
      expect(result.success).toBe(true);
      expect(result.phase).toBe("prepare");
      expect(result.summary.postsAnalyzed).toBe(7); // 3 posts + 2 pages + 1 attachment + 1 wp_block (excludes nav_menu_item)
      expect(result.files.length).toBe(2);
      expect(result.nextSteps.length).toBeGreaterThan(0);
    });
  });
  // Phase 2 ("execute") consumes the config produced by prepare and writes
  // the content JSON files, redirects map, and a resumable progress file.
  describe("Phase 2: Execute", () => {
    let configPath: string;
    beforeEach(async () => {
      // Run prepare first to create config
      configPath = join(testDir, ".wp-migration.json");
      await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: false,
        json: false,
      });
    });
    it("imports posts and pages to correct directories", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      // Check posts directory
      const posts = await readdir(join(testDir, "posts"));
      expect(posts).toContain("hello-world.json");
      expect(posts).toContain("advanced-features.json");
      expect(posts).toContain("work-in-progress.json");
      expect(posts.length).toBe(3);
      // Check pages directory
      const pages = await readdir(join(testDir, "pages"));
      expect(pages).toContain("about.json");
      expect(pages).toContain("contact.json");
      expect(pages.length).toBe(2);
    });
    it("converts Gutenberg blocks to Portable Text", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
      const post = JSON.parse(postContent);
      // Check content is Portable Text array
      expect(Array.isArray(post.content)).toBe(true);
      expect(post.content.length).toBeGreaterThan(0);
      // Check for expected block types
      const blockTypes = post.content.map((b: { _type: string }) => b._type);
      expect(blockTypes).toContain("block"); // paragraphs and headings
      // Check paragraph content
      const firstBlock = post.content[0];
      expect(firstBlock._type).toBe("block");
      expect(firstBlock.children[0].text).toContain("Welcome to our new blog");
    });
    it("maps custom fields correctly", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
      const post = JSON.parse(postContent);
      // Check SEO fields (nested)
      expect(post.seo?.title).toBe("Hello World - Welcome Post");
      expect(post.seo?.description).toBe("Our first blog post welcoming visitors.");
      // Check custom field
      expect(post.custom_field).toBe("custom value");
    });
    it("preserves post metadata", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
      const post = JSON.parse(postContent);
      expect(post.title).toBe("Hello World");
      expect(post.status).toBe("published");
      expect(post.author).toBe("admin");
      expect(post.excerpt).toBe("Welcome to our new blog!");
      expect(post.categories).toContain("tutorials");
      expect(post.tags).toContain("featured");
      // Check WordPress metadata preserved
      expect(post._wp.id).toBe(1);
      expect(post._wp.link).toBe("https://example.com/2025/01/hello-world/");
    });
    it("handles draft posts correctly", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const postContent = await readFile(join(testDir, "posts", "work-in-progress.json"), "utf-8");
      const post = JSON.parse(postContent);
      expect(post.status).toBe("draft");
    });
    it("creates redirects map", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const redirectsContent = await readFile(join(testDir, "_redirects.json"), "utf-8");
      const redirects = JSON.parse(redirectsContent);
      expect(redirects["https://example.com/2025/01/hello-world/"]).toBe("/posts/hello-world");
      expect(redirects["https://example.com/about/"]).toBe("/pages/about");
    });
    it("dry-run shows what would be created", async () => {
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: true,
        json: false,
        resume: false,
      });
      expect(result.dryRun).toBe(true);
      expect(result.summary.postsImported).toBe(5);
      // Check files would be created
      const wouldCreate = result.files.filter((f) => f.action === "would_create");
      expect(wouldCreate.length).toBeGreaterThan(0);
      // Actual files should NOT exist
      await expect(readdir(join(testDir, "posts"))).rejects.toThrow();
    });
    it("creates progress file for resumability", async () => {
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      const progressContent = await readFile(join(testDir, ".wp-migration-progress.json"), "utf-8");
      const progress: ImportProgress = JSON.parse(progressContent);
      expect(progress.importedPosts.length).toBe(5);
      expect(progress.stats.importedPosts).toBe(5);
      expect(progress.stats.totalPosts).toBe(7); // 3 posts + 2 pages + 1 attachment + 1 wp_block (nav_menu_item excluded)
      expect(progress.errors.length).toBe(0);
    });
    it("resume skips already-imported posts", async () => {
      // First import
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      // Second import with resume
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: true,
        resume: true,
      });
      // All should be skipped (resumed)
      expect(result.summary.postsImported).toBe(0);
      expect(result.summary.postsSkipped).toBe(7); // 5 content items + 1 attachment + 1 wp_block
    });
    it("resume imports only new posts", async () => {
      // First import
      await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      });
      // Modify progress to simulate partial import
      const progressPath = join(testDir, ".wp-migration-progress.json");
      const progressContent = await readFile(progressPath, "utf-8");
      const progress: ImportProgress = JSON.parse(progressContent);
      // Remove last 2 posts from imported list
      progress.importedPosts = progress.importedPosts.slice(0, 3);
      progress.stats.importedPosts = 3;
      await writeFile(progressPath, JSON.stringify(progress, null, 2));
      // Delete those files too
      await rm(join(testDir, "pages", "about.json"));
      await rm(join(testDir, "pages", "contact.json"));
      // Resume import
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: true,
        resume: true,
      });
      // Should import only the 2 missing pages
      expect(result.summary.postsImported).toBe(2);
      expect(result.summary.postsSkipped).toBe(5); // 3 + 1 attachment + 1 wp_block
      // Files should exist again
      const pages = await readdir(join(testDir, "pages"));
      expect(pages).toContain("about.json");
      expect(pages).toContain("contact.json");
    });
    it("returns structured JSON result", async () => {
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: true,
        resume: false,
      });
      expect(result.success).toBe(true);
      expect(result.phase).toBe("execute");
      expect(result.summary.postsImported).toBe(5);
      expect(result.summary.errors).toBe(0);
      expect(result.files.length).toBeGreaterThan(0);
      expect(result.files.every((f) => f.action === "created")).toBe(true);
    });
    it("skips disabled post types", async () => {
      // Modify config to disable pages
      const config: MigrationConfig = JSON.parse(await readFile(configPath, "utf-8"));
      config.collections.page.enabled = false;
      await writeFile(configPath, JSON.stringify(config, null, 2));
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: true,
        resume: false,
      });
      // Only posts should be imported
      expect(result.summary.postsImported).toBe(3);
      expect(result.summary.postsSkipped).toBe(4); // 2 pages + 1 attachment + 1 wp_block
      // Pages directory should not exist
      await expect(readdir(join(testDir, "pages"))).rejects.toThrow();
    });
  });
  // Failure-mode and recovery behaviour.
  describe("Edge Cases", () => {
    it("handles missing config file gracefully", async () => {
      const badConfigPath = join(testDir, "nonexistent.json");
      await expect(
        executeWordPressImport(FIXTURE_PATH, {
          outputDir: testDir,
          configPath: badConfigPath,
          skipMedia: true,
          verbose: false,
          dryRun: false,
          json: false,
          resume: false,
        }),
      ).rejects.toThrow();
    });
    it("handles empty progress file on resume", async () => {
      // Create config first
      const configPath = join(testDir, ".wp-migration.json");
      await prepareWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        verbose: false,
        dryRun: false,
        json: false,
      });
      // Resume without prior import should work (fresh start)
      const result = await executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: true,
        resume: true,
      });
      expect(result.summary.postsImported).toBe(5);
    });
  });
});

View File

@@ -0,0 +1,366 @@
/**
* Integration tests using WordPress Theme Unit Test data
*
* Tests the full WordPress migration pipeline against the official
* WordPress Theme Unit Test dataset. The test data is downloaded from
* GitHub on first run and cached locally.
*
* @see https://github.com/WordPress/theme-test-data
*/
import { createReadStream, existsSync } from "node:fs";
import { mkdir, writeFile } from "node:fs/promises";
import { dirname, join } from "node:path";
import { gutenbergToPortableText } from "@emdash-cms/gutenberg-to-portable-text";
import { describe, it, expect, beforeAll } from "vitest";
import { parseWxr } from "../../../src/cli/wxr/parser.js";
// Test regex patterns
// Matches a <p> element whose body has at least one character and no nested
// markup (no '<' before the closing tag).
// NOTE(review): not referenced in the tests visible here — confirm it is
// used further down the file before removing.
const PARAGRAPH_WITH_TEXT_REGEX = /<p[^>]*>[^<]+<\/p>/;
// Local cache path for the official WordPress theme unit test export;
// downloaded on first run (see ensureTestData below) and reused afterwards.
const TEST_DATA_PATH = join(
  process.cwd(),
  "../../examples/wp-theme-unit-test/themeunittestdata.wordpress.xml",
);
// Canonical upstream source of the dataset (WordPress/theme-test-data repo).
const TEST_DATA_URL =
  "https://raw.githubusercontent.com/WordPress/theme-test-data/master/themeunittestdata.wordpress.xml";
/**
 * Make sure the WordPress theme unit test WXR export is present on disk,
 * fetching it from GitHub when no cached copy exists at TEST_DATA_PATH.
 *
 * @throws Error when the download request does not return an OK response.
 */
async function ensureTestData(): Promise<void> {
  if (existsSync(TEST_DATA_PATH)) {
    // Cached copy already present — nothing to do.
    return;
  }
  console.log(`Downloading WordPress theme unit test data from ${TEST_DATA_URL}...`);
  const res = await fetch(TEST_DATA_URL);
  if (!res.ok) {
    throw new Error(`Failed to download test data: ${res.status} ${res.statusText}`);
  }
  const xml = await res.text();
  // Create the cache directory lazily so a fresh checkout works.
  await mkdir(dirname(TEST_DATA_PATH), { recursive: true });
  await writeFile(TEST_DATA_PATH, xml, "utf-8");
  console.log(`Downloaded to ${TEST_DATA_PATH}`);
}
describe("WordPress Theme Unit Test Migration", () => {
let wxrData: Awaited<ReturnType<typeof parseWxr>>;
beforeAll(async () => {
await ensureTestData();
const stream = createReadStream(TEST_DATA_PATH, { encoding: "utf-8" });
wxrData = await parseWxr(stream);
});
describe("WXR Parsing", () => {
it("parses site metadata", () => {
expect(wxrData.site.title).toBe("Theme Unit Test Data");
expect(wxrData.site.link).toBe("https://wpthemetestdata.wordpress.com");
expect(wxrData.site.language).toBe("en");
});
it("parses all posts", () => {
// Theme Unit Test has many posts covering different scenarios
expect(wxrData.posts.length).toBeGreaterThan(50);
});
it("parses all pages", () => {
const pages = wxrData.posts.filter((p) => p.postType === "page");
expect(pages.length).toBeGreaterThan(10);
});
it("parses categories with hierarchy", () => {
expect(wxrData.categories.length).toBeGreaterThan(20);
// Check for parent-child relationships
const parentCategory = wxrData.categories.find((c) => c.nicename === "parent-category");
expect(parentCategory).toBeDefined();
const childCategory = wxrData.categories.find((c) => c.nicename === "child-category-01");
expect(childCategory).toBeDefined();
expect(childCategory?.parent).toBe("parent-category");
});
it("parses tags", () => {
expect(wxrData.tags.length).toBeGreaterThan(50);
// Check for specific tags
const wpTag = wxrData.tags.find((t) => t.slug === "wordpress");
expect(wpTag).toBeDefined();
expect(wpTag?.name).toBe("WordPress");
});
it("parses authors", () => {
expect(wxrData.authors.length).toBeGreaterThanOrEqual(1);
const author = wxrData.authors.find((a) => a.login === "themereviewteam");
expect(author).toBeDefined();
expect(author?.displayName).toBe("Theme Reviewer");
});
it("parses attachments", () => {
expect(wxrData.attachments.length).toBeGreaterThan(0);
});
it("parses post categories and tags", () => {
// Find a post with both categories and tags
const postsWithTaxonomies = wxrData.posts.filter(
(p) => p.categories.length > 0 || p.tags.length > 0,
);
expect(postsWithTaxonomies.length).toBeGreaterThan(0);
});
});
describe("Gutenberg Block Conversion", () => {
  // Each test locates a fixture post containing a given Gutenberg block
  // comment marker and checks that gutenbergToPortableText produces the
  // corresponding Portable Text node type. Tests guarded by `if (post)`
  // are skipped silently when the fixture lacks that block type.

  it("converts paragraph blocks", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:paragraph"));
    expect(sample).toBeDefined();
    const blocks = gutenbergToPortableText(sample!.content || "");
    expect(blocks.length).toBeGreaterThan(0);
    const textBlock = blocks.find((b) => b._type === "block");
    expect(textBlock).toBeDefined();
  });

  it("converts heading blocks with different levels", () => {
    // This fixture post is known to contain h2 headings.
    const sample = wxrData.posts.find((p) => p.title === "WP 6.1 Font size scale");
    expect(sample).toBeDefined();
    const blocks = gutenbergToPortableText(sample!.content || "");
    // Heading styles are emitted as "h2", "h3", … — match any "h*" style.
    const headingBlocks = blocks.filter((b) => {
      return b._type === "block" && (b as any).style?.startsWith("h");
    });
    expect(headingBlocks.length).toBeGreaterThan(0);
  });

  it("converts list blocks", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:list"));
    if (sample) {
      const blocks = gutenbergToPortableText(sample.content || "");
      // List entries are regular text blocks carrying a listItem marker.
      const listEntries = blocks.filter((b) => b._type === "block" && (b as any).listItem);
      expect(listEntries.length).toBeGreaterThan(0);
    }
  });

  it("converts image blocks", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:image"));
    if (sample) {
      const blocks = gutenbergToPortableText(sample.content || "");
      const imageNodes = blocks.filter((b) => b._type === "image");
      expect(imageNodes.length).toBeGreaterThan(0);
    }
  });

  it("converts quote blocks", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:quote"));
    if (sample) {
      const blocks = gutenbergToPortableText(sample.content || "");
      const quoteBlocks = blocks.filter((b) => {
        return b._type === "block" && (b as any).style === "blockquote";
      });
      expect(quoteBlocks.length).toBeGreaterThan(0);
    }
  });

  it("converts code blocks", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:code"));
    if (sample) {
      const blocks = gutenbergToPortableText(sample.content || "");
      const codeNodes = blocks.filter((b) => b._type === "code");
      expect(codeNodes.length).toBeGreaterThan(0);
    }
  });

  it("converts group blocks by flattening", () => {
    const sample = wxrData.posts.find((p) => p.content?.includes("wp:group"));
    expect(sample).toBeDefined();
    const blocks = gutenbergToPortableText(sample!.content || "");
    // Group wrappers must be dissolved: no "group" node may appear …
    expect(blocks.filter((b) => b._type === "group").length).toBe(0);
    // … yet the group's inner content must still be emitted.
    expect(blocks.length).toBeGreaterThan(0);
  });

  it("handles classic editor content", () => {
    // Posts in the "classic" category predate Gutenberg and carry raw HTML
    // without <!-- wp: --> markers.
    const classicPost = wxrData.posts.find((p) => p.categories.includes("classic"));
    if (classicPost && classicPost.content) {
      const isGutenberg = classicPost.content.includes("<!-- wp:");
      if (!isGutenberg && classicPost.content.trim()) {
        const blocks = gutenbergToPortableText(classicPost.content);
        expect(blocks.length).toBeGreaterThan(0);
      }
    }
  });

  it("preserves inline formatting", () => {
    const sample = wxrData.posts.find((p) => {
      return p.content?.includes("<strong>") || p.content?.includes("<em>");
    });
    if (sample) {
      const blocks = gutenbergToPortableText(sample.content || "");
      const markedBlocks = blocks.filter((b) => {
        if (b._type !== "block") return false;
        return (b as any).children?.some((c: any) => c.marks?.length > 0);
      });
      // NOTE(review): >= 0 is a vacuous assertion — it tolerates lossy mark
      // conversion and only verifies the filter runs without throwing.
      expect(markedBlocks.length).toBeGreaterThanOrEqual(0);
    }
  });

  it("handles empty content gracefully", () => {
    // Empty input maps to an empty Portable Text document, not an error.
    expect(gutenbergToPortableText("")).toEqual([]);
  });

  it("handles malformed blocks gracefully", () => {
    // An unterminated closing marker must not throw; partial or fallback
    // output is acceptable as long as an array comes back.
    const truncated = "<!-- wp:paragraph --><p>Test<!-- /wp:paragraph";
    const blocks = gutenbergToPortableText(truncated);
    expect(Array.isArray(blocks)).toBe(true);
  });
});
describe("Edge Cases", () => {
  it("handles posts with special characters in title", () => {
    // Titles containing HTML-sensitive characters must round-trip through
    // the WXR parser without throwing; filtering here is just a probe.
    const withSpecialChars = wxrData.posts.filter((p) => {
      const title = p.title;
      return title?.includes("&") || title?.includes("<") || title?.includes('"');
    });
    expect(withSpecialChars).toBeDefined();
  });

  it("handles posts with very long content", () => {
    // Pick the post with the longest content body (first wins on ties,
    // matching a left-fold with the first post as the seed).
    let longestPost = wxrData.posts[0];
    for (const candidate of wxrData.posts) {
      const candidateLength = candidate.content?.length || 0;
      const bestLength = longestPost?.content?.length || 0;
      if (candidateLength > bestLength) {
        longestPost = candidate;
      }
    }
    if (longestPost?.content) {
      const blocks = gutenbergToPortableText(longestPost.content);
      expect(blocks.length).toBeGreaterThan(0);
    }
  });

  it("handles deeply nested blocks", () => {
    // Columns and groups nest other blocks; conversion must not blow the
    // stack or throw on them.
    const nestedPost = wxrData.posts.find((p) => {
      return p.content?.includes("wp:columns") || p.content?.includes("wp:group");
    });
    if (nestedPost) {
      const blocks = gutenbergToPortableText(nestedPost.content || "");
      expect(Array.isArray(blocks)).toBe(true);
    }
  });

  it("handles posts with embeds", () => {
    const embedPost = wxrData.posts.find((p) => p.content?.includes("wp:embed"));
    if (embedPost) {
      const blocks = gutenbergToPortableText(embedPost.content || "");
      // NOTE(review): >= 0 is a vacuous assertion — it only verifies the
      // conversion and filter run without throwing.
      const embedNodes = blocks.filter((b) => b._type === "embed");
      expect(embedNodes.length).toBeGreaterThanOrEqual(0);
    }
  });
});
describe("Content Integrity", () => {
  it("preserves all text content through conversion", () => {
    // Spot-check the first ten posts: converting Gutenberg markup to
    // Portable Text must not silently drop textual content.
    for (const post of wxrData.posts.slice(0, 10)) {
      if (!post.content) continue;
      const blocks = gutenbergToPortableText(post.content);
      // Pull the raw text back out of every node; non-text nodes
      // (images, embeds) contribute an empty string.
      const pieces: string[] = [];
      for (const node of blocks) {
        if (node._type === "block" && (node as any).children) {
          pieces.push((node as any).children.map((c: any) => c.text || "").join(""));
        } else if (node._type === "code") {
          pieces.push((node as any).code || "");
        } else {
          pieces.push("");
        }
      }
      const extractedText = pieces.join(" ").trim();
      if (post.content.includes("<p>") || post.content.includes("wp:paragraph")) {
        // Only assert when the source markup actually carried paragraph
        // text — a post made purely of images/embeds legitimately yields
        // no extractable text.
        if (PARAGRAPH_WITH_TEXT_REGEX.test(post.content)) {
          expect(extractedText.length).toBeGreaterThan(0);
        }
      }
    }
  });
});
describe("Statistics", () => {
  it("reports conversion statistics", () => {
    // Convert every fixture post, tallying successes, failures, and a
    // per-node-type histogram of the generated Portable Text blocks.
    const blockTypeCounts = new Map<string, number>();
    let totalPosts = 0;
    let successfulConversions = 0;
    let failedConversions = 0;
    let totalBlocks = 0;
    for (const post of wxrData.posts) {
      totalPosts += 1;
      try {
        const converted = gutenbergToPortableText(post.content || "");
        successfulConversions += 1;
        totalBlocks += converted.length;
        for (const node of converted) {
          blockTypeCounts.set(node._type, (blockTypeCounts.get(node._type) ?? 0) + 1);
        }
      } catch {
        failedConversions += 1;
      }
    }
    // Log statistics (visible in test output with --reporter=verbose)
    console.log("\n=== WordPress Migration Statistics ===");
    console.log(`Total posts: ${totalPosts}`);
    console.log(`Successful: ${successfulConversions}`);
    console.log(`Failed: ${failedConversions}`);
    console.log(`Total blocks generated: ${totalBlocks}`);
    console.log("\nBlock types:");
    for (const [type, count] of blockTypeCounts.entries()) {
      console.log(`  ${type}: ${count}`);
    }
    console.log("=====================================\n");
    // Every post must convert without throwing.
    expect(failedConversions).toBe(0);
    expect(successfulConversions).toBe(totalPosts);
  });
});
});