Emdash source with visual editor image upload fix

Fixes:
1. media.ts: wrap placeholder generation in try-catch
2. toolbar.ts: check r.ok, display error message in popover
This commit is contained in:
2026-05-03 10:44:54 +07:00
parent 78f81bebb6
commit 2d1be52177
2352 changed files with 662964 additions and 0 deletions

View File

@@ -0,0 +1,70 @@
import { describe, it, expect } from "vitest";
import { apiError, apiSuccess, handleError, unwrapResult } from "../../../src/api/error.js";
// Every API response helper must emit private/no-store so authenticated
// payloads are never cached by the browser or intermediaries, and must not
// add a Vary header.
describe("API cache headers", () => {
  const EXPECTED_CACHE_CONTROL = "private, no-store";

  // Small shared readers so each test states its expectation in one line.
  const cacheControl = (response: Response) => response.headers.get("Cache-Control");
  const hasVary = (response: Response) => response.headers.has("Vary");

  describe("apiSuccess", () => {
    it("should include Cache-Control: private, no-store", () => {
      expect(cacheControl(apiSuccess({ ok: true }))).toBe(EXPECTED_CACHE_CONTROL);
    });
    it("should not include Vary header", () => {
      expect(hasVary(apiSuccess({ ok: true }))).toBe(false);
    });
    it("should still include correct status and body", async () => {
      const response = apiSuccess({ id: "123" }, 201);
      expect(response.status).toBe(201);
      await expect(response.json()).resolves.toEqual({ data: { id: "123" } });
    });
  });

  describe("apiError", () => {
    it("should include Cache-Control: private, no-store", () => {
      expect(cacheControl(apiError("NOT_FOUND", "Not found", 404))).toBe(EXPECTED_CACHE_CONTROL);
    });
    it("should not include Vary header", () => {
      expect(hasVary(apiError("NOT_FOUND", "Not found", 404))).toBe(false);
    });
    it("should still include correct status and body", async () => {
      const response = apiError("FORBIDDEN", "Access denied", 403);
      expect(response.status).toBe(403);
      await expect(response.json()).resolves.toEqual({
        error: { code: "FORBIDDEN", message: "Access denied" },
      });
    });
  });

  describe("handleError", () => {
    it("should include cache headers on 500 responses", () => {
      const response = handleError(new Error("db crash"), "Something went wrong", "INTERNAL");
      expect(cacheControl(response)).toBe(EXPECTED_CACHE_CONTROL);
      expect(hasVary(response)).toBe(false);
    });
  });

  describe("unwrapResult", () => {
    it("should include cache headers on success", () => {
      const response = unwrapResult({ success: true, data: { id: "1" } });
      expect(cacheControl(response)).toBe(EXPECTED_CACHE_CONTROL);
      expect(hasVary(response)).toBe(false);
    });
    it("should include cache headers on error", () => {
      const response = unwrapResult({
        success: false,
        error: { code: "NOT_FOUND", message: "Not found" },
      });
      expect(cacheControl(response)).toBe(EXPECTED_CACHE_CONTROL);
      expect(hasVary(response)).toBe(false);
    });
  });
});

View File

@@ -0,0 +1,307 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleContentCreate,
handleContentDuplicate,
handleContentGet,
handleContentList,
handleContentUpdate,
} from "../../../src/api/index.js";
import { BylineRepository } from "../../../src/database/repositories/byline.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
// Integration tests for the content CRUD handlers against a real test
// database. Covers auto-slug generation, collision suffixes, byline
// assignment/hydration, and explicit publishedAt/createdAt overrides.
describe("Content Handlers — auto-slug generation", () => {
  let db: Kysely<Database>;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    // Add a "name" field to the page collection so we can test name-based slug generation
    const registry = new SchemaRegistry(db);
    await registry.createField("page", { slug: "name", label: "Name", type: "string" });
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  describe("handleContentCreate", () => {
    it("should auto-generate slug from title when slug is omitted", async () => {
      const result = await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("hello-world");
    });

    it("should auto-generate slug from name when title is absent", async () => {
      // "page" has a "name" field added in beforeEach; no "title" supplied here.
      const result = await handleContentCreate(db, "page", {
        data: { name: "My Widget" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("my-widget");
    });

    it("should prefer title over name for slug generation", async () => {
      const result = await handleContentCreate(db, "page", {
        data: { title: "From Title", name: "From Name" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("from-title");
    });

    it("should respect explicit slug and not auto-generate", async () => {
      const result = await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
        slug: "custom-slug",
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("custom-slug");
    });

    it("should handle slug collisions by appending numeric suffix", async () => {
      // Create first item with the slug
      await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      // Create second item with same title — should get unique slug
      const result = await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("hello-world-1");
    });

    it("should increment suffix on repeated collisions", async () => {
      // Third item with the same title should bump past "-1" to "-2".
      await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      const result = await handleContentCreate(db, "post", {
        data: { title: "Hello World" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("hello-world-2");
    });

    it("should leave slug null when no title or name is present", async () => {
      // `data: {}` — no title, no name. Slug source isn't there, so the
      // auto-generator has nothing to work with.
      const result = await handleContentCreate(db, "post", {
        data: {},
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBeNull();
    });

    it("should leave slug null when title is empty string", async () => {
      // Empty string must not slugify to "" — the slug stays null instead.
      const result = await handleContentCreate(db, "post", {
        data: { title: "" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBeNull();
    });

    it("should handle unicode titles", async () => {
      // Accented characters are transliterated to ASCII in the slug.
      const result = await handleContentCreate(db, "post", {
        data: { title: "Café Naïve" },
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.slug).toBe("cafe-naive");
    });

    it("should allow same auto-slug in different collections", async () => {
      // Collision detection is scoped per collection, not global.
      const postResult = await handleContentCreate(db, "post", {
        data: { title: "About" },
      });
      const pageResult = await handleContentCreate(db, "page", {
        data: { title: "About" },
      });
      expect(postResult.success).toBe(true);
      expect(pageResult.success).toBe(true);
      expect(postResult.data?.item.slug).toBe("about");
      expect(pageResult.data?.item.slug).toBe("about");
    });

    it("preserves publishedAt and createdAt when provided — content migration use case", async () => {
      const originalCreated = "2019-03-15T10:30:00.000Z";
      const originalPublished = "2019-03-16T09:00:00.000Z";
      const result = await handleContentCreate(db, "post", {
        data: { title: "Migrated Post" },
        createdAt: originalCreated,
        publishedAt: originalPublished,
      });
      expect(result.success).toBe(true);
      expect(result.data?.item.createdAt).toBe(originalCreated);
      expect(result.data?.item.publishedAt).toBe(originalPublished);
    });
  });

  describe("handleContentDuplicate", () => {
    it("should generate slug from duplicated title", async () => {
      const original = await handleContentCreate(db, "post", {
        data: { title: "My Post" },
        slug: "my-post",
      });
      const result = await handleContentDuplicate(db, "post", original.data!.item.id);
      expect(result.success).toBe(true);
      // Title becomes "My Post (Copy)", slug should be generated from it
      expect(result.data?.item.slug).toBe("my-post-copy");
    });

    it("should handle duplicate slug collision from copy", async () => {
      const original = await handleContentCreate(db, "post", {
        data: { title: "My Post" },
        slug: "my-post",
      });
      // First duplicate
      const dup1 = await handleContentDuplicate(db, "post", original.data!.item.id);
      expect(dup1.data?.item.slug).toBe("my-post-copy");
      // Second duplicate — "My Post (Copy)" title slugifies to "my-post-copy"
      // which now collides with the first duplicate
      const dup2 = await handleContentDuplicate(db, "post", original.data!.item.id);
      expect(dup2.success).toBe(true);
      expect(dup2.data?.item.slug).toBe("my-post-copy-1");
    });
  });

  describe("byline hydration and assignment", () => {
    it("should assign and return bylines on create", async () => {
      const bylineRepo = new BylineRepository(db);
      const byline = await bylineRepo.create({
        slug: "author-one",
        displayName: "Author One",
      });
      const created = await handleContentCreate(db, "post", {
        data: { title: "Bylined" },
        bylines: [{ bylineId: byline.id, roleLabel: "Writer" }],
      });
      expect(created.success).toBe(true);
      // First byline in the list becomes the primary byline.
      expect(created.data?.item.primaryBylineId).toBe(byline.id);
      expect(created.data?.item.byline?.id).toBe(byline.id);
      expect(created.data?.item.bylines).toHaveLength(1);
      expect(created.data?.item.bylines?.[0]?.roleLabel).toBe("Writer");
    });

    it("should return bylines on get and list", async () => {
      const bylineRepo = new BylineRepository(db);
      const first = await bylineRepo.create({ slug: "first", displayName: "First" });
      const second = await bylineRepo.create({ slug: "second", displayName: "Second" });
      // Intentionally assign in (second, first) order to verify ordering is preserved.
      const created = await handleContentCreate(db, "post", {
        data: { title: "Order Test" },
        bylines: [{ bylineId: second.id }, { bylineId: first.id }],
      });
      expect(created.success).toBe(true);
      const contentId = created.data!.item.id;
      const fetched = await handleContentGet(db, "post", contentId);
      expect(fetched.success).toBe(true);
      expect(fetched.data?.item.bylines?.[0]?.byline.id).toBe(second.id);
      expect(fetched.data?.item.bylines?.[1]?.byline.id).toBe(first.id);
      expect(fetched.data?.item.byline?.id).toBe(second.id);
      const listed = await handleContentList(db, "post", {});
      expect(listed.success).toBe(true);
      const listedItem = listed.data?.items.find((item) => item.id === contentId);
      expect(listedItem?.byline?.id).toBe(second.id);
      expect(listedItem?.bylines?.[0]?.byline.id).toBe(second.id);
    });

    it("should update byline ordering on update", async () => {
      const bylineRepo = new BylineRepository(db);
      const first = await bylineRepo.create({ slug: "first-upd", displayName: "First" });
      const second = await bylineRepo.create({ slug: "second-upd", displayName: "Second" });
      const created = await handleContentCreate(db, "post", {
        data: { title: "Update Bylines" },
        bylines: [{ bylineId: first.id }, { bylineId: second.id }],
      });
      expect(created.success).toBe(true);
      // Swap the order on update; primary byline must follow the new first entry.
      const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
        bylines: [{ bylineId: second.id }, { bylineId: first.id }],
      });
      expect(updated.success).toBe(true);
      expect(updated.data?.item.primaryBylineId).toBe(second.id);
      expect(updated.data?.item.bylines?.[0]?.byline.id).toBe(second.id);
      expect(updated.data?.item.bylines?.[1]?.byline.id).toBe(first.id);
    });

    it("should copy bylines when duplicating", async () => {
      const bylineRepo = new BylineRepository(db);
      const byline = await bylineRepo.create({
        slug: "dup-author",
        displayName: "Dup Author",
      });
      const original = await handleContentCreate(db, "post", {
        data: { title: "Duplicate With Bylines" },
        bylines: [{ bylineId: byline.id }],
      });
      expect(original.success).toBe(true);
      const duplicated = await handleContentDuplicate(db, "post", original.data!.item.id);
      expect(duplicated.success).toBe(true);
      expect(duplicated.data?.item.byline?.id).toBe(byline.id);
      expect(duplicated.data?.item.bylines).toHaveLength(1);
    });
  });

  describe("handleContentUpdate — publishedAt override", () => {
    it("persists publishedAt when provided", async () => {
      const created = await handleContentCreate(db, "post", { data: { title: "Hi" } });
      expect(created.success).toBe(true);
      const newPublishedAt = "2019-03-16T09:00:00.000Z";
      const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
        publishedAt: newPublishedAt,
      });
      expect(updated.success).toBe(true);
      expect(updated.data?.item.publishedAt).toBe(newPublishedAt);
    });

    it("leaves createdAt untouched on update", async () => {
      const originalCreated = "2019-03-15T10:30:00.000Z";
      const created = await handleContentCreate(db, "post", {
        data: { title: "Hi" },
        createdAt: originalCreated,
      });
      expect(created.success).toBe(true);
      // Update touches data and publishedAt only; createdAt must survive.
      const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
        data: { title: "Edited" },
        publishedAt: "2020-01-01T00:00:00.000Z",
      });
      expect(updated.success).toBe(true);
      expect(updated.data?.item.createdAt).toBe(originalCreated);
    });
  });
});

View File

@@ -0,0 +1,279 @@
import { Role } from "@emdash-cms/auth";
import { describe, it, expect, vi } from "vitest";
import { PUT as updateContent } from "../../../src/astro/routes/api/content/[collection]/[id].js";
import { POST as createContent } from "../../../src/astro/routes/api/content/[collection]/index.js";
/**
* Regression tests for the `publishedAt` / `createdAt` permission gate.
*
* The gate must trigger on *any* explicit presence of these fields —
* including `null` (explicit clear) — not just on non-null values. Checking
* only `!= null` would let a regular AUTHOR clear `published_at` on any item
* they can edit, bypassing `content:publish_any`.
*/
// Route-level permission tests: the create/update content routes must gate
// explicit publishedAt/createdAt fields behind a publish permission. Handlers
// are vi.fn() mocks, so only the route's own gating logic is exercised.
describe("content route — publishedAt / createdAt permission gate", () => {
  // User stub carrying only id and role.
  const makeUser = (role: (typeof Role)[keyof typeof Role]) => ({
    id: "user-1",
    role,
  });
  // Disabled cache stub; invalidate is a spy so calls would be observable.
  const makeCache = () => ({ enabled: false, invalidate: vi.fn() });

  describe("POST /_emdash/api/content/{collection}", () => {
    it("returns 403 when an AUTHOR tries to set publishedAt", async () => {
      const request = new Request("http://localhost/_emdash/api/content/post", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          data: { title: "Hi" },
          publishedAt: "2019-03-15T10:30:00.000Z",
        }),
      });
      const response = await createContent({
        params: { collection: "post" },
        request,
        locals: {
          emdash: {
            handleContentCreate: vi.fn(),
            handleContentGet: vi.fn(),
          },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof createContent>[0]);
      expect(response.status).toBe(403);
      await expect(response.json()).resolves.toMatchObject({
        error: { code: "FORBIDDEN" },
      });
    });

    it("returns 403 when an AUTHOR tries to clear publishedAt via null", async () => {
      // Explicit null is still an override — see the gate rationale above.
      const request = new Request("http://localhost/_emdash/api/content/post", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ data: { title: "Hi" }, publishedAt: null }),
      });
      const response = await createContent({
        params: { collection: "post" },
        request,
        locals: {
          emdash: {
            handleContentCreate: vi.fn(),
            handleContentGet: vi.fn(),
          },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof createContent>[0]);
      expect(response.status).toBe(403);
    });

    it("returns 403 when an AUTHOR tries to set createdAt", async () => {
      const request = new Request("http://localhost/_emdash/api/content/post", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          data: { title: "Hi" },
          createdAt: "2019-03-15T10:30:00.000Z",
        }),
      });
      const response = await createContent({
        params: { collection: "post" },
        request,
        locals: {
          emdash: {
            handleContentCreate: vi.fn(),
            handleContentGet: vi.fn(),
          },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof createContent>[0]);
      expect(response.status).toBe(403);
    });

    it("lets EDITOR set publishedAt", async () => {
      // Mock a successful create so the route can return 201 with the item.
      const handleContentCreate = vi.fn().mockResolvedValue({
        success: true,
        data: {
          item: { id: "c1", publishedAt: "2019-03-15T10:30:00.000Z" },
          _rev: "rev1",
        },
      });
      const request = new Request("http://localhost/_emdash/api/content/post", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          data: { title: "Hi" },
          publishedAt: "2019-03-15T10:30:00.000Z",
        }),
      });
      const response = await createContent({
        params: { collection: "post" },
        request,
        locals: {
          emdash: { handleContentCreate, handleContentGet: vi.fn() },
          user: makeUser(Role.EDITOR),
        },
        cache: makeCache(),
      } as Parameters<typeof createContent>[0]);
      expect(response.status).toBe(201);
      // publishedAt must be forwarded (not stripped) for permitted roles.
      expect(handleContentCreate).toHaveBeenCalledWith(
        "post",
        expect.objectContaining({ publishedAt: "2019-03-15T10:30:00.000Z" }),
      );
    });

    it("lets AUTHOR create without date overrides", async () => {
      const handleContentCreate = vi.fn().mockResolvedValue({
        success: true,
        data: { item: { id: "c1" }, _rev: "rev1" },
      });
      const request = new Request("http://localhost/_emdash/api/content/post", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ data: { title: "Hi" } }),
      });
      const response = await createContent({
        params: { collection: "post" },
        request,
        locals: {
          emdash: { handleContentCreate, handleContentGet: vi.fn() },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof createContent>[0]);
      expect(response.status).toBe(201);
      expect(handleContentCreate).toHaveBeenCalled();
    });
  });

  describe("PUT /_emdash/api/content/{collection}/{id}", () => {
    // Existing item owned by user-1, so AUTHOR ownership checks pass and any
    // 403 comes from the date gate specifically.
    const ownedItem = {
      success: true,
      data: { item: { id: "c1", authorId: "user-1" }, _rev: "rev1" },
    };

    it("returns 403 when an AUTHOR tries to clear publishedAt via null on their own post", async () => {
      const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
      const handleContentUpdate = vi.fn();
      const request = new Request("http://localhost/_emdash/api/content/post/c1", {
        method: "PUT",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ publishedAt: null }),
      });
      const response = await updateContent({
        params: { collection: "post", id: "c1" },
        request,
        locals: {
          emdash: { handleContentUpdate, handleContentGet },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof updateContent>[0]);
      expect(response.status).toBe(403);
      // Gate must reject before the update handler is ever invoked.
      expect(handleContentUpdate).not.toHaveBeenCalled();
    });

    it("returns 403 when an AUTHOR tries to set publishedAt on their own post", async () => {
      const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
      const handleContentUpdate = vi.fn();
      const request = new Request("http://localhost/_emdash/api/content/post/c1", {
        method: "PUT",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ publishedAt: "2019-03-15T10:30:00.000Z" }),
      });
      const response = await updateContent({
        params: { collection: "post", id: "c1" },
        request,
        locals: {
          emdash: { handleContentUpdate, handleContentGet },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof updateContent>[0]);
      expect(response.status).toBe(403);
      expect(handleContentUpdate).not.toHaveBeenCalled();
    });

    it("lets EDITOR set publishedAt", async () => {
      const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
      const handleContentUpdate = vi.fn().mockResolvedValue({
        success: true,
        data: {
          item: { id: "c1", publishedAt: "2019-03-15T10:30:00.000Z" },
          _rev: "rev2",
        },
      });
      const request = new Request("http://localhost/_emdash/api/content/post/c1", {
        method: "PUT",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ publishedAt: "2019-03-15T10:30:00.000Z" }),
      });
      const response = await updateContent({
        params: { collection: "post", id: "c1" },
        request,
        locals: {
          emdash: { handleContentUpdate, handleContentGet },
          user: makeUser(Role.EDITOR),
        },
        cache: makeCache(),
      } as Parameters<typeof updateContent>[0]);
      expect(response.status).toBe(200);
      expect(handleContentUpdate).toHaveBeenCalledWith(
        "post",
        "c1",
        expect.objectContaining({ publishedAt: "2019-03-15T10:30:00.000Z" }),
      );
    });

    it("lets AUTHOR update their own post without date overrides", async () => {
      const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
      const handleContentUpdate = vi.fn().mockResolvedValue({
        success: true,
        data: { item: { id: "c1" }, _rev: "rev2" },
      });
      const request = new Request("http://localhost/_emdash/api/content/post/c1", {
        method: "PUT",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ data: { title: "Edited" } }),
      });
      const response = await updateContent({
        params: { collection: "post", id: "c1" },
        request,
        locals: {
          emdash: { handleContentUpdate, handleContentGet },
          user: makeUser(Role.AUTHOR),
        },
        cache: makeCache(),
      } as Parameters<typeof updateContent>[0]);
      expect(response.status).toBe(200);
      expect(handleContentUpdate).toHaveBeenCalled();
    });
  });
});

View File

@@ -0,0 +1,165 @@
import { describe, it, expect } from "vitest";
import { checkPublicCsrf } from "../../../src/api/csrf.js";
/**
 * Builds a Request against a fixed public comments endpoint for CSRF tests.
 * Only the method and headers vary between cases; the URL is constant.
 */
function makeRequest(method: string, headers: Record<string, string> = {}): Request {
  const endpoint = "http://example.com/_emdash/api/comments/posts/abc";
  return new Request(endpoint, { method, headers });
}
/**
 * URL of the same comments endpoint, with an overridable host so tests can
 * simulate different deployment origins (defaults to example.com).
 */
function makeUrl(host = "example.com"): URL {
  return new URL("/_emdash/api/comments/posts/abc", `http://${host}`);
}
describe("checkPublicCsrf", () => {
describe("allows requests with X-EmDash-Request header", () => {
it("allows POST with custom header", () => {
const request = makeRequest("POST", { "X-EmDash-Request": "1" });
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
});
it("allows POST with custom header even if Origin is cross-origin", () => {
const request = makeRequest("POST", {
"X-EmDash-Request": "1",
Origin: "http://evil.com",
});
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
});
});
describe("allows same-origin requests", () => {
it("allows POST with matching Origin", () => {
const request = makeRequest("POST", {
Origin: "http://example.com",
});
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
});
it("allows POST with matching Origin on different path", () => {
const request = makeRequest("POST", {
Origin: "http://example.com",
});
const url = new URL("http://example.com/_emdash/api/auth/invite/complete");
expect(checkPublicCsrf(request, url)).toBeNull();
});
it("matches host including port", () => {
const request = makeRequest("POST", {
Origin: "http://localhost:4321",
});
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
expect(checkPublicCsrf(request, url)).toBeNull();
});
});
describe("blocks cross-origin requests", () => {
it("returns 403 with CSRF_REJECTED code", async () => {
const request = makeRequest("POST", {
Origin: "http://evil.com",
});
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
const body = await response!.json();
expect(body).toEqual({
error: { code: "CSRF_REJECTED", message: "Cross-origin request blocked" },
});
});
it("rejects Origin with different port", async () => {
const request = makeRequest("POST", {
Origin: "http://example.com:9999",
});
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
it("rejects Origin with different host", async () => {
const request = makeRequest("POST", {
Origin: "http://attacker.example.com",
});
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
it("rejects cross-scheme Origin (http vs https)", async () => {
const request = makeRequest("POST", {
Origin: "https://example.com",
});
// Request URL is http://example.com — same host but different scheme
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
it("rejects malformed Origin header", async () => {
const request = makeRequest("POST", {
Origin: "not-a-valid-url",
});
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
it("rejects Origin: null (sandboxed iframe)", async () => {
const request = makeRequest("POST", { Origin: "null" });
const response = checkPublicCsrf(request, makeUrl());
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
});
describe("dual-origin matching (reverse proxy)", () => {
it("accepts Origin matching public origin when behind proxy", () => {
const request = makeRequest("POST", {
Origin: "https://mysite.example.com",
});
// Internal URL is http, public is https — proxy scenario
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
expect(checkPublicCsrf(request, url, "https://mysite.example.com")).toBeNull();
});
it("still accepts Origin matching internal origin when publicOrigin is set", () => {
const request = makeRequest("POST", {
Origin: "http://localhost:4321",
});
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
expect(checkPublicCsrf(request, url, "https://mysite.example.com")).toBeNull();
});
it("rejects Origin matching neither internal nor public", () => {
const request = makeRequest("POST", {
Origin: "http://evil.com",
});
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
const response = checkPublicCsrf(request, url, "https://mysite.example.com");
expect(response).not.toBeNull();
expect(response!.status).toBe(403);
});
it("unchanged behavior when publicOrigin is undefined", () => {
const request = makeRequest("POST", {
Origin: "http://example.com",
});
expect(checkPublicCsrf(request, makeUrl(), undefined)).toBeNull();
});
});
describe("allows requests without Origin header", () => {
it("allows POST without any Origin (non-browser client)", () => {
const request = makeRequest("POST");
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
});
it("allows POST without Origin or custom header (curl/server)", () => {
const request = makeRequest("POST", {
"Content-Type": "application/json",
});
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
});
});
});

View File

@@ -0,0 +1,241 @@
import type { Kysely } from "kysely";
import { describe, it, expect, afterEach } from "vitest";
import { handleDashboardStats } from "../../../src/api/handlers/dashboard.js";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { createPostFixture, createPageFixture } from "../../utils/fixtures.js";
import {
setupTestDatabase,
setupTestDatabaseWithCollections,
teardownTestDatabase,
} from "../../utils/test-db.js";
describe("Dashboard Handlers", () => {
describe("handleDashboardStats", () => {
let db: Kysely<Database>;
afterEach(async () => {
await teardownTestDatabase(db);
});
it("returns empty stats when no collections exist", async () => {
db = await setupTestDatabase();
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
expect(result.data).toBeDefined();
expect(result.data!.collections).toEqual([]);
expect(result.data!.mediaCount).toBe(0);
expect(result.data!.userCount).toBe(0);
expect(result.data!.recentItems).toEqual([]);
});
it("returns collection stats with correct counts", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
// Create some posts with different statuses
await contentRepo.create(createPostFixture({ slug: "post-1" }));
await contentRepo.create(createPostFixture({ slug: "post-2", status: "published" }));
await contentRepo.create(createPostFixture({ slug: "post-3", status: "published" }));
// Create a draft page
await contentRepo.create(createPageFixture({ slug: "page-1" }));
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
const { collections } = result.data!;
// Both collections should be present
expect(collections).toHaveLength(2);
const postStats = collections.find((c) => c.slug === "post");
expect(postStats).toBeDefined();
expect(postStats!.label).toBe("Posts");
expect(postStats!.total).toBe(3);
expect(postStats!.published).toBe(2);
expect(postStats!.draft).toBe(1);
const pageStats = collections.find((c) => c.slug === "page");
expect(pageStats).toBeDefined();
expect(pageStats!.label).toBe("Pages");
expect(pageStats!.total).toBe(1);
expect(pageStats!.published).toBe(0);
expect(pageStats!.draft).toBe(1);
});
it("returns recent items across collections", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
await contentRepo.create(createPostFixture({ slug: "post-1" }));
// Small delay for distinct updated_at
await new Promise((r) => setTimeout(r, 10));
await contentRepo.create(createPageFixture({ slug: "page-1" }));
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
const { recentItems } = result.data!;
expect(recentItems.length).toBeGreaterThanOrEqual(2);
// Most recently updated should be first
expect(recentItems[0]!.collection).toBe("page");
expect(recentItems[0]!.collectionLabel).toBe("Pages");
expect(recentItems[0]!.slug).toBe("page-1");
expect(recentItems[0]!.status).toBe("draft");
expect(recentItems[1]!.collection).toBe("post");
expect(recentItems[1]!.collectionLabel).toBe("Posts");
expect(recentItems[1]!.slug).toBe("post-1");
});
it("recent items use title field when available", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
// setupTestDatabaseWithCollections creates post/page with title fields
await contentRepo.create(
createPostFixture({
slug: "my-post",
data: { title: "My Great Post", content: [] },
}),
);
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
const postItem = result.data!.recentItems.find((i) => i.slug === "my-post");
expect(postItem).toBeDefined();
expect(postItem!.title).toBe("My Great Post");
});
it("recent items fall back to slug when collection has no title field", async () => {
db = await setupTestDatabase();
const registry = new SchemaRegistry(db);
// Create a collection without a title field
await registry.createCollection({
slug: "events",
label: "Events",
labelSingular: "Event",
});
await registry.createField("events", {
slug: "date",
label: "Date",
type: "datetime",
});
const contentRepo = new ContentRepository(db);
await contentRepo.create({
type: "events",
slug: "launch-party",
data: { date: "2026-03-01" },
status: "draft",
});
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
const eventItem = result.data!.recentItems.find((i) => i.collection === "events");
expect(eventItem).toBeDefined();
// No title field, should fall back to slug
expect(eventItem!.title).toBe("launch-party");
});
it("excludes soft-deleted items from recent items", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
const post = await contentRepo.create(createPostFixture({ slug: "will-delete" }));
await contentRepo.create(createPostFixture({ slug: "will-keep" }));
// Soft-delete the first post
await contentRepo.delete("post", post.id);
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
const slugs = result.data!.recentItems.map((i) => i.slug);
expect(slugs).toContain("will-keep");
expect(slugs).not.toContain("will-delete");
});
it("limits recent items to 10", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
// Create 15 posts
for (let i = 0; i < 15; i++) {
await contentRepo.create(createPostFixture({ slug: `post-${String(i).padStart(2, "0")}` }));
}
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
expect(result.data!.recentItems).toHaveLength(10);
});
it("recent items are ordered by updated_at descending", async () => {
db = await setupTestDatabaseWithCollections();
const contentRepo = new ContentRepository(db);
// Create posts with small delays between them so each row gets a
// strictly later updated_at timestamp than the previous one.
await contentRepo.create(createPostFixture({ slug: "oldest" }));
await new Promise((r) => setTimeout(r, 10));
await contentRepo.create(createPostFixture({ slug: "middle" }));
await new Promise((r) => setTimeout(r, 10));
await contentRepo.create(createPostFixture({ slug: "newest" }));
const result = await handleDashboardStats(db);
expect(result.success).toBe(true);
// Most recently updated first.
const slugs = result.data!.recentItems.map((i) => i.slug);
expect(slugs).toEqual(["newest", "middle", "oldest"]);
});
it("counts exclude soft-deleted items", async () => {
  db = await setupTestDatabaseWithCollections();
  const repo = new ContentRepository(db);
  const removed = await repo.create(createPostFixture({ slug: "to-delete" }));
  await repo.create(createPostFixture({ slug: "to-keep" }));
  await repo.delete("post", removed.id);
  const result = await handleDashboardStats(db);
  expect(result.success).toBe(true);
  const postStats = result.data!.collections.find((collection) => collection.slug === "post");
  // count() in ContentRepository filters deleted_at IS NULL
  expect(postStats!.total).toBe(1);
});
it("returns camelCase keys in recent items", async () => {
  db = await setupTestDatabaseWithCollections();
  const repo = new ContentRepository(db);
  await repo.create(createPostFixture());
  const result = await handleDashboardStats(db);
  expect(result.success).toBe(true);
  const item = result.data!.recentItems[0]!;
  // Verify camelCase API shape
  const camelCaseKeys = [
    "id",
    "collection",
    "collectionLabel",
    "title",
    "slug",
    "status",
    "updatedAt",
    "authorId",
  ];
  for (const key of camelCaseKeys) {
    expect(item).toHaveProperty(key);
  }
  // Should NOT have snake_case keys
  for (const key of ["collection_label", "updated_at", "author_id"]) {
    expect(item).not.toHaveProperty(key);
  }
});
});
});

View File

@@ -0,0 +1,22 @@
/**
* Email Settings Route Registration Test
*
* Regression test for https://github.com/emdash-cms/emdash/issues/151
* The email settings API route file existed but was never registered
* via injectRoute(), causing the endpoint to return 404.
*/
import { describe, expect, it, vi } from "vitest";
import { injectCoreRoutes } from "../../../src/astro/integration/routes.js";
describe("email settings route registration (#151)", () => {
  it("registers /_emdash/api/settings/email route", () => {
    const injectRoute = vi.fn();
    injectCoreRoutes(injectRoute);
    // Collect the pattern of every route handed to injectRoute().
    const registered = injectRoute.mock.calls.map(
      ([route]) => (route as { pattern: string }).pattern,
    );
    expect(registered).toContain("/_emdash/api/settings/email");
  });
});

View File

@@ -0,0 +1,915 @@
/**
* Marketplace handler tests
*
* Tests the business logic for:
* - Install (handleMarketplaceInstall)
* - Update (handleMarketplaceUpdate)
* - Uninstall (handleMarketplaceUninstall)
* - Update check (handleMarketplaceUpdateCheck)
* - Search/GetPlugin proxies (handleMarketplaceSearch, handleMarketplaceGetPlugin)
*
* Uses a real in-memory SQLite database and mock Storage/SandboxRunner/fetch.
*/
import BetterSqlite3 from "better-sqlite3";
import { Kysely, SqliteDialect } from "kysely";
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import {
handleMarketplaceInstall,
handleMarketplaceUpdate,
handleMarketplaceUninstall,
handleMarketplaceUpdateCheck,
handleMarketplaceSearch,
handleMarketplaceGetPlugin,
} from "../../../src/api/handlers/marketplace.js";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import type { Database as DbSchema } from "../../../src/database/types.js";
import type { MarketplacePluginDetail } from "../../../src/plugins/marketplace.js";
import type { SandboxRunner, SandboxedPlugin } from "../../../src/plugins/sandbox/types.js";
import { PluginStateRepository } from "../../../src/plugins/state.js";
import type { PluginManifest } from "../../../src/plugins/types.js";
import type {
Storage,
UploadResult,
DownloadResult,
ListResult,
SignedUploadUrl,
} from "../../../src/storage/types.js";
// ── Mock factories ────────────────────────────────────────────────
/**
 * In-memory Storage double backed by a Map keyed by object key.
 * Upload bodies are normalized to Uint8Array; downloads replay the
 * stored bytes as a single-chunk ReadableStream.
 */
function createMockStorage(): Storage {
  const files = new Map<string, { body: Uint8Array; contentType: string }>();

  // Coerce any of the three accepted body shapes into a plain Uint8Array.
  async function toBytes(
    body: Buffer | Uint8Array | ReadableStream<Uint8Array>,
  ): Promise<Uint8Array> {
    if (body instanceof Uint8Array) {
      return body;
    }
    if (Buffer.isBuffer(body)) {
      return new Uint8Array(body);
    }
    // ReadableStream
    return new Uint8Array(await new Response(body).arrayBuffer());
  }

  return {
    async upload(opts: {
      key: string;
      body: Buffer | Uint8Array | ReadableStream<Uint8Array>;
      contentType: string;
    }): Promise<UploadResult> {
      const body = await toBytes(opts.body);
      files.set(opts.key, { body, contentType: opts.contentType });
      return { key: opts.key, url: `https://storage.test/${opts.key}`, size: body.length };
    },
    async download(key: string): Promise<DownloadResult> {
      const entry = files.get(key);
      if (!entry) throw new Error(`Not found: ${key}`);
      const stream = new ReadableStream<Uint8Array>({
        start(controller) {
          controller.enqueue(entry.body);
          controller.close();
        },
      });
      return { body: stream, contentType: entry.contentType, size: entry.body.length };
    },
    async delete(key: string): Promise<void> {
      files.delete(key);
    },
    async exists(key: string): Promise<boolean> {
      return files.has(key);
    },
    async list(): Promise<ListResult> {
      return { files: [] };
    },
    async getSignedUploadUrl(): Promise<SignedUploadUrl> {
      return {
        url: "https://test.com/upload",
        method: "PUT",
        headers: {},
        expiresAt: new Date().toISOString(),
      };
    },
    getPublicUrl(key: string): string {
      return `https://storage.test/${key}`;
    },
  };
}
/**
 * SandboxRunner double that records every (manifest, code) pair handed to
 * load() so tests can assert what was actually installed.
 */
function createMockSandboxRunner(): SandboxRunner & {
  loadedPlugins: Array<{ manifest: PluginManifest; code: string }>;
} {
  const loadedPlugins: Array<{ manifest: PluginManifest; code: string }> = [];

  // A sandboxed plugin whose hooks/routes are inert no-ops.
  const makePlugin = (manifest: PluginManifest): SandboxedPlugin => ({
    id: manifest.id,
    manifest,
    invokeHook: async () => undefined,
    invokeRoute: async () => undefined,
    terminate: async () => {},
  });

  return {
    loadedPlugins,
    isAvailable: (): boolean => true,
    async load(manifest: PluginManifest, code: string): Promise<SandboxedPlugin> {
      loadedPlugins.push({ manifest, code });
      return makePlugin(manifest);
    },
    async terminateAll() {},
  };
}
// Base URL used by all mocked marketplace requests in this suite.
const MARKETPLACE_URL = "https://marketplace.example.com";
/** Smallest manifest shape the install/update handlers will accept. */
function mockManifest(id = "test-seo", version = "1.0.0"): PluginManifest {
  const emptySections = {
    allowedHosts: [],
    storage: {},
    hooks: [],
    routes: [],
    admin: {},
  };
  return {
    id,
    version,
    capabilities: ["content:read"],
    ...emptySections,
  };
}
/**
 * Create a gzipped tar bundle for use with mocked fetch.
 * Uses CompressionStream + minimal tar format.
 *
 * The archive contains exactly two entries — manifest.json (the serialized
 * manifest) and backend.js (a trivial module) — which is the layout the
 * install/update handlers unpack.
 *
 * @param manifest - plugin manifest serialized into manifest.json
 * @returns gzip-compressed tar archive bytes
 */
async function createMockBundle(manifest: PluginManifest): Promise<Uint8Array> {
const encoder = new TextEncoder();
const manifestJson = JSON.stringify(manifest);
const backendCode = 'export default function() { return "hello"; }';
// Create simple tar
const files = [
{ name: "manifest.json", content: manifestJson },
{ name: "backend.js", content: backendCode },
];
const blocks: Uint8Array[] = [];
for (const file of files) {
const contentBytes = encoder.encode(file.content);
// Each tar entry starts with a 512-byte header; unset bytes stay NUL.
const header = new Uint8Array(512);
// Name
header.set(encoder.encode(file.name), 0);
// Mode
header.set(encoder.encode("0000644\0"), 100);
// UID/GID
header.set(encoder.encode("0000000\0"), 108);
header.set(encoder.encode("0000000\0"), 116);
// Size in octal
const sizeOctal = contentBytes.length.toString(8).padStart(11, "0") + "\0";
header.set(encoder.encode(sizeOctal), 124);
// Mtime
header.set(encoder.encode("00000000000\0"), 136);
// Type = regular file
header[156] = 0x30;
// Checksum spaces
// NOTE(review): ustar specifies the 8-byte checksum field is treated as
// eight ASCII spaces while summing — confirm this literal is 8 spaces
// (whitespace may have been collapsed in this review view).
header.set(encoder.encode(" "), 148);
let checksum = 0;
for (let i = 0; i < 512; i++) checksum += header[i]!;
// Six octal digits, NUL, space — the conventional checksum encoding.
header.set(encoder.encode(checksum.toString(8).padStart(6, "0") + "\0 "), 148);
blocks.push(header);
// File data is NUL-padded to the next 512-byte boundary.
const paddedSize = Math.ceil(contentBytes.length / 512) * 512;
const dataBlock = new Uint8Array(paddedSize);
dataBlock.set(contentBytes, 0);
blocks.push(dataBlock);
}
// Archive terminator: two 512-byte zero blocks.
blocks.push(new Uint8Array(1024)); // end-of-archive
const totalSize = blocks.reduce((sum, b) => sum + b.length, 0);
const tar = new Uint8Array(totalSize);
let offset = 0;
for (const block of blocks) {
tar.set(block, offset);
offset += block.length;
}
// Gzip
const cs = new CompressionStream("gzip");
const writer = cs.writable.getWriter();
const reader = cs.readable.getReader();
// Write and read concurrently; the write promise is awaited only after
// the readable side has been fully drained.
const writePromise = writer.write(tar).then(() => writer.close());
const chunks: Uint8Array[] = [];
let totalLen = 0;
for (;;) {
const { done, value } = await reader.read();
if (done) break;
chunks.push(value);
totalLen += value.length;
}
await writePromise;
// Concatenate the compressed chunks into one contiguous buffer.
const result = new Uint8Array(totalLen);
offset = 0;
for (const chunk of chunks) {
result.set(chunk, offset);
offset += chunk.length;
}
return result;
}
/**
 * Marketplace plugin-detail payload as returned by the getPlugin endpoint.
 * `checksum` defaults to a placeholder; tests that need the checksum
 * verification skipped overwrite it with "".
 */
function mockPluginDetail(
  id = "test-seo",
  latestVersion = "1.0.0",
  checksum?: string,
): MarketplacePluginDetail {
  const versionInfo: NonNullable<MarketplacePluginDetail["latestVersion"]> = {
    version: latestVersion,
    minEmDashVersion: null,
    bundleSize: 1234,
    checksum: checksum ?? "will-be-computed",
    changelog: null,
    readme: null,
    hasIcon: false,
    screenshotCount: 0,
    screenshotUrls: [],
    capabilities: ["hooks"],
    auditVerdict: "pass",
    imageAuditVerdict: "pass",
    publishedAt: "2026-01-01T00:00:00Z",
  };
  return {
    id,
    name: "Test SEO",
    description: "SEO plugin",
    author: { name: "Test", verified: true, avatarUrl: null },
    capabilities: ["hooks"],
    keywords: [],
    installCount: 10,
    hasIcon: false,
    iconUrl: "",
    createdAt: "2026-01-01T00:00:00Z",
    updatedAt: "2026-02-01T00:00:00Z",
    repositoryUrl: null,
    homepageUrl: null,
    license: "MIT",
    latestVersion: versionInfo,
  };
}
describe("Marketplace handlers", () => {
// Shared fixtures: a real in-memory SQLite database behind Kysely, plus
// mock storage, a mock sandbox runner, and a stubbed global fetch.
let db: Kysely<DbSchema>;
let sqliteDb: BetterSqlite3.Database;
let storage: Storage;
let sandboxRunner: ReturnType<typeof createMockSandboxRunner>;
let fetchSpy: ReturnType<typeof vi.fn>;
beforeEach(async () => {
// Fresh schema per test, created by the real migration runner.
sqliteDb = new BetterSqlite3(":memory:");
db = new Kysely<DbSchema>({
dialect: new SqliteDialect({ database: sqliteDb }),
});
await runMigrations(db);
storage = createMockStorage();
sandboxRunner = createMockSandboxRunner();
// Every network request the handlers make goes through this spy.
fetchSpy = vi.fn();
vi.stubGlobal("fetch", fetchSpy);
});
afterEach(async () => {
await db.destroy();
sqliteDb.close();
// vi.restoreAllMocks() does not undo vi.stubGlobal(), so restore the
// real global fetch explicitly to avoid leaking the stub.
vi.unstubAllGlobals();
vi.restoreAllMocks();
});
// ── Install ────────────────────────────────────────────────────
describe("handleMarketplaceInstall", () => {
// fetchSpy responses are consumed strictly in the order queued with
// mockResolvedValueOnce; the happy path is: plugin detail → bundle
// download → install report.
it("returns error when marketplace not configured", async () => {
const result = await handleMarketplaceInstall(db, storage, sandboxRunner, undefined, "test");
expect(result.success).toBe(false);
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
});
it("returns error when storage not available", async () => {
const result = await handleMarketplaceInstall(
db,
null,
sandboxRunner,
MARKETPLACE_URL,
"test",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("STORAGE_NOT_CONFIGURED");
});
it("returns error when sandbox runner not available", async () => {
const result = await handleMarketplaceInstall(db, storage, null, MARKETPLACE_URL, "test");
expect(result.success).toBe(false);
expect(result.error?.code).toBe("SANDBOX_NOT_AVAILABLE");
});
it("successfully installs a marketplace plugin", async () => {
const manifest = mockManifest("test-seo", "1.0.0");
const bundleBytes = await createMockBundle(manifest);
// Mock: getPlugin detail — empty checksum so the checksum check is skipped
const detail = mockPluginDetail("test-seo", "1.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
// Mock: downloadBundle
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
// Mock: reportInstall
fetchSpy.mockResolvedValueOnce(new Response("OK", { status: 200 }));
const result = await handleMarketplaceInstall(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(true);
expect(result.data?.pluginId).toBe("test-seo");
expect(result.data?.version).toBe("1.0.0");
expect(result.data?.capabilities).toEqual(["content:read"]);
// Verify state was written
const repo = new PluginStateRepository(db);
const state = await repo.get("test-seo");
expect(state?.source).toBe("marketplace");
expect(state?.marketplaceVersion).toBe("1.0.0");
expect(state?.status).toBe("active");
});
it("rejects install if plugin already installed", async () => {
// Pre-install the plugin
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// No fetch mocks are queued here: the already-installed guard is
// expected to fire before any marketplace request is made.
// NOTE(review): confirm the handler checks installed state first —
// otherwise the unmocked fetchSpy would return undefined and throw.
const result = await handleMarketplaceInstall(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("ALREADY_INSTALLED");
});
it("rejects when manifest ID doesn't match requested plugin", async () => {
// Bundle's manifest declares a different plugin id than was requested.
const manifest = mockManifest("wrong-id", "1.0.0");
const bundleBytes = await createMockBundle(manifest);
// Clear checksum so we reach the manifest check
const detail = mockPluginDetail("test-seo", "1.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
const result = await handleMarketplaceInstall(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("MANIFEST_MISMATCH");
});
it("validates checksum against requested pinned version metadata", async () => {
const manifest = mockManifest("test-seo", "1.0.0");
const bundleBytes = await createMockBundle(manifest);
// Latest version (2.0.0) has a mismatching checksum; the pinned 1.0.0
// entry below has an empty checksum, so install must succeed only if
// the handler validates against the pinned version's metadata.
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "different-checksum";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
// Mock: version-list lookup for the pinned version.
fetchSpy.mockResolvedValueOnce(
new Response(
JSON.stringify({
items: [
{
version: "1.0.0",
minEmDashVersion: null,
bundleSize: 1234,
checksum: "",
changelog: null,
capabilities: ["hooks"],
auditVerdict: "pass",
imageAuditVerdict: "pass",
publishedAt: "2026-01-01T00:00:00Z",
},
],
}),
{ status: 200 },
),
);
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
fetchSpy.mockResolvedValueOnce(new Response("OK", { status: 200 }));
const result = await handleMarketplaceInstall(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
{ version: "1.0.0" },
);
expect(result.success).toBe(true);
});
});
// ── Update ─────────────────────────────────────────────────────
describe("handleMarketplaceUpdate", () => {
// As with install, fetchSpy mocks are consumed in queue order.
it("returns error when plugin not found", async () => {
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"nonexistent",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
it("returns error when plugin is not from marketplace", async () => {
// Insert a config-sourced plugin
const repo = new PluginStateRepository(db);
await repo.upsert("config-plugin", "1.0.0", "active");
// Config-sourced plugins are invisible to the marketplace update path.
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"config-plugin",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
it("returns error when already up to date", async () => {
// Install v1.0.0
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Mock getPlugin returning same version
fetchSpy.mockResolvedValueOnce(
new Response(JSON.stringify(mockPluginDetail("test-seo", "1.0.0")), { status: 200 }),
);
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("ALREADY_UP_TO_DATE");
});
it("rejects update on checksum mismatch", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "expected-checksum";
// NOTE(review): the detail response is queued twice — presumably the
// update path fetches plugin detail at two points; verify against the
// handler implementation.
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
const bundleBytes = await createMockBundle(mockManifest("test-seo", "2.0.0"));
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
{ confirmCapabilityChanges: true },
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("CHECKSUM_MISMATCH");
});
it("rejects update when bundle manifest version mismatches target", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
// Bundle claims 9.9.9 while the marketplace advertised 2.0.0.
const wrongVersionManifest = mockManifest("test-seo", "9.9.9");
const bundleBytes = await createMockBundle(wrongVersionManifest);
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
{ confirmCapabilityChanges: true },
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("MANIFEST_VERSION_MISMATCH");
});
it("requires confirmation for capability escalation", async () => {
// Install v1.0.0 with only "hooks" capability
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Store old bundle in R2 (needed for capability diff)
const oldManifest = mockManifest("test-seo", "1.0.0");
const encoder = new TextEncoder();
await storage.upload({
key: "marketplace/test-seo/1.0.0/manifest.json",
body: encoder.encode(JSON.stringify(oldManifest)),
contentType: "application/json",
});
await storage.upload({
key: "marketplace/test-seo/1.0.0/backend.js",
body: encoder.encode("export default {};"),
contentType: "application/javascript",
});
// New version has additional capability
const newManifest = {
...mockManifest("test-seo", "2.0.0"),
capabilities: ["content:read", "network:request"],
};
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
// Mock getPlugin
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
// Mock downloadBundle
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
// No confirmCapabilityChanges option → must be rejected with the diff.
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("CAPABILITY_ESCALATION");
expect(result.error?.details?.capabilityChanges).toBeDefined();
});
it("succeeds with confirmCapabilityChanges flag", async () => {
// Same setup as the escalation test, but the caller confirms.
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Store old bundle
const encoder = new TextEncoder();
const oldManifest = mockManifest("test-seo", "1.0.0");
await storage.upload({
key: "marketplace/test-seo/1.0.0/manifest.json",
body: encoder.encode(JSON.stringify(oldManifest)),
contentType: "application/json",
});
await storage.upload({
key: "marketplace/test-seo/1.0.0/backend.js",
body: encoder.encode("export default {};"),
contentType: "application/javascript",
});
const newManifest = {
...mockManifest("test-seo", "2.0.0"),
capabilities: ["content:read", "network:request"],
};
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
{ confirmCapabilityChanges: true },
);
expect(result.success).toBe(true);
expect(result.data?.oldVersion).toBe("1.0.0");
expect(result.data?.newVersion).toBe("2.0.0");
expect(result.data?.capabilityChanges.added).toContain("network:request");
});
it("treats deprecated → current capability rename as no change", async () => {
// Installed version declared the legacy name; new version
// declares the canonical name. diffCapabilities normalizes
// both sides, so the diff should be empty — no spurious
// "capability changed" prompt for a pure rename.
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
const encoder = new TextEncoder();
const oldManifest = {
...mockManifest("test-seo", "1.0.0"),
capabilities: ["read:content"],
};
await storage.upload({
key: "marketplace/test-seo/1.0.0/manifest.json",
body: encoder.encode(JSON.stringify(oldManifest)),
contentType: "application/json",
});
await storage.upload({
key: "marketplace/test-seo/1.0.0/backend.js",
body: encoder.encode("export default {};"),
contentType: "application/javascript",
});
const newManifest = {
...mockManifest("test-seo", "2.0.0"),
capabilities: ["content:read"],
};
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
const detail = mockPluginDetail("test-seo", "2.0.0");
detail.latestVersion!.checksum = "";
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
// No `confirmCapabilityChanges` — if the diff were non-empty,
// this would fail with CAPABILITY_ESCALATION.
const result = await handleMarketplaceUpdate(
db,
storage,
sandboxRunner,
MARKETPLACE_URL,
"test-seo",
);
expect(result.success).toBe(true);
expect(result.data?.capabilityChanges.added).toEqual([]);
expect(result.data?.capabilityChanges.removed).toEqual([]);
});
});
// ── Uninstall ──────────────────────────────────────────────────
describe("handleMarketplaceUninstall", () => {
it("returns error when plugin not found", async () => {
const result = await handleMarketplaceUninstall(db, storage, "nonexistent");
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
it("returns error when plugin is not from marketplace", async () => {
// Config-sourced plugins cannot be uninstalled via the marketplace API.
const repo = new PluginStateRepository(db);
await repo.upsert("config-plugin", "1.0.0", "active");
const result = await handleMarketplaceUninstall(db, storage, "config-plugin");
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
it("successfully uninstalls a marketplace plugin", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Store bundle files that should be cleaned up
const encoder = new TextEncoder();
await storage.upload({
key: "marketplace/test-seo/1.0.0/manifest.json",
body: encoder.encode("{}"),
contentType: "application/json",
});
await storage.upload({
key: "marketplace/test-seo/1.0.0/backend.js",
body: encoder.encode(""),
contentType: "application/javascript",
});
const result = await handleMarketplaceUninstall(db, storage, "test-seo");
expect(result.success).toBe(true);
expect(result.data?.pluginId).toBe("test-seo");
// Without deleteData, plugin storage rows must be left alone.
expect(result.data?.dataDeleted).toBe(false);
// Verify state was deleted
const state = await repo.get("test-seo");
expect(state).toBeNull();
});
it("deletes plugin storage data when deleteData=true", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Insert some plugin storage data
await db
.insertInto("_plugin_storage")
.values({
plugin_id: "test-seo",
collection: "default",
id: "test-key",
data: JSON.stringify({ foo: "bar" }),
})
.execute();
const result = await handleMarketplaceUninstall(db, storage, "test-seo", {
deleteData: true,
});
expect(result.success).toBe(true);
expect(result.data?.dataDeleted).toBe(true);
// Verify plugin storage data was deleted
const storageRows = await db
.selectFrom("_plugin_storage")
.selectAll()
.where("plugin_id", "=", "test-seo")
.execute();
expect(storageRows).toHaveLength(0);
});
});
// ── Update check ───────────────────────────────────────────────
describe("handleMarketplaceUpdateCheck", () => {
it("returns error when marketplace not configured", async () => {
const result = await handleMarketplaceUpdateCheck(db, undefined);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
});
it("returns empty items when no marketplace plugins installed", async () => {
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
expect(result.success).toBe(true);
expect(result.data?.items).toEqual([]);
});
it("detects available updates", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// Mock getPlugin returning newer version
fetchSpy.mockResolvedValueOnce(
new Response(JSON.stringify(mockPluginDetail("test-seo", "2.0.0")), { status: 200 }),
);
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
expect(result.success).toBe(true);
expect(result.data?.items).toHaveLength(1);
expect(result.data?.items[0]?.hasUpdate).toBe(true);
expect(result.data?.items[0]?.installed).toBe("1.0.0");
expect(result.data?.items[0]?.latest).toBe("2.0.0");
});
it("reports no update when versions match", async () => {
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
fetchSpy.mockResolvedValueOnce(
new Response(JSON.stringify(mockPluginDetail("test-seo", "1.0.0")), { status: 200 }),
);
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
expect(result.success).toBe(true);
expect(result.data?.items[0]?.hasUpdate).toBe(false);
});
it("skips plugins that fail to check", async () => {
// Two installed plugins; the mocked responses below are consumed in
// the order the check iterates them.
const repo = new PluginStateRepository(db);
await repo.upsert("test-seo", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
await repo.upsert("test-analytics", "1.0.0", "active", {
source: "marketplace",
marketplaceVersion: "1.0.0",
});
// First plugin check fails (404 — delisted)
fetchSpy.mockResolvedValueOnce(
new Response(JSON.stringify({ error: "Not found" }), { status: 404 }),
);
// Second plugin check succeeds
fetchSpy.mockResolvedValueOnce(
new Response(JSON.stringify(mockPluginDetail("test-analytics", "2.0.0")), { status: 200 }),
);
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
expect(result.success).toBe(true);
// Only the successful check should appear
expect(result.data?.items).toHaveLength(1);
expect(result.data?.items[0]?.pluginId).toBe("test-analytics");
});
});
// ── Search proxy ───────────────────────────────────────────────
describe("handleMarketplaceSearch", () => {
  it("returns error when marketplace not configured", async () => {
    const outcome = await handleMarketplaceSearch(undefined);
    expect(outcome.success).toBe(false);
    expect(outcome.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
  });
  it("proxies search request to marketplace", async () => {
    // Marketplace replies with an empty result set.
    fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify({ items: [] }), { status: 200 }));
    const outcome = await handleMarketplaceSearch(MARKETPLACE_URL, "seo");
    expect(outcome.success).toBe(true);
    // The query term must be forwarded to the marketplace search endpoint.
    const requestedUrl = fetchSpy.mock.calls[0]![0];
    expect(requestedUrl).toContain("/api/v1/plugins?q=seo");
  });
});
// ── GetPlugin proxy ────────────────────────────────────────────
describe("handleMarketplaceGetPlugin", () => {
  it("returns error when marketplace not configured", async () => {
    const outcome = await handleMarketplaceGetPlugin(undefined, "test-seo");
    expect(outcome.success).toBe(false);
    expect(outcome.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
  });
  it("returns NOT_FOUND for missing plugin", async () => {
    // Marketplace 404 must surface as a NOT_FOUND handler error.
    fetchSpy.mockResolvedValueOnce(
      new Response(JSON.stringify({ error: "Not found" }), { status: 404 }),
    );
    const outcome = await handleMarketplaceGetPlugin(MARKETPLACE_URL, "nonexistent");
    expect(outcome.success).toBe(false);
    expect(outcome.error?.code).toBe("NOT_FOUND");
  });
  it("proxies plugin detail from marketplace", async () => {
    fetchSpy.mockResolvedValueOnce(
      new Response(JSON.stringify(mockPluginDetail()), { status: 200 }),
    );
    const outcome = await handleMarketplaceGetPlugin(MARKETPLACE_URL, "test-seo");
    expect(outcome.success).toBe(true);
  });
});
});

View File

@@ -0,0 +1,338 @@
import { describe, expect, it } from "vitest";
import { generateOpenApiDocument } from "../../../src/api/openapi/document.js";
describe("OpenAPI document generation", () => {
it("generates a valid OpenAPI 3.1 document", () => {
  const doc = generateOpenApiDocument();
  expect(doc.openapi).toBe("3.1.0");
  expect(doc.info.title).toBe("EmDash CMS API");
  expect(doc.info.version).toBe("0.1.0");
});
// Each "includes … paths" test collects the registered path keys once and
// checks membership for every expected route.
it("includes content paths", () => {
  const registered = Object.keys(generateOpenApiDocument().paths ?? {});
  const expected = [
    "/_emdash/api/content/{collection}",
    "/_emdash/api/content/{collection}/{id}",
    "/_emdash/api/content/{collection}/{id}/publish",
    "/_emdash/api/content/{collection}/{id}/schedule",
    "/_emdash/api/content/{collection}/{id}/duplicate",
    "/_emdash/api/content/{collection}/{id}/compare",
    "/_emdash/api/content/{collection}/{id}/translations",
    "/_emdash/api/content/{collection}/trash",
  ];
  for (const route of expected) {
    expect(registered).toContain(route);
  }
});
it("includes media paths", () => {
  const registered = Object.keys(generateOpenApiDocument().paths ?? {});
  const expected = [
    "/_emdash/api/media",
    "/_emdash/api/media/{id}",
    "/_emdash/api/media/upload-url",
    "/_emdash/api/media/{id}/confirm",
  ];
  for (const route of expected) {
    expect(registered).toContain(route);
  }
});
it("includes schema paths", () => {
  const registered = Object.keys(generateOpenApiDocument().paths ?? {});
  const expected = [
    "/_emdash/api/schema/collections",
    "/_emdash/api/schema/collections/{slug}",
    "/_emdash/api/schema/collections/{slug}/fields",
    "/_emdash/api/schema/collections/{slug}/fields/{fieldSlug}",
    "/_emdash/api/schema/orphans",
  ];
  for (const route of expected) {
    expect(registered).toContain(route);
  }
});
it("includes comments paths", () => {
  const registered = Object.keys(generateOpenApiDocument().paths ?? {});
  const expected = [
    "/_emdash/api/comments/{collection}/{contentId}",
    "/_emdash/api/admin/comments",
    "/_emdash/api/admin/comments/counts",
    "/_emdash/api/admin/comments/bulk",
    "/_emdash/api/admin/comments/{id}",
  ];
  for (const route of expected) {
    expect(registered).toContain(route);
  }
});
it("includes taxonomy paths", () => {
  const registered = Object.keys(generateOpenApiDocument().paths ?? {});
  const expected = [
    "/_emdash/api/taxonomies",
    "/_emdash/api/taxonomies/{name}/terms",
    "/_emdash/api/taxonomies/{name}/terms/{slug}",
  ];
  for (const route of expected) {
    expect(registered).toContain(route);
  }
});
it("includes menu paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/menus");
expect(paths).toContain("/_emdash/api/menus/{name}");
expect(paths).toContain("/_emdash/api/menus/{name}/items");
expect(paths).toContain("/_emdash/api/menus/{name}/reorder");
});
it("includes section paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/sections");
expect(paths).toContain("/_emdash/api/sections/{slug}");
});
it("includes widget paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/widget-areas");
expect(paths).toContain("/_emdash/api/widget-areas/{name}");
expect(paths).toContain("/_emdash/api/widget-areas/{name}/widgets");
expect(paths).toContain("/_emdash/api/widget-areas/{name}/widgets/{id}");
expect(paths).toContain("/_emdash/api/widget-areas/{name}/reorder");
});
it("includes settings paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/settings");
});
it("includes search paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/search");
expect(paths).toContain("/_emdash/api/search/suggest");
expect(paths).toContain("/_emdash/api/search/rebuild");
expect(paths).toContain("/_emdash/api/search/enable");
expect(paths).toContain("/_emdash/api/search/stats");
});
it("includes redirect paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/redirects");
expect(paths).toContain("/_emdash/api/redirects/{id}");
expect(paths).toContain("/_emdash/api/redirects/404s");
expect(paths).toContain("/_emdash/api/redirects/404s/summary");
});
it("includes user paths", () => {
const doc = generateOpenApiDocument();
const paths = Object.keys(doc.paths ?? {});
expect(paths).toContain("/_emdash/api/admin/users");
expect(paths).toContain("/_emdash/api/admin/users/{id}");
expect(paths).toContain("/_emdash/api/admin/users/{id}/disable");
expect(paths).toContain("/_emdash/api/admin/users/{id}/enable");
expect(paths).toContain("/_emdash/api/admin/allowed-domains");
expect(paths).toContain("/_emdash/api/admin/allowed-domains/{domain}");
});
it("has correct HTTP methods on content collection endpoint", () => {
const doc = generateOpenApiDocument();
const collectionPath = doc.paths?.["/_emdash/api/content/{collection}"];
expect(collectionPath).toBeDefined();
expect(collectionPath).toHaveProperty("get");
expect(collectionPath).toHaveProperty("post");
});
it("has correct HTTP methods on content item endpoint", () => {
const doc = generateOpenApiDocument();
const itemPath = doc.paths?.["/_emdash/api/content/{collection}/{id}"];
expect(itemPath).toBeDefined();
expect(itemPath).toHaveProperty("get");
expect(itemPath).toHaveProperty("put");
expect(itemPath).toHaveProperty("delete");
});
it("generates unique operation IDs for all operations", () => {
const doc = generateOpenApiDocument();
const operationIds: string[] = [];
for (const pathItem of Object.values(doc.paths ?? {})) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { operationId?: string }
| undefined;
if (op?.operationId) {
operationIds.push(op.operationId);
}
}
}
// Content operations
expect(operationIds).toContain("listContent");
expect(operationIds).toContain("createContent");
expect(operationIds).toContain("getContent");
expect(operationIds).toContain("updateContent");
expect(operationIds).toContain("deleteContent");
expect(operationIds).toContain("publishContent");
expect(operationIds).toContain("duplicateContent");
// Media operations
expect(operationIds).toContain("listMedia");
expect(operationIds).toContain("getMedia");
expect(operationIds).toContain("deleteMedia");
expect(operationIds).toContain("getMediaUploadUrl");
// Schema operations
expect(operationIds).toContain("listCollections");
expect(operationIds).toContain("createCollection");
expect(operationIds).toContain("listFields");
expect(operationIds).toContain("createField");
// Comments operations
expect(operationIds).toContain("listPublicComments");
expect(operationIds).toContain("createComment");
expect(operationIds).toContain("listAdminComments");
expect(operationIds).toContain("bulkCommentAction");
// Taxonomy operations
expect(operationIds).toContain("listTaxonomies");
expect(operationIds).toContain("listTerms");
expect(operationIds).toContain("createTerm");
// Menu operations
expect(operationIds).toContain("listMenus");
expect(operationIds).toContain("createMenu");
expect(operationIds).toContain("createMenuItem");
// Section operations
expect(operationIds).toContain("listSections");
expect(operationIds).toContain("createSection");
// Widget operations
expect(operationIds).toContain("listWidgetAreas");
expect(operationIds).toContain("createWidget");
// Settings operations
expect(operationIds).toContain("getSettings");
expect(operationIds).toContain("updateSettings");
// Search operations
expect(operationIds).toContain("search");
expect(operationIds).toContain("rebuildSearchIndex");
// Redirect operations
expect(operationIds).toContain("listRedirects");
expect(operationIds).toContain("createRedirect");
expect(operationIds).toContain("listNotFoundEntries");
// User operations
expect(operationIds).toContain("listUsers");
expect(operationIds).toContain("getUser");
expect(operationIds).toContain("disableUser");
// No duplicate operation IDs
const uniqueIds = new Set(operationIds);
expect(uniqueIds.size).toBe(operationIds.length);
});
it("includes reusable component schemas", () => {
const doc = generateOpenApiDocument();
const schemas = doc.components?.schemas ?? {};
// Content schemas
expect(schemas).toHaveProperty("ContentCreateBody");
expect(schemas).toHaveProperty("ContentUpdateBody");
expect(schemas).toHaveProperty("ContentItem");
expect(schemas).toHaveProperty("ContentResponse");
expect(schemas).toHaveProperty("ContentListResponse");
// Media schemas
expect(schemas).toHaveProperty("MediaItem");
expect(schemas).toHaveProperty("MediaListResponse");
// Schema schemas
expect(schemas).toHaveProperty("Collection");
expect(schemas).toHaveProperty("CollectionListResponse");
// Comment schemas
expect(schemas).toHaveProperty("PublicComment");
expect(schemas).toHaveProperty("Comment");
expect(schemas).toHaveProperty("CommentBulkBody");
// Taxonomy schemas
expect(schemas).toHaveProperty("Term");
expect(schemas).toHaveProperty("TermListResponse");
// Menu schemas
expect(schemas).toHaveProperty("MenuWithItems");
// User schemas
expect(schemas).toHaveProperty("User");
expect(schemas).toHaveProperty("UserListResponse");
});
it("wraps success responses in { data } envelope", () => {
const doc = generateOpenApiDocument();
const listPath = doc.paths?.["/_emdash/api/content/{collection}"];
const getResponse = (listPath as Record<string, unknown>)?.get as {
responses: Record<string, { content: Record<string, { schema: Record<string, unknown> }> }>;
};
const schema = getResponse?.responses?.["200"]?.content?.["application/json"]?.schema;
expect(schema).toBeDefined();
// The envelope should have a "data" property
expect(schema).toHaveProperty("properties");
const props = (schema as Record<string, unknown>).properties as Record<string, unknown>;
expect(props).toHaveProperty("data");
});
it("includes error response schemas", () => {
const doc = generateOpenApiDocument();
const listPath = doc.paths?.["/_emdash/api/content/{collection}"];
const getOp = (listPath as Record<string, unknown>)?.get as {
responses: Record<string, unknown>;
};
// Should have auth error responses
expect(getOp?.responses).toHaveProperty("401");
expect(getOp?.responses).toHaveProperty("403");
});
it("includes security schemes", () => {
const doc = generateOpenApiDocument();
const schemes = doc.components?.securitySchemes;
expect(schemes).toHaveProperty("session");
expect(schemes).toHaveProperty("bearer");
});
it("tags all 12 domains", () => {
const doc = generateOpenApiDocument();
const tagNames = (doc.tags ?? []).map((t: { name: string }) => t.name);
expect(tagNames).toContain("Content");
expect(tagNames).toContain("Media");
expect(tagNames).toContain("Schema");
expect(tagNames).toContain("Comments");
expect(tagNames).toContain("Taxonomies");
expect(tagNames).toContain("Menus");
expect(tagNames).toContain("Sections");
expect(tagNames).toContain("Widgets");
expect(tagNames).toContain("Settings");
expect(tagNames).toContain("Search");
expect(tagNames).toContain("Redirects");
expect(tagNames).toContain("Users");
expect(tagNames).toHaveLength(12);
});
it("produces valid JSON output", () => {
const doc = generateOpenApiDocument();
const json = JSON.stringify(doc);
// Should not throw
const parsed = JSON.parse(json);
expect(parsed.openapi).toBe("3.1.0");
});
});

View File

@@ -0,0 +1,122 @@
/**
* Tests for SEC-07: ownership extraction bugs (#12, #13, #14, #16)
*
* Verifies that handler response shapes carry authorId correctly
* and that ownership-related operations work as expected.
*/
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleContentCreate,
handleContentGet,
handleContentGetIncludingTrashed,
handleContentDelete,
handleContentDuplicate,
handleMediaCreate,
} from "../../../src/api/index.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("SEC-07: Ownership extraction", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
describe("#12: handleContentGet returns authorId inside data.item", () => {
it("should expose authorId at data.item level, not data level", async () => {
const created = await handleContentCreate(db, "post", {
data: { title: "Owned Post" },
authorId: "user_author_123",
});
expect(created.success).toBe(true);
const result = await handleContentGet(db, "post", created.data!.item.id);
expect(result.success).toBe(true);
// The route pattern extracts: existing.data.item.authorId
// If authorId were only on data (wrong), ownership checks would always fail
const data = result.data as Record<string, unknown>;
const item = data.item as Record<string, unknown>;
expect(item.authorId).toBe("user_author_123");
// data level should NOT have authorId directly
expect(data.authorId).toBeUndefined();
});
it("should expose authorId at data.item level for trashed items", async () => {
const created = await handleContentCreate(db, "post", {
data: { title: "Trashed Post" },
authorId: "user_trash_owner",
});
expect(created.success).toBe(true);
await handleContentDelete(db, "post", created.data!.item.id);
const result = await handleContentGetIncludingTrashed(db, "post", created.data!.item.id);
expect(result.success).toBe(true);
const data = result.data as Record<string, unknown>;
const item = data.item as Record<string, unknown>;
expect(item.authorId).toBe("user_trash_owner");
expect(data.authorId).toBeUndefined();
});
});
describe("#14: handleContentDuplicate uses caller's authorId", () => {
it("should set the duplicate's authorId to the provided caller ID", async () => {
const original = await handleContentCreate(db, "post", {
data: { title: "Original Post" },
authorId: "original_author",
});
expect(original.success).toBe(true);
// Duplicate as a different user
const dup = await handleContentDuplicate(db, "post", original.data!.item.id, "caller_user");
expect(dup.success).toBe(true);
expect(dup.data?.item.authorId).toBe("caller_user");
});
it("should fall back to original authorId when caller ID not provided", async () => {
const original = await handleContentCreate(db, "post", {
data: { title: "Fallback Post" },
authorId: "original_author",
});
expect(original.success).toBe(true);
const dup = await handleContentDuplicate(db, "post", original.data!.item.id);
expect(dup.success).toBe(true);
expect(dup.data?.item.authorId).toBe("original_author");
});
});
describe("#16: handleMediaCreate persists authorId", () => {
it("should store authorId on created media item", async () => {
const result = await handleMediaCreate(db, {
filename: "photo.jpg",
mimeType: "image/jpeg",
storageKey: "test_key_123.jpg",
authorId: "media_uploader",
});
expect(result.success).toBe(true);
expect(result.data?.item.authorId).toBe("media_uploader");
});
it("should set authorId to null when not provided", async () => {
const result = await handleMediaCreate(db, {
filename: "orphan.jpg",
mimeType: "image/jpeg",
storageKey: "test_key_orphan.jpg",
});
expect(result.success).toBe(true);
expect(result.data?.item.authorId).toBeNull();
});
});
});

View File

@@ -0,0 +1,170 @@
import { afterEach, beforeEach, describe, it, expect } from "vitest";
import {
getPublicOrigin,
getPublicUrl,
getEnvAllowedOrigins,
_resetEnvCache,
} from "../../../src/api/public-url.js";
import type { EmDashConfig } from "../../../src/astro/integration/runtime.js";
// Snapshot (at module load) the env vars these tests mutate, so every test
// can restore the process to its original state afterwards.
const MUTATED_ENV_KEYS = ["EMDASH_SITE_URL", "SITE_URL", "EMDASH_ALLOWED_ORIGINS"] as const;
const envSnapshot: Record<string, string | undefined> = {};
for (const key of MUTATED_ENV_KEYS) {
  envSnapshot[key] = process.env[key];
}
afterEach(() => {
  _resetEnvCache();
  // Restore original env state (delete entries that were originally absent).
  for (const key of MUTATED_ENV_KEYS) {
    const original = envSnapshot[key];
    if (original === undefined) delete process.env[key];
    else process.env[key] = original;
  }
});
// Ensure clean state before every test (no cache, no test env vars).
beforeEach(() => {
  _resetEnvCache();
  for (const key of MUTATED_ENV_KEYS) {
    delete process.env[key];
  }
});
describe("getPublicOrigin()", () => {
it("returns config.siteUrl when set", () => {
const url = new URL("http://localhost:4321/admin");
const config: EmDashConfig = { siteUrl: "https://mysite.example.com" };
expect(getPublicOrigin(url, config)).toBe("https://mysite.example.com");
});
it("returns url.origin when config has no siteUrl", () => {
const url = new URL("http://localhost:4321/admin");
const config: EmDashConfig = {};
expect(getPublicOrigin(url, config)).toBe("http://localhost:4321");
});
it("returns url.origin when config is undefined", () => {
const url = new URL("https://example.com:8443/setup");
expect(getPublicOrigin(url)).toBe("https://example.com:8443");
});
it("returns url.origin when config.siteUrl is undefined", () => {
const url = new URL("http://127.0.0.1:4321/api");
expect(getPublicOrigin(url, { siteUrl: undefined })).toBe("http://127.0.0.1:4321");
});
it("does not return empty string siteUrl (falsy)", () => {
const url = new URL("http://localhost:4321/x");
// Empty string should fall through to url.origin
expect(getPublicOrigin(url, { siteUrl: "" })).toBe("http://localhost:4321");
});
});
describe("getPublicOrigin() env var fallback", () => {
it("falls back to EMDASH_SITE_URL when config has no siteUrl", () => {
process.env.EMDASH_SITE_URL = "https://env.example.com";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("https://env.example.com");
});
it("falls back to SITE_URL when EMDASH_SITE_URL is absent", () => {
process.env.SITE_URL = "https://site-url.example.com";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("https://site-url.example.com");
});
it("prefers EMDASH_SITE_URL over SITE_URL", () => {
process.env.EMDASH_SITE_URL = "https://emdash.example.com";
process.env.SITE_URL = "https://site.example.com";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("https://emdash.example.com");
});
it("normalizes env var to origin (strips path)", () => {
process.env.EMDASH_SITE_URL = "https://env.example.com/some/path";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("https://env.example.com");
});
it("falls through to url.origin when env var is invalid URL", () => {
process.env.EMDASH_SITE_URL = "not-a-url";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("http://localhost:4321");
});
it("config.siteUrl takes precedence over env var", () => {
process.env.EMDASH_SITE_URL = "https://env.example.com";
const url = new URL("http://localhost:4321/x");
const config: EmDashConfig = { siteUrl: "https://config.example.com" };
expect(getPublicOrigin(url, config)).toBe("https://config.example.com");
});
it("cache is invalidated by _resetEnvCache()", () => {
process.env.EMDASH_SITE_URL = "https://first.example.com";
const url = new URL("http://localhost:4321/x");
expect(getPublicOrigin(url, {})).toBe("https://first.example.com");
_resetEnvCache();
process.env.EMDASH_SITE_URL = "https://second.example.com";
expect(getPublicOrigin(url, {})).toBe("https://second.example.com");
});
});
describe("getEnvAllowedOrigins()", () => {
it("returns [] when EMDASH_ALLOWED_ORIGINS is unset", () => {
expect(getEnvAllowedOrigins()).toEqual([]);
});
it("parses a comma-separated list into origins", () => {
process.env.EMDASH_ALLOWED_ORIGINS = "https://example.com,https://preview.example.com";
expect(getEnvAllowedOrigins()).toEqual(["https://example.com", "https://preview.example.com"]);
});
it("trims whitespace around each entry", () => {
process.env.EMDASH_ALLOWED_ORIGINS = " https://example.com , https://preview.example.com ";
expect(getEnvAllowedOrigins()).toEqual(["https://example.com", "https://preview.example.com"]);
});
it("normalizes each entry to its origin (strips path/query)", () => {
process.env.EMDASH_ALLOWED_ORIGINS = "https://example.com/x?y=1";
expect(getEnvAllowedOrigins()).toEqual(["https://example.com"]);
});
it("throws on entries with non-http(s) protocols", () => {
process.env.EMDASH_ALLOWED_ORIGINS = "file:///etc/passwd,https://example.com";
expect(() => getEnvAllowedOrigins()).toThrow(/EMDASH_ALLOWED_ORIGINS.*must be http or https/);
});
it("throws on unparseable entries", () => {
process.env.EMDASH_ALLOWED_ORIGINS = "not-a-url,https://example.com";
expect(() => getEnvAllowedOrigins()).toThrow(/EMDASH_ALLOWED_ORIGINS.*invalid URL/);
});
it("cache is invalidated by _resetEnvCache()", () => {
process.env.EMDASH_ALLOWED_ORIGINS = "https://first.example.com";
expect(getEnvAllowedOrigins()).toEqual(["https://first.example.com"]);
_resetEnvCache();
process.env.EMDASH_ALLOWED_ORIGINS = "https://second.example.com";
expect(getEnvAllowedOrigins()).toEqual(["https://second.example.com"]);
});
});
describe("getPublicUrl()", () => {
it("builds full URL from siteUrl + path", () => {
const url = new URL("http://localhost:4321/x");
const config: EmDashConfig = { siteUrl: "https://mysite.example.com" };
expect(getPublicUrl(url, config, "/_emdash/admin/login")).toBe(
"https://mysite.example.com/_emdash/admin/login",
);
});
it("builds full URL from request origin when no siteUrl", () => {
const url = new URL("http://localhost:4321/x");
expect(getPublicUrl(url, undefined, "/_emdash/admin/login")).toBe(
"http://localhost:4321/_emdash/admin/login",
);
});
});

View File

@@ -0,0 +1,35 @@
import { describe, expect, it } from "vitest";
import { isSafeRedirect } from "#api/redirect.js";
describe("isSafeRedirect", () => {
it("accepts simple relative paths", () => {
expect(isSafeRedirect("/")).toBe(true);
expect(isSafeRedirect("/admin")).toBe(true);
expect(isSafeRedirect("/_emdash/admin")).toBe(true);
expect(isSafeRedirect("/foo/bar?baz=1")).toBe(true);
});
it("rejects protocol-relative URLs (double slash)", () => {
expect(isSafeRedirect("//evil.com")).toBe(false);
expect(isSafeRedirect("//evil.com/path")).toBe(false);
});
it("rejects backslash bypass (/\\evil.com normalizes to //evil.com)", () => {
expect(isSafeRedirect("/\\evil.com")).toBe(false);
expect(isSafeRedirect("/foo\\bar")).toBe(false);
expect(isSafeRedirect("\\evil.com")).toBe(false);
});
it("rejects URLs that do not start with /", () => {
expect(isSafeRedirect("https://evil.com")).toBe(false);
expect(isSafeRedirect("http://evil.com")).toBe(false);
expect(isSafeRedirect("evil.com")).toBe(false);
expect(isSafeRedirect("")).toBe(false);
});
it("rejects null and undefined", () => {
expect(isSafeRedirect(null)).toBe(false);
expect(isSafeRedirect(undefined)).toBe(false);
});
});

View File

@@ -0,0 +1,133 @@
/**
* Unit tests for _rev token generation and validation.
*/
import { describe, it, expect } from "vitest";
import { encodeRev, decodeRev, validateRev } from "../../../src/api/rev.js";
import type { ContentItem } from "../../../src/database/repositories/types.js";
/**
 * Builds a fully populated ContentItem fixture.
 * Individual fields can be overridden per test via `overrides`.
 */
function makeItem(overrides: Partial<ContentItem> = {}): ContentItem {
  const base: ContentItem = {
    id: "item_1",
    type: "posts",
    slug: "test",
    status: "draft",
    data: {},
    authorId: null,
    createdAt: "2026-01-01T00:00:00.000Z",
    updatedAt: "2026-01-15T12:30:00.000Z",
    publishedAt: null,
    scheduledAt: null,
    liveRevisionId: null,
    draftRevisionId: null,
    version: 3,
  };
  return { ...base, ...overrides };
}
describe("encodeRev", () => {
it("produces a base64-encoded string", () => {
const item = makeItem();
const rev = encodeRev(item);
expect(rev).toBeTruthy();
// Should be valid base64
expect(() => atob(rev)).not.toThrow();
});
it("encodes version and updatedAt", () => {
const item = makeItem({ version: 5, updatedAt: "2026-02-14T10:00:00.000Z" });
const rev = encodeRev(item);
const decoded = atob(rev);
expect(decoded).toBe("5:2026-02-14T10:00:00.000Z");
});
it("produces different revs for different versions", () => {
const rev1 = encodeRev(makeItem({ version: 1 }));
const rev2 = encodeRev(makeItem({ version: 2 }));
expect(rev1).not.toBe(rev2);
});
it("produces different revs for different updatedAt", () => {
const rev1 = encodeRev(makeItem({ updatedAt: "2026-01-01T00:00:00.000Z" }));
const rev2 = encodeRev(makeItem({ updatedAt: "2026-01-02T00:00:00.000Z" }));
expect(rev1).not.toBe(rev2);
});
});
describe("decodeRev", () => {
it("decodes a valid rev", () => {
const rev = btoa("5:2026-02-14T10:00:00.000Z");
const result = decodeRev(rev);
expect(result).not.toBeNull();
expect(result!.version).toBe(5);
expect(result!.updatedAt).toBe("2026-02-14T10:00:00.000Z");
});
it("returns null for invalid base64", () => {
expect(decodeRev("not-valid-base64!!!")).toBeNull();
});
it("returns null for missing colon", () => {
expect(decodeRev(btoa("nocolon"))).toBeNull();
});
it("returns null for non-numeric version", () => {
expect(decodeRev(btoa("abc:2026-01-01"))).toBeNull();
});
it("round-trips with encodeRev", () => {
const item = makeItem({ version: 7, updatedAt: "2026-03-01T08:15:30.000Z" });
const rev = encodeRev(item);
const decoded = decodeRev(rev);
expect(decoded).not.toBeNull();
expect(decoded!.version).toBe(7);
expect(decoded!.updatedAt).toBe("2026-03-01T08:15:30.000Z");
});
});
describe("validateRev", () => {
it("returns valid when no rev is provided", () => {
const result = validateRev(undefined, makeItem());
expect(result.valid).toBe(true);
});
it("returns valid when rev matches", () => {
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
const rev = encodeRev(item);
const result = validateRev(rev, item);
expect(result.valid).toBe(true);
});
it("returns invalid when version mismatches", () => {
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
const staleRev = btoa("2:2026-01-15T12:30:00.000Z"); // Version 2, but item is at 3
const result = validateRev(staleRev, item);
expect(result.valid).toBe(false);
if (!result.valid) {
expect(result.message).toContain("modified");
}
});
it("returns invalid when updatedAt mismatches", () => {
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
const staleRev = btoa("3:2026-01-14T00:00:00.000Z"); // Right version, wrong timestamp
const result = validateRev(staleRev, item);
expect(result.valid).toBe(false);
});
it("returns invalid for malformed rev", () => {
const result = validateRev("garbage", makeItem());
expect(result.valid).toBe(false);
if (!result.valid) {
expect(result.message).toContain("Malformed");
}
});
});

View File

@@ -0,0 +1,230 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleRevisionList,
handleRevisionGet,
handleRevisionRestore,
} from "../../../src/api/index.js";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import { RevisionRepository } from "../../../src/database/repositories/revision.js";
import type { Database } from "../../../src/database/types.js";
import { createPostFixture } from "../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("Revision Handlers", () => {
let db: Kysely<Database>;
let contentRepo: ContentRepository;
let revisionRepo: RevisionRepository;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
contentRepo = new ContentRepository(db);
revisionRepo = new RevisionRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
describe("handleRevisionList", () => {
it("should return empty list when no revisions exist", async () => {
const content = await contentRepo.create(createPostFixture());
const result = await handleRevisionList(db, "post", content.id, {});
expect(result.success).toBe(true);
expect(result.data?.items).toEqual([]);
expect(result.data?.total).toBe(0);
});
it("should return revisions for a content entry", async () => {
const content = await contentRepo.create(createPostFixture());
// Create some revisions with small delay to ensure distinct ULIDs
await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "Version 1", content: "First version" },
});
// Small delay to ensure ULID timestamp differs
await new Promise((resolve) => setTimeout(resolve, 2));
await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "Version 2", content: "Second version" },
});
const result = await handleRevisionList(db, "post", content.id, {});
expect(result.success).toBe(true);
expect(result.data?.items).toHaveLength(2);
expect(result.data?.total).toBe(2);
// Should be newest first
expect(result.data?.items[0].data.title).toBe("Version 2");
expect(result.data?.items[1].data.title).toBe("Version 1");
});
it("should respect limit parameter", async () => {
const content = await contentRepo.create(createPostFixture());
// Create 5 revisions
for (let i = 1; i <= 5; i++) {
await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: `Version ${i}` },
});
}
const result = await handleRevisionList(db, "post", content.id, {
limit: 3,
});
expect(result.success).toBe(true);
expect(result.data?.items).toHaveLength(3);
expect(result.data?.total).toBe(5); // Total still reflects all revisions
});
it("should not return revisions from other entries", async () => {
const content1 = await contentRepo.create(createPostFixture());
const content2 = await contentRepo.create({
...createPostFixture(),
slug: "another-post",
});
await revisionRepo.create({
collection: "post",
entryId: content1.id,
data: { title: "Content 1 revision" },
});
await revisionRepo.create({
collection: "post",
entryId: content2.id,
data: { title: "Content 2 revision" },
});
const result = await handleRevisionList(db, "post", content1.id, {});
expect(result.success).toBe(true);
expect(result.data?.items).toHaveLength(1);
expect(result.data?.items[0].data.title).toBe("Content 1 revision");
});
});
describe("handleRevisionGet", () => {
it("should return a revision by ID", async () => {
const content = await contentRepo.create(createPostFixture());
const revision = await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "Test Revision" },
});
const result = await handleRevisionGet(db, revision.id);
expect(result.success).toBe(true);
expect(result.data?.item.id).toBe(revision.id);
expect(result.data?.item.data.title).toBe("Test Revision");
});
it("should return NOT_FOUND for non-existent revision", async () => {
const result = await handleRevisionGet(db, "nonexistent-id");
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
});
describe("handleRevisionRestore", () => {
const callerUserId = "user_caller_123";
it("should restore content to a previous revision", async () => {
const content = await contentRepo.create({
...createPostFixture(),
data: { title: "Original", content: "Original content" },
});
// Create a revision with the original state
const originalRevision = await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "Original", content: "Original content" },
});
// Update the content
await contentRepo.update("post", content.id, {
data: { title: "Updated", content: "Updated content" },
});
// Restore to original revision
const result = await handleRevisionRestore(db, originalRevision.id, callerUserId);
expect(result.success).toBe(true);
expect(result.data?.item.data.title).toBe("Original");
expect(result.data?.item.data.content).toBe("Original content");
});
it("should create a new revision when restoring", async () => {
const content = await contentRepo.create(createPostFixture());
const revision = await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "To restore" },
});
const beforeCount = await revisionRepo.countByEntry("post", content.id);
await handleRevisionRestore(db, revision.id, callerUserId);
const afterCount = await revisionRepo.countByEntry("post", content.id);
expect(afterCount).toBe(beforeCount + 1);
});
it("should attribute the new revision to the caller", async () => {
const content = await contentRepo.create(createPostFixture());
const revision = await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "To restore" },
authorId: "original_author",
});
await handleRevisionRestore(db, revision.id, callerUserId);
// The newest revision (restore record) should be attributed to the caller
const latestRevision = await revisionRepo.findLatest("post", content.id);
expect(latestRevision).not.toBeNull();
expect(latestRevision!.authorId).toBe(callerUserId);
});
it("should handle revision data containing _slug", async () => {
const content = await contentRepo.create({
...createPostFixture(),
data: { title: "Original" },
});
// Revision data includes _slug (added by runtime when slug changes)
const revision = await revisionRepo.create({
collection: "post",
entryId: content.id,
data: { title: "With slug change", _slug: "new-slug" },
});
const result = await handleRevisionRestore(db, revision.id, callerUserId);
expect(result.success).toBe(true);
expect(result.data?.item.data.title).toBe("With slug change");
expect(result.data?.item.slug).toBe("new-slug");
});
it("should return NOT_FOUND for non-existent revision", async () => {
const result = await handleRevisionRestore(db, "nonexistent-id", callerUserId);
expect(result.success).toBe(false);
expect(result.error?.code).toBe("NOT_FOUND");
});
});
});

View File

@@ -0,0 +1,226 @@
import { describe, it, expect } from "vitest";
import {
contentCreateBody,
contentUpdateBody,
httpUrl,
mediaUploadUrlBody,
DEFAULT_MAX_UPLOAD_SIZE,
} from "../../../src/api/schemas/index.js";
describe("contentCreateBody schema", () => {
  // Status handling: only "draft" (or nothing) is accepted on create.
  it("accepts status 'draft'", () => {
    const parsed = contentCreateBody.parse({ data: { title: "Hi" }, status: "draft" });
    expect(parsed.status).toBe("draft");
  });
  it("accepts omitted status", () => {
    const parsed = contentCreateBody.parse({ data: { title: "Hi" } });
    expect(parsed.status).toBeUndefined();
  });
  it("rejects status 'published'", () => {
    const attempt = () => contentCreateBody.parse({ data: { title: "Hi" }, status: "published" });
    expect(attempt).toThrow();
  });
  it("rejects status 'scheduled'", () => {
    const attempt = () => contentCreateBody.parse({ data: { title: "Hi" }, status: "scheduled" });
    expect(attempt).toThrow();
  });
  // Timestamp fields: full ISO 8601 datetimes pass through untouched.
  it("preserves publishedAt and createdAt when valid ISO 8601 datetimes are provided", () => {
    const iso = "2019-03-15T10:30:00.000Z";
    const parsed = contentCreateBody.parse({
      data: { title: "Hi" },
      publishedAt: iso,
      createdAt: iso,
    });
    expect(parsed.publishedAt).toBe(iso);
    expect(parsed.createdAt).toBe(iso);
  });
  it("accepts offset-suffixed ISO datetimes", () => {
    const parsed = contentCreateBody.parse({
      data: { title: "Hi" },
      publishedAt: "2019-03-15T10:30:00+00:00",
    });
    expect(parsed.publishedAt).toBe("2019-03-15T10:30:00+00:00");
  });
  it("rejects malformed datetime strings", () => {
    const vague = () => contentCreateBody.parse({ data: { title: "Hi" }, publishedAt: "yesterday" });
    const dateOnly = () => contentCreateBody.parse({ data: { title: "Hi" }, createdAt: "2019-03-15" });
    expect(vague).toThrow();
    expect(dateOnly).toThrow();
  });
  it("accepts null to explicitly clear the field", () => {
    const parsed = contentCreateBody.parse({ data: { title: "Hi" }, publishedAt: null });
    expect(parsed.publishedAt).toBeNull();
  });
});
describe("contentUpdateBody schema", () => {
  // skipRevision is an optional pass-through flag on update payloads.
  it("should pass through skipRevision when present", () => {
    const parsed = contentUpdateBody.parse({ data: { title: "Hello" }, skipRevision: true });
    expect(parsed.skipRevision).toBe(true);
  });
  it("should accept updates without skipRevision", () => {
    const parsed = contentUpdateBody.parse({ data: { title: "Hello" } });
    expect(parsed.skipRevision).toBeUndefined();
  });
  // Status handling mirrors the create schema: only "draft" or absent.
  it("accepts status 'draft'", () => {
    const parsed = contentUpdateBody.parse({ data: { title: "Hi" }, status: "draft" });
    expect(parsed.status).toBe("draft");
  });
  it("accepts omitted status", () => {
    const parsed = contentUpdateBody.parse({ data: { title: "Hi" } });
    expect(parsed.status).toBeUndefined();
  });
  it("rejects status 'published'", () => {
    const attempt = () => contentUpdateBody.parse({ data: { title: "Hi" }, status: "published" });
    expect(attempt).toThrow();
  });
  it("rejects status 'scheduled'", () => {
    const attempt = () => contentUpdateBody.parse({ data: { title: "Hi" }, status: "scheduled" });
    expect(attempt).toThrow();
  });
  it("preserves publishedAt when a valid ISO 8601 datetime is provided", () => {
    const iso = "2019-03-15T10:30:00.000Z";
    const parsed = contentUpdateBody.parse({ data: { title: "Hi" }, publishedAt: iso });
    expect(parsed.publishedAt).toBe(iso);
  });
  it("rejects malformed publishedAt strings", () => {
    const attempt = () => contentUpdateBody.parse({ data: { title: "Hi" }, publishedAt: "yesterday" });
    expect(attempt).toThrow();
  });
  it("strips createdAt — treat created_at as immutable on update", () => {
    // Cast needed: createdAt is not part of the update input type, but a
    // caller could still send it over the wire; the schema must drop it.
    const input = {
      data: { title: "Hi" },
      createdAt: "2019-03-15T10:30:00.000Z",
    } as Parameters<typeof contentUpdateBody.parse>[0];
    const parsed = contentUpdateBody.parse(input);
    expect("createdAt" in parsed).toBe(false);
  });
});
describe("httpUrl validator", () => {
  // Only http(s) schemes are allowed; valid URLs are returned verbatim.
  it("accepts http URLs", () => {
    const value = httpUrl.parse("http://example.com");
    expect(value).toBe("http://example.com");
  });
  it("accepts https URLs", () => {
    const value = httpUrl.parse("https://example.com/path?q=1");
    expect(value).toBe("https://example.com/path?q=1");
  });
  // Dangerous or non-web schemes must be rejected outright.
  it("rejects javascript: URIs", () => {
    const attempt = () => httpUrl.parse("javascript:alert(1)");
    expect(attempt).toThrow();
  });
  it("rejects data: URIs", () => {
    const attempt = () => httpUrl.parse("data:text/html,<script>alert(1)</script>");
    expect(attempt).toThrow();
  });
  it("rejects ftp: URIs", () => {
    const attempt = () => httpUrl.parse("ftp://example.com");
    expect(attempt).toThrow();
  });
  it("rejects empty string", () => {
    const attempt = () => httpUrl.parse("");
    expect(attempt).toThrow();
  });
  it("rejects non-URL strings", () => {
    const attempt = () => httpUrl.parse("not a url");
    expect(attempt).toThrow();
  });
  it("is case-insensitive for scheme", () => {
    const value = httpUrl.parse("HTTPS://EXAMPLE.COM");
    expect(value).toBe("HTTPS://EXAMPLE.COM");
  });
});
describe("mediaUploadUrlBody schema factory", () => {
  // Build a syntactically valid upload body of the given byte size.
  const payload = (size: number) => ({ filename: "a.jpg", contentType: "image/jpeg", size });
  // Parse `size` under a schema built for `limit`; return the thrown
  // error's string form, or "" when parsing succeeds.
  const errorTextFor = (limit: number, size: number): string => {
    try {
      mediaUploadUrlBody(limit).parse(payload(size));
      return "";
    } catch (e) {
      return String(e);
    }
  };
  it("DEFAULT_MAX_UPLOAD_SIZE is 50 MB", () => {
    expect(DEFAULT_MAX_UPLOAD_SIZE).toBe(50 * 1024 * 1024);
  });
  it("rejects size above the configured limit", () => {
    const attempt = () => mediaUploadUrlBody(1_000).parse(payload(1_001));
    expect(attempt).toThrow();
  });
  it("accepts size equal to the configured limit", () => {
    const parsed = mediaUploadUrlBody(1_000).parse(payload(1_000));
    expect(parsed.size).toBe(1_000);
  });
  it("accepts size below the configured limit", () => {
    const parsed = mediaUploadUrlBody(1_000).parse(payload(500));
    expect(parsed.size).toBe(500);
  });
  it("each call returns an independent schema with its own limit", () => {
    // The factory must not share state: a strict schema rejecting a size
    // must not affect a looser schema created separately.
    const strict = mediaUploadUrlBody(100);
    const loose = mediaUploadUrlBody(1_000_000);
    expect(() => strict.parse(payload(500))).toThrow();
    expect(() => loose.parse(payload(500))).not.toThrow();
  });
  // The factory validates its own argument eagerly.
  it("throws when maxSize is NaN", () => {
    expect(() => mediaUploadUrlBody(NaN)).toThrow(/maxUploadSize/);
  });
  it("throws when maxSize is 0", () => {
    expect(() => mediaUploadUrlBody(0)).toThrow(/maxUploadSize/);
  });
  it("throws when maxSize is negative", () => {
    expect(() => mediaUploadUrlBody(-1024)).toThrow(/maxUploadSize/);
  });
  it("error message uses whole MB, not fractional", () => {
    const text = errorTextFor(75_000_000, 75_000_001);
    expect(text).not.toBe("");
    expect(text).not.toMatch(/\d+\.\d+MB/);
  });
  it("error message does not overstate the limit in MB", () => {
    // 75_000_000 bytes / 1024 / 1024 ≈ 71.5 MB; floor gives 71, round gives 72
    const text = errorTextFor(75_000_000, 75_000_001);
    expect(text).toContain("71MB");
  });
});