Emdash source with visual editor image upload fix
Fixes: (1) media.ts — wrap placeholder generation in a try/catch; (2) toolbar.ts — check `r.ok` and display the error message in the popover.
This commit is contained in:
56 additions — packages/core/tests/unit/after.test.ts (new file)
@@ -0,0 +1,56 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
// Default stub: no host-provided waitUntil. Mirrors what Node (and
|
||||
// any adapter that doesn't implement the virtual) sees at runtime.
|
||||
vi.mock("virtual:emdash/wait-until", () => ({ waitUntil: undefined }), { virtual: true });
|
||||
|
||||
// The waitUntil-handoff path is exercised end-to-end by the cron
|
||||
// integration in emdash-runtime.ts; testing it here would require
|
||||
// swapping out a module that's already bound at load time, which
|
||||
// fights vitest's module cache. These unit tests cover the three
|
||||
// behaviors that don't need a real waitUntil: the callback fires,
|
||||
// errors don't escape, and the caller doesn't block.
|
||||
import { after } from "../../src/after.js";
|
||||
|
||||
describe("after()", () => {
|
||||
it("runs the callback", async () => {
|
||||
const fn = vi.fn();
|
||||
after(fn);
|
||||
await new Promise((r) => setTimeout(r, 0));
|
||||
expect(fn).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("swallows errors and logs them with the emdash prefix", async () => {
|
||||
const errorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
|
||||
const boom = new Error("boom");
|
||||
|
||||
try {
|
||||
expect(() =>
|
||||
after(async () => {
|
||||
throw boom;
|
||||
}),
|
||||
).not.toThrow();
|
||||
|
||||
await new Promise((r) => setTimeout(r, 0));
|
||||
|
||||
expect(errorSpy).toHaveBeenCalledWith(
|
||||
expect.stringContaining("[emdash] deferred task failed"),
|
||||
boom,
|
||||
);
|
||||
} finally {
|
||||
// Restore unconditionally so a failed assertion above doesn't leak
|
||||
// the spy into later tests.
|
||||
errorSpy.mockRestore();
|
||||
}
|
||||
});
|
||||
|
||||
it("returns synchronously without waiting for the callback", async () => {
|
||||
let ran = false;
|
||||
after(async () => {
|
||||
await new Promise((r) => setTimeout(r, 10));
|
||||
ran = true;
|
||||
});
|
||||
// after() returned already — the callback hasn't completed.
|
||||
expect(ran).toBe(false);
|
||||
});
|
||||
});
|
||||
70 additions — packages/core/tests/unit/api/cache-headers.test.ts (new file)
@@ -0,0 +1,70 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { apiError, apiSuccess, handleError, unwrapResult } from "../../../src/api/error.js";
|
||||
|
||||
describe("API cache headers", () => {
|
||||
const EXPECTED_CACHE_CONTROL = "private, no-store";
|
||||
|
||||
describe("apiSuccess", () => {
|
||||
it("should include Cache-Control: private, no-store", () => {
|
||||
const response = apiSuccess({ ok: true });
|
||||
expect(response.headers.get("Cache-Control")).toBe(EXPECTED_CACHE_CONTROL);
|
||||
});
|
||||
|
||||
it("should not include Vary header", () => {
|
||||
const response = apiSuccess({ ok: true });
|
||||
expect(response.headers.has("Vary")).toBe(false);
|
||||
});
|
||||
|
||||
it("should still include correct status and body", async () => {
|
||||
const response = apiSuccess({ id: "123" }, 201);
|
||||
expect(response.status).toBe(201);
|
||||
const body = await response.json();
|
||||
expect(body).toEqual({ data: { id: "123" } });
|
||||
});
|
||||
});
|
||||
|
||||
describe("apiError", () => {
|
||||
it("should include Cache-Control: private, no-store", () => {
|
||||
const response = apiError("NOT_FOUND", "Not found", 404);
|
||||
expect(response.headers.get("Cache-Control")).toBe(EXPECTED_CACHE_CONTROL);
|
||||
});
|
||||
|
||||
it("should not include Vary header", () => {
|
||||
const response = apiError("NOT_FOUND", "Not found", 404);
|
||||
expect(response.headers.has("Vary")).toBe(false);
|
||||
});
|
||||
|
||||
it("should still include correct status and body", async () => {
|
||||
const response = apiError("FORBIDDEN", "Access denied", 403);
|
||||
expect(response.status).toBe(403);
|
||||
const body = await response.json();
|
||||
expect(body).toEqual({ error: { code: "FORBIDDEN", message: "Access denied" } });
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleError", () => {
|
||||
it("should include cache headers on 500 responses", () => {
|
||||
const response = handleError(new Error("db crash"), "Something went wrong", "INTERNAL");
|
||||
expect(response.headers.get("Cache-Control")).toBe(EXPECTED_CACHE_CONTROL);
|
||||
expect(response.headers.has("Vary")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("unwrapResult", () => {
|
||||
it("should include cache headers on success", () => {
|
||||
const response = unwrapResult({ success: true, data: { id: "1" } });
|
||||
expect(response.headers.get("Cache-Control")).toBe(EXPECTED_CACHE_CONTROL);
|
||||
expect(response.headers.has("Vary")).toBe(false);
|
||||
});
|
||||
|
||||
it("should include cache headers on error", () => {
|
||||
const response = unwrapResult({
|
||||
success: false,
|
||||
error: { code: "NOT_FOUND", message: "Not found" },
|
||||
});
|
||||
expect(response.headers.get("Cache-Control")).toBe(EXPECTED_CACHE_CONTROL);
|
||||
expect(response.headers.has("Vary")).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
307 additions — packages/core/tests/unit/api/content-handlers.test.ts (new file)
@@ -0,0 +1,307 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
handleContentCreate,
|
||||
handleContentDuplicate,
|
||||
handleContentGet,
|
||||
handleContentList,
|
||||
handleContentUpdate,
|
||||
} from "../../../src/api/index.js";
|
||||
import { BylineRepository } from "../../../src/database/repositories/byline.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { SchemaRegistry } from "../../../src/schema/registry.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Content Handlers — auto-slug generation", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
// Add a "name" field to the page collection so we can test name-based slug generation
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.createField("page", { slug: "name", label: "Name", type: "string" });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("handleContentCreate", () => {
|
||||
it("should auto-generate slug from title when slug is omitted", async () => {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("hello-world");
|
||||
});
|
||||
|
||||
it("should auto-generate slug from name when title is absent", async () => {
|
||||
const result = await handleContentCreate(db, "page", {
|
||||
data: { name: "My Widget" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("my-widget");
|
||||
});
|
||||
|
||||
it("should prefer title over name for slug generation", async () => {
|
||||
const result = await handleContentCreate(db, "page", {
|
||||
data: { title: "From Title", name: "From Name" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("from-title");
|
||||
});
|
||||
|
||||
it("should respect explicit slug and not auto-generate", async () => {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
slug: "custom-slug",
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("custom-slug");
|
||||
});
|
||||
|
||||
it("should handle slug collisions by appending numeric suffix", async () => {
|
||||
// Create first item with the slug
|
||||
await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
|
||||
// Create second item with same title — should get unique slug
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("hello-world-1");
|
||||
});
|
||||
|
||||
it("should increment suffix on repeated collisions", async () => {
|
||||
await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Hello World" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("hello-world-2");
|
||||
});
|
||||
|
||||
it("should leave slug null when no title or name is present", async () => {
|
||||
// `data: {}` — no title, no name. Slug source isn't there, so the
|
||||
// auto-generator has nothing to work with.
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: {},
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBeNull();
|
||||
});
|
||||
|
||||
it("should leave slug null when title is empty string", async () => {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle unicode titles", async () => {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Café Naïve" },
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.slug).toBe("cafe-naive");
|
||||
});
|
||||
|
||||
it("should allow same auto-slug in different collections", async () => {
|
||||
const postResult = await handleContentCreate(db, "post", {
|
||||
data: { title: "About" },
|
||||
});
|
||||
const pageResult = await handleContentCreate(db, "page", {
|
||||
data: { title: "About" },
|
||||
});
|
||||
|
||||
expect(postResult.success).toBe(true);
|
||||
expect(pageResult.success).toBe(true);
|
||||
expect(postResult.data?.item.slug).toBe("about");
|
||||
expect(pageResult.data?.item.slug).toBe("about");
|
||||
});
|
||||
|
||||
it("preserves publishedAt and createdAt when provided — content migration use case", async () => {
|
||||
const originalCreated = "2019-03-15T10:30:00.000Z";
|
||||
const originalPublished = "2019-03-16T09:00:00.000Z";
|
||||
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title: "Migrated Post" },
|
||||
createdAt: originalCreated,
|
||||
publishedAt: originalPublished,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.createdAt).toBe(originalCreated);
|
||||
expect(result.data?.item.publishedAt).toBe(originalPublished);
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleContentDuplicate", () => {
|
||||
it("should generate slug from duplicated title", async () => {
|
||||
const original = await handleContentCreate(db, "post", {
|
||||
data: { title: "My Post" },
|
||||
slug: "my-post",
|
||||
});
|
||||
|
||||
const result = await handleContentDuplicate(db, "post", original.data!.item.id);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
// Title becomes "My Post (Copy)", slug should be generated from it
|
||||
expect(result.data?.item.slug).toBe("my-post-copy");
|
||||
});
|
||||
|
||||
it("should handle duplicate slug collision from copy", async () => {
|
||||
const original = await handleContentCreate(db, "post", {
|
||||
data: { title: "My Post" },
|
||||
slug: "my-post",
|
||||
});
|
||||
|
||||
// First duplicate
|
||||
const dup1 = await handleContentDuplicate(db, "post", original.data!.item.id);
|
||||
expect(dup1.data?.item.slug).toBe("my-post-copy");
|
||||
|
||||
// Second duplicate — "My Post (Copy)" title slugifies to "my-post-copy"
|
||||
// which now collides with the first duplicate
|
||||
const dup2 = await handleContentDuplicate(db, "post", original.data!.item.id);
|
||||
expect(dup2.success).toBe(true);
|
||||
expect(dup2.data?.item.slug).toBe("my-post-copy-1");
|
||||
});
|
||||
});
|
||||
|
||||
describe("byline hydration and assignment", () => {
|
||||
it("should assign and return bylines on create", async () => {
|
||||
const bylineRepo = new BylineRepository(db);
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "author-one",
|
||||
displayName: "Author One",
|
||||
});
|
||||
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Bylined" },
|
||||
bylines: [{ bylineId: byline.id, roleLabel: "Writer" }],
|
||||
});
|
||||
|
||||
expect(created.success).toBe(true);
|
||||
expect(created.data?.item.primaryBylineId).toBe(byline.id);
|
||||
expect(created.data?.item.byline?.id).toBe(byline.id);
|
||||
expect(created.data?.item.bylines).toHaveLength(1);
|
||||
expect(created.data?.item.bylines?.[0]?.roleLabel).toBe("Writer");
|
||||
});
|
||||
|
||||
it("should return bylines on get and list", async () => {
|
||||
const bylineRepo = new BylineRepository(db);
|
||||
const first = await bylineRepo.create({ slug: "first", displayName: "First" });
|
||||
const second = await bylineRepo.create({ slug: "second", displayName: "Second" });
|
||||
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Order Test" },
|
||||
bylines: [{ bylineId: second.id }, { bylineId: first.id }],
|
||||
});
|
||||
expect(created.success).toBe(true);
|
||||
const contentId = created.data!.item.id;
|
||||
|
||||
const fetched = await handleContentGet(db, "post", contentId);
|
||||
expect(fetched.success).toBe(true);
|
||||
expect(fetched.data?.item.bylines?.[0]?.byline.id).toBe(second.id);
|
||||
expect(fetched.data?.item.bylines?.[1]?.byline.id).toBe(first.id);
|
||||
expect(fetched.data?.item.byline?.id).toBe(second.id);
|
||||
|
||||
const listed = await handleContentList(db, "post", {});
|
||||
expect(listed.success).toBe(true);
|
||||
const listedItem = listed.data?.items.find((item) => item.id === contentId);
|
||||
expect(listedItem?.byline?.id).toBe(second.id);
|
||||
expect(listedItem?.bylines?.[0]?.byline.id).toBe(second.id);
|
||||
});
|
||||
|
||||
it("should update byline ordering on update", async () => {
|
||||
const bylineRepo = new BylineRepository(db);
|
||||
const first = await bylineRepo.create({ slug: "first-upd", displayName: "First" });
|
||||
const second = await bylineRepo.create({ slug: "second-upd", displayName: "Second" });
|
||||
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Update Bylines" },
|
||||
bylines: [{ bylineId: first.id }, { bylineId: second.id }],
|
||||
});
|
||||
expect(created.success).toBe(true);
|
||||
|
||||
const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
|
||||
bylines: [{ bylineId: second.id }, { bylineId: first.id }],
|
||||
});
|
||||
|
||||
expect(updated.success).toBe(true);
|
||||
expect(updated.data?.item.primaryBylineId).toBe(second.id);
|
||||
expect(updated.data?.item.bylines?.[0]?.byline.id).toBe(second.id);
|
||||
expect(updated.data?.item.bylines?.[1]?.byline.id).toBe(first.id);
|
||||
});
|
||||
|
||||
it("should copy bylines when duplicating", async () => {
|
||||
const bylineRepo = new BylineRepository(db);
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "dup-author",
|
||||
displayName: "Dup Author",
|
||||
});
|
||||
|
||||
const original = await handleContentCreate(db, "post", {
|
||||
data: { title: "Duplicate With Bylines" },
|
||||
bylines: [{ bylineId: byline.id }],
|
||||
});
|
||||
expect(original.success).toBe(true);
|
||||
|
||||
const duplicated = await handleContentDuplicate(db, "post", original.data!.item.id);
|
||||
expect(duplicated.success).toBe(true);
|
||||
expect(duplicated.data?.item.byline?.id).toBe(byline.id);
|
||||
expect(duplicated.data?.item.bylines).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleContentUpdate — publishedAt override", () => {
|
||||
it("persists publishedAt when provided", async () => {
|
||||
const created = await handleContentCreate(db, "post", { data: { title: "Hi" } });
|
||||
expect(created.success).toBe(true);
|
||||
|
||||
const newPublishedAt = "2019-03-16T09:00:00.000Z";
|
||||
const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
|
||||
publishedAt: newPublishedAt,
|
||||
});
|
||||
|
||||
expect(updated.success).toBe(true);
|
||||
expect(updated.data?.item.publishedAt).toBe(newPublishedAt);
|
||||
});
|
||||
|
||||
it("leaves createdAt untouched on update", async () => {
|
||||
const originalCreated = "2019-03-15T10:30:00.000Z";
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Hi" },
|
||||
createdAt: originalCreated,
|
||||
});
|
||||
expect(created.success).toBe(true);
|
||||
|
||||
const updated = await handleContentUpdate(db, "post", created.data!.item.id, {
|
||||
data: { title: "Edited" },
|
||||
publishedAt: "2020-01-01T00:00:00.000Z",
|
||||
});
|
||||
|
||||
expect(updated.success).toBe(true);
|
||||
expect(updated.data?.item.createdAt).toBe(originalCreated);
|
||||
});
|
||||
});
|
||||
});
|
||||
279 additions — packages/core/tests/unit/api/content-route-permissions.test.ts (new file)
@@ -0,0 +1,279 @@
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { PUT as updateContent } from "../../../src/astro/routes/api/content/[collection]/[id].js";
|
||||
import { POST as createContent } from "../../../src/astro/routes/api/content/[collection]/index.js";
|
||||
|
||||
/**
|
||||
* Regression tests for the `publishedAt` / `createdAt` permission gate.
|
||||
*
|
||||
* The gate must trigger on *any* explicit presence of these fields —
|
||||
* including `null` (explicit clear) — not just on non-null values. Checking
|
||||
* only `!= null` would let a regular AUTHOR clear `published_at` on any item
|
||||
* they can edit, bypassing `content:publish_any`.
|
||||
*/
|
||||
describe("content route — publishedAt / createdAt permission gate", () => {
|
||||
const makeUser = (role: (typeof Role)[keyof typeof Role]) => ({
|
||||
id: "user-1",
|
||||
role,
|
||||
});
|
||||
|
||||
const makeCache = () => ({ enabled: false, invalidate: vi.fn() });
|
||||
|
||||
describe("POST /_emdash/api/content/{collection}", () => {
|
||||
it("returns 403 when an AUTHOR tries to set publishedAt", async () => {
|
||||
const request = new Request("http://localhost/_emdash/api/content/post", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
data: { title: "Hi" },
|
||||
publishedAt: "2019-03-15T10:30:00.000Z",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await createContent({
|
||||
params: { collection: "post" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
handleContentCreate: vi.fn(),
|
||||
handleContentGet: vi.fn(),
|
||||
},
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof createContent>[0]);
|
||||
|
||||
expect(response.status).toBe(403);
|
||||
await expect(response.json()).resolves.toMatchObject({
|
||||
error: { code: "FORBIDDEN" },
|
||||
});
|
||||
});
|
||||
|
||||
it("returns 403 when an AUTHOR tries to clear publishedAt via null", async () => {
|
||||
const request = new Request("http://localhost/_emdash/api/content/post", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ data: { title: "Hi" }, publishedAt: null }),
|
||||
});
|
||||
|
||||
const response = await createContent({
|
||||
params: { collection: "post" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
handleContentCreate: vi.fn(),
|
||||
handleContentGet: vi.fn(),
|
||||
},
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof createContent>[0]);
|
||||
|
||||
expect(response.status).toBe(403);
|
||||
});
|
||||
|
||||
it("returns 403 when an AUTHOR tries to set createdAt", async () => {
|
||||
const request = new Request("http://localhost/_emdash/api/content/post", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
data: { title: "Hi" },
|
||||
createdAt: "2019-03-15T10:30:00.000Z",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await createContent({
|
||||
params: { collection: "post" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
handleContentCreate: vi.fn(),
|
||||
handleContentGet: vi.fn(),
|
||||
},
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof createContent>[0]);
|
||||
|
||||
expect(response.status).toBe(403);
|
||||
});
|
||||
|
||||
it("lets EDITOR set publishedAt", async () => {
|
||||
const handleContentCreate = vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
data: {
|
||||
item: { id: "c1", publishedAt: "2019-03-15T10:30:00.000Z" },
|
||||
_rev: "rev1",
|
||||
},
|
||||
});
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
data: { title: "Hi" },
|
||||
publishedAt: "2019-03-15T10:30:00.000Z",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await createContent({
|
||||
params: { collection: "post" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentCreate, handleContentGet: vi.fn() },
|
||||
user: makeUser(Role.EDITOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof createContent>[0]);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(handleContentCreate).toHaveBeenCalledWith(
|
||||
"post",
|
||||
expect.objectContaining({ publishedAt: "2019-03-15T10:30:00.000Z" }),
|
||||
);
|
||||
});
|
||||
|
||||
it("lets AUTHOR create without date overrides", async () => {
|
||||
const handleContentCreate = vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
data: { item: { id: "c1" }, _rev: "rev1" },
|
||||
});
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ data: { title: "Hi" } }),
|
||||
});
|
||||
|
||||
const response = await createContent({
|
||||
params: { collection: "post" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentCreate, handleContentGet: vi.fn() },
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof createContent>[0]);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(handleContentCreate).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("PUT /_emdash/api/content/{collection}/{id}", () => {
|
||||
const ownedItem = {
|
||||
success: true,
|
||||
data: { item: { id: "c1", authorId: "user-1" }, _rev: "rev1" },
|
||||
};
|
||||
|
||||
it("returns 403 when an AUTHOR tries to clear publishedAt via null on their own post", async () => {
|
||||
const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
|
||||
const handleContentUpdate = vi.fn();
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post/c1", {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ publishedAt: null }),
|
||||
});
|
||||
|
||||
const response = await updateContent({
|
||||
params: { collection: "post", id: "c1" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentUpdate, handleContentGet },
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof updateContent>[0]);
|
||||
|
||||
expect(response.status).toBe(403);
|
||||
expect(handleContentUpdate).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("returns 403 when an AUTHOR tries to set publishedAt on their own post", async () => {
|
||||
const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
|
||||
const handleContentUpdate = vi.fn();
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post/c1", {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ publishedAt: "2019-03-15T10:30:00.000Z" }),
|
||||
});
|
||||
|
||||
const response = await updateContent({
|
||||
params: { collection: "post", id: "c1" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentUpdate, handleContentGet },
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof updateContent>[0]);
|
||||
|
||||
expect(response.status).toBe(403);
|
||||
expect(handleContentUpdate).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("lets EDITOR set publishedAt", async () => {
|
||||
const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
|
||||
const handleContentUpdate = vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
data: {
|
||||
item: { id: "c1", publishedAt: "2019-03-15T10:30:00.000Z" },
|
||||
_rev: "rev2",
|
||||
},
|
||||
});
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post/c1", {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ publishedAt: "2019-03-15T10:30:00.000Z" }),
|
||||
});
|
||||
|
||||
const response = await updateContent({
|
||||
params: { collection: "post", id: "c1" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentUpdate, handleContentGet },
|
||||
user: makeUser(Role.EDITOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof updateContent>[0]);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(handleContentUpdate).toHaveBeenCalledWith(
|
||||
"post",
|
||||
"c1",
|
||||
expect.objectContaining({ publishedAt: "2019-03-15T10:30:00.000Z" }),
|
||||
);
|
||||
});
|
||||
|
||||
it("lets AUTHOR update their own post without date overrides", async () => {
|
||||
const handleContentGet = vi.fn().mockResolvedValue(ownedItem);
|
||||
const handleContentUpdate = vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
data: { item: { id: "c1" }, _rev: "rev2" },
|
||||
});
|
||||
|
||||
const request = new Request("http://localhost/_emdash/api/content/post/c1", {
|
||||
method: "PUT",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ data: { title: "Edited" } }),
|
||||
});
|
||||
|
||||
const response = await updateContent({
|
||||
params: { collection: "post", id: "c1" },
|
||||
request,
|
||||
locals: {
|
||||
emdash: { handleContentUpdate, handleContentGet },
|
||||
user: makeUser(Role.AUTHOR),
|
||||
},
|
||||
cache: makeCache(),
|
||||
} as Parameters<typeof updateContent>[0]);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(handleContentUpdate).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
165 additions — packages/core/tests/unit/api/csrf.test.ts (new file)
@@ -0,0 +1,165 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { checkPublicCsrf } from "../../../src/api/csrf.js";
|
||||
|
||||
function makeRequest(method: string, headers: Record<string, string> = {}): Request {
|
||||
return new Request("http://example.com/_emdash/api/comments/posts/abc", {
|
||||
method,
|
||||
headers,
|
||||
});
|
||||
}
|
||||
|
||||
function makeUrl(host = "example.com"): URL {
|
||||
return new URL(`http://${host}/_emdash/api/comments/posts/abc`);
|
||||
}
|
||||
|
||||
describe("checkPublicCsrf", () => {
|
||||
describe("allows requests with X-EmDash-Request header", () => {
|
||||
it("allows POST with custom header", () => {
|
||||
const request = makeRequest("POST", { "X-EmDash-Request": "1" });
|
||||
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
|
||||
});
|
||||
|
||||
it("allows POST with custom header even if Origin is cross-origin", () => {
|
||||
const request = makeRequest("POST", {
|
||||
"X-EmDash-Request": "1",
|
||||
Origin: "http://evil.com",
|
||||
});
|
||||
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("allows same-origin requests", () => {
|
||||
it("allows POST with matching Origin", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://example.com",
|
||||
});
|
||||
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
|
||||
});
|
||||
|
||||
it("allows POST with matching Origin on different path", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://example.com",
|
||||
});
|
||||
const url = new URL("http://example.com/_emdash/api/auth/invite/complete");
|
||||
expect(checkPublicCsrf(request, url)).toBeNull();
|
||||
});
|
||||
|
||||
it("matches host including port", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://localhost:4321",
|
||||
});
|
||||
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
|
||||
expect(checkPublicCsrf(request, url)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("blocks cross-origin requests", () => {
|
||||
it("returns 403 with CSRF_REJECTED code", async () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://evil.com",
|
||||
});
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
const body = await response!.json();
|
||||
expect(body).toEqual({
|
||||
error: { code: "CSRF_REJECTED", message: "Cross-origin request blocked" },
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects Origin with different port", async () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://example.com:9999",
|
||||
});
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("rejects Origin with different host", async () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://attacker.example.com",
|
||||
});
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("rejects cross-scheme Origin (http vs https)", async () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "https://example.com",
|
||||
});
|
||||
// Request URL is http://example.com — same host but different scheme
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("rejects malformed Origin header", async () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "not-a-valid-url",
|
||||
});
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("rejects Origin: null (sandboxed iframe)", async () => {
|
||||
const request = makeRequest("POST", { Origin: "null" });
|
||||
const response = checkPublicCsrf(request, makeUrl());
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
});
|
||||
|
||||
describe("dual-origin matching (reverse proxy)", () => {
|
||||
it("accepts Origin matching public origin when behind proxy", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "https://mysite.example.com",
|
||||
});
|
||||
// Internal URL is http, public is https — proxy scenario
|
||||
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
|
||||
expect(checkPublicCsrf(request, url, "https://mysite.example.com")).toBeNull();
|
||||
});
|
||||
|
||||
it("still accepts Origin matching internal origin when publicOrigin is set", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://localhost:4321",
|
||||
});
|
||||
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
|
||||
expect(checkPublicCsrf(request, url, "https://mysite.example.com")).toBeNull();
|
||||
});
|
||||
|
||||
it("rejects Origin matching neither internal nor public", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://evil.com",
|
||||
});
|
||||
const url = new URL("http://localhost:4321/_emdash/api/comments/posts/abc");
|
||||
const response = checkPublicCsrf(request, url, "https://mysite.example.com");
|
||||
expect(response).not.toBeNull();
|
||||
expect(response!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("unchanged behavior when publicOrigin is undefined", () => {
|
||||
const request = makeRequest("POST", {
|
||||
Origin: "http://example.com",
|
||||
});
|
||||
expect(checkPublicCsrf(request, makeUrl(), undefined)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("allows requests without Origin header", () => {
|
||||
it("allows POST without any Origin (non-browser client)", () => {
|
||||
const request = makeRequest("POST");
|
||||
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
|
||||
});
|
||||
|
||||
it("allows POST without Origin or custom header (curl/server)", () => {
|
||||
const request = makeRequest("POST", {
|
||||
"Content-Type": "application/json",
|
||||
});
|
||||
expect(checkPublicCsrf(request, makeUrl())).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
241
packages/core/tests/unit/api/dashboard-handlers.test.ts
Normal file
241
packages/core/tests/unit/api/dashboard-handlers.test.ts
Normal file
@@ -0,0 +1,241 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, afterEach } from "vitest";
|
||||
|
||||
import { handleDashboardStats } from "../../../src/api/handlers/dashboard.js";
|
||||
import { ContentRepository } from "../../../src/database/repositories/content.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { SchemaRegistry } from "../../../src/schema/registry.js";
|
||||
import { createPostFixture, createPageFixture } from "../../utils/fixtures.js";
|
||||
import {
|
||||
setupTestDatabase,
|
||||
setupTestDatabaseWithCollections,
|
||||
teardownTestDatabase,
|
||||
} from "../../utils/test-db.js";
|
||||
|
||||
describe("Dashboard Handlers", () => {
|
||||
describe("handleDashboardStats", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("returns empty stats when no collections exist", async () => {
|
||||
db = await setupTestDatabase();
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data).toBeDefined();
|
||||
expect(result.data!.collections).toEqual([]);
|
||||
expect(result.data!.mediaCount).toBe(0);
|
||||
expect(result.data!.userCount).toBe(0);
|
||||
expect(result.data!.recentItems).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns collection stats with correct counts", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
// Create some posts with different statuses
|
||||
await contentRepo.create(createPostFixture({ slug: "post-1" }));
|
||||
await contentRepo.create(createPostFixture({ slug: "post-2", status: "published" }));
|
||||
await contentRepo.create(createPostFixture({ slug: "post-3", status: "published" }));
|
||||
|
||||
// Create a draft page
|
||||
await contentRepo.create(createPageFixture({ slug: "page-1" }));
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const { collections } = result.data!;
|
||||
|
||||
// Both collections should be present
|
||||
expect(collections).toHaveLength(2);
|
||||
|
||||
const postStats = collections.find((c) => c.slug === "post");
|
||||
expect(postStats).toBeDefined();
|
||||
expect(postStats!.label).toBe("Posts");
|
||||
expect(postStats!.total).toBe(3);
|
||||
expect(postStats!.published).toBe(2);
|
||||
expect(postStats!.draft).toBe(1);
|
||||
|
||||
const pageStats = collections.find((c) => c.slug === "page");
|
||||
expect(pageStats).toBeDefined();
|
||||
expect(pageStats!.label).toBe("Pages");
|
||||
expect(pageStats!.total).toBe(1);
|
||||
expect(pageStats!.published).toBe(0);
|
||||
expect(pageStats!.draft).toBe(1);
|
||||
});
|
||||
|
||||
it("returns recent items across collections", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
await contentRepo.create(createPostFixture({ slug: "post-1" }));
|
||||
// Small delay for distinct updated_at
|
||||
await new Promise((r) => setTimeout(r, 10));
|
||||
await contentRepo.create(createPageFixture({ slug: "page-1" }));
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const { recentItems } = result.data!;
|
||||
|
||||
expect(recentItems.length).toBeGreaterThanOrEqual(2);
|
||||
|
||||
// Most recently updated should be first
|
||||
expect(recentItems[0]!.collection).toBe("page");
|
||||
expect(recentItems[0]!.collectionLabel).toBe("Pages");
|
||||
expect(recentItems[0]!.slug).toBe("page-1");
|
||||
expect(recentItems[0]!.status).toBe("draft");
|
||||
|
||||
expect(recentItems[1]!.collection).toBe("post");
|
||||
expect(recentItems[1]!.collectionLabel).toBe("Posts");
|
||||
expect(recentItems[1]!.slug).toBe("post-1");
|
||||
});
|
||||
|
||||
it("recent items use title field when available", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
// setupTestDatabaseWithCollections creates post/page with title fields
|
||||
await contentRepo.create(
|
||||
createPostFixture({
|
||||
slug: "my-post",
|
||||
data: { title: "My Great Post", content: [] },
|
||||
}),
|
||||
);
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const postItem = result.data!.recentItems.find((i) => i.slug === "my-post");
|
||||
expect(postItem).toBeDefined();
|
||||
expect(postItem!.title).toBe("My Great Post");
|
||||
});
|
||||
|
||||
it("recent items fall back to slug when collection has no title field", async () => {
|
||||
db = await setupTestDatabase();
|
||||
const registry = new SchemaRegistry(db);
|
||||
|
||||
// Create a collection without a title field
|
||||
await registry.createCollection({
|
||||
slug: "events",
|
||||
label: "Events",
|
||||
labelSingular: "Event",
|
||||
});
|
||||
await registry.createField("events", {
|
||||
slug: "date",
|
||||
label: "Date",
|
||||
type: "datetime",
|
||||
});
|
||||
|
||||
const contentRepo = new ContentRepository(db);
|
||||
await contentRepo.create({
|
||||
type: "events",
|
||||
slug: "launch-party",
|
||||
data: { date: "2026-03-01" },
|
||||
status: "draft",
|
||||
});
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const eventItem = result.data!.recentItems.find((i) => i.collection === "events");
|
||||
expect(eventItem).toBeDefined();
|
||||
// No title field, should fall back to slug
|
||||
expect(eventItem!.title).toBe("launch-party");
|
||||
});
|
||||
|
||||
it("excludes soft-deleted items from recent items", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
const post = await contentRepo.create(createPostFixture({ slug: "will-delete" }));
|
||||
await contentRepo.create(createPostFixture({ slug: "will-keep" }));
|
||||
|
||||
// Soft-delete the first post
|
||||
await contentRepo.delete("post", post.id);
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const slugs = result.data!.recentItems.map((i) => i.slug);
|
||||
expect(slugs).toContain("will-keep");
|
||||
expect(slugs).not.toContain("will-delete");
|
||||
});
|
||||
|
||||
it("limits recent items to 10", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
// Create 15 posts
|
||||
for (let i = 0; i < 15; i++) {
|
||||
await contentRepo.create(createPostFixture({ slug: `post-${String(i).padStart(2, "0")}` }));
|
||||
}
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data!.recentItems).toHaveLength(10);
|
||||
});
|
||||
|
||||
it("recent items are ordered by updated_at descending", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
await contentRepo.create(createPostFixture({ slug: "oldest" }));
|
||||
await new Promise((r) => setTimeout(r, 10));
|
||||
await contentRepo.create(createPostFixture({ slug: "middle" }));
|
||||
await new Promise((r) => setTimeout(r, 10));
|
||||
await contentRepo.create(createPostFixture({ slug: "newest" }));
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const slugs = result.data!.recentItems.map((i) => i.slug);
|
||||
expect(slugs).toEqual(["newest", "middle", "oldest"]);
|
||||
});
|
||||
|
||||
it("counts exclude soft-deleted items", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
|
||||
const post = await contentRepo.create(createPostFixture({ slug: "to-delete" }));
|
||||
await contentRepo.create(createPostFixture({ slug: "to-keep" }));
|
||||
await contentRepo.delete("post", post.id);
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const postStats = result.data!.collections.find((c) => c.slug === "post");
|
||||
// count() in ContentRepository filters deleted_at IS NULL
|
||||
expect(postStats!.total).toBe(1);
|
||||
});
|
||||
|
||||
it("returns camelCase keys in recent items", async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const contentRepo = new ContentRepository(db);
|
||||
await contentRepo.create(createPostFixture());
|
||||
|
||||
const result = await handleDashboardStats(db);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const item = result.data!.recentItems[0]!;
|
||||
// Verify camelCase API shape
|
||||
expect(item).toHaveProperty("id");
|
||||
expect(item).toHaveProperty("collection");
|
||||
expect(item).toHaveProperty("collectionLabel");
|
||||
expect(item).toHaveProperty("title");
|
||||
expect(item).toHaveProperty("slug");
|
||||
expect(item).toHaveProperty("status");
|
||||
expect(item).toHaveProperty("updatedAt");
|
||||
expect(item).toHaveProperty("authorId");
|
||||
// Should NOT have snake_case keys
|
||||
expect(item).not.toHaveProperty("collection_label");
|
||||
expect(item).not.toHaveProperty("updated_at");
|
||||
expect(item).not.toHaveProperty("author_id");
|
||||
});
|
||||
});
|
||||
});
|
||||
22
packages/core/tests/unit/api/email-settings-route.test.ts
Normal file
22
packages/core/tests/unit/api/email-settings-route.test.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* Email Settings Route Registration Test
|
||||
*
|
||||
* Regression test for https://github.com/emdash-cms/emdash/issues/151
|
||||
* The email settings API route file existed but was never registered
|
||||
* via injectRoute(), causing the endpoint to return 404.
|
||||
*/
|
||||
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { injectCoreRoutes } from "../../../src/astro/integration/routes.js";
|
||||
|
||||
describe("email settings route registration (#151)", () => {
|
||||
it("registers /_emdash/api/settings/email route", () => {
|
||||
const injectRoute = vi.fn();
|
||||
|
||||
injectCoreRoutes(injectRoute);
|
||||
|
||||
const patterns = injectRoute.mock.calls.map((call) => (call[0] as { pattern: string }).pattern);
|
||||
expect(patterns).toContain("/_emdash/api/settings/email");
|
||||
});
|
||||
});
|
||||
915
packages/core/tests/unit/api/marketplace-handlers.test.ts
Normal file
915
packages/core/tests/unit/api/marketplace-handlers.test.ts
Normal file
@@ -0,0 +1,915 @@
|
||||
/**
|
||||
* Marketplace handler tests
|
||||
*
|
||||
* Tests the business logic for:
|
||||
* - Install (handleMarketplaceInstall)
|
||||
* - Update (handleMarketplaceUpdate)
|
||||
* - Uninstall (handleMarketplaceUninstall)
|
||||
* - Update check (handleMarketplaceUpdateCheck)
|
||||
* - Search/GetPlugin proxies (handleMarketplaceSearch, handleMarketplaceGetPlugin)
|
||||
*
|
||||
* Uses a real in-memory SQLite database and mock Storage/SandboxRunner/fetch.
|
||||
*/
|
||||
|
||||
import BetterSqlite3 from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
handleMarketplaceInstall,
|
||||
handleMarketplaceUpdate,
|
||||
handleMarketplaceUninstall,
|
||||
handleMarketplaceUpdateCheck,
|
||||
handleMarketplaceSearch,
|
||||
handleMarketplaceGetPlugin,
|
||||
} from "../../../src/api/handlers/marketplace.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import type { Database as DbSchema } from "../../../src/database/types.js";
|
||||
import type { MarketplacePluginDetail } from "../../../src/plugins/marketplace.js";
|
||||
import type { SandboxRunner, SandboxedPlugin } from "../../../src/plugins/sandbox/types.js";
|
||||
import { PluginStateRepository } from "../../../src/plugins/state.js";
|
||||
import type { PluginManifest } from "../../../src/plugins/types.js";
|
||||
import type {
|
||||
Storage,
|
||||
UploadResult,
|
||||
DownloadResult,
|
||||
ListResult,
|
||||
SignedUploadUrl,
|
||||
} from "../../../src/storage/types.js";
|
||||
|
||||
// ── Mock factories ────────────────────────────────────────────────
|
||||
|
||||
function createMockStorage(): Storage {
|
||||
const store = new Map<string, { body: Uint8Array; contentType: string }>();
|
||||
|
||||
return {
|
||||
async upload(opts: {
|
||||
key: string;
|
||||
body: Buffer | Uint8Array | ReadableStream<Uint8Array>;
|
||||
contentType: string;
|
||||
}): Promise<UploadResult> {
|
||||
let body: Uint8Array;
|
||||
if (opts.body instanceof Uint8Array) {
|
||||
body = opts.body;
|
||||
} else if (Buffer.isBuffer(opts.body)) {
|
||||
body = new Uint8Array(opts.body);
|
||||
} else {
|
||||
// ReadableStream
|
||||
const response = new Response(opts.body);
|
||||
body = new Uint8Array(await response.arrayBuffer());
|
||||
}
|
||||
store.set(opts.key, { body, contentType: opts.contentType });
|
||||
return { key: opts.key, url: `https://storage.test/${opts.key}`, size: body.length };
|
||||
},
|
||||
async download(key: string): Promise<DownloadResult> {
|
||||
const item = store.get(key);
|
||||
if (!item) throw new Error(`Not found: ${key}`);
|
||||
const stream = new ReadableStream<Uint8Array>({
|
||||
start(controller) {
|
||||
controller.enqueue(item.body);
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
return { body: stream, contentType: item.contentType, size: item.body.length };
|
||||
},
|
||||
async delete(key: string): Promise<void> {
|
||||
store.delete(key);
|
||||
},
|
||||
async exists(key: string): Promise<boolean> {
|
||||
return store.has(key);
|
||||
},
|
||||
async list(): Promise<ListResult> {
|
||||
return { files: [] };
|
||||
},
|
||||
async getSignedUploadUrl(): Promise<SignedUploadUrl> {
|
||||
return {
|
||||
url: "https://test.com/upload",
|
||||
method: "PUT",
|
||||
headers: {},
|
||||
expiresAt: new Date().toISOString(),
|
||||
};
|
||||
},
|
||||
getPublicUrl(key: string): string {
|
||||
return `https://storage.test/${key}`;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createMockSandboxRunner(): SandboxRunner & {
|
||||
loadedPlugins: Array<{ manifest: PluginManifest; code: string }>;
|
||||
} {
|
||||
const loadedPlugins: Array<{ manifest: PluginManifest; code: string }> = [];
|
||||
|
||||
return {
|
||||
loadedPlugins,
|
||||
isAvailable(): boolean {
|
||||
return true;
|
||||
},
|
||||
async load(manifest: PluginManifest, code: string): Promise<SandboxedPlugin> {
|
||||
loadedPlugins.push({ manifest, code });
|
||||
return {
|
||||
id: manifest.id,
|
||||
manifest,
|
||||
async invokeHook() {
|
||||
return undefined;
|
||||
},
|
||||
async invokeRoute() {
|
||||
return undefined;
|
||||
},
|
||||
async terminate() {},
|
||||
};
|
||||
},
|
||||
async terminateAll() {},
|
||||
};
|
||||
}
|
||||
|
||||
const MARKETPLACE_URL = "https://marketplace.example.com";
|
||||
|
||||
function mockManifest(id = "test-seo", version = "1.0.0"): PluginManifest {
|
||||
return {
|
||||
id,
|
||||
version,
|
||||
capabilities: ["content:read"],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
hooks: [],
|
||||
routes: [],
|
||||
admin: {},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a gzipped tar bundle for use with mocked fetch.
|
||||
* Uses CompressionStream + minimal tar format.
|
||||
*/
|
||||
async function createMockBundle(manifest: PluginManifest): Promise<Uint8Array> {
|
||||
const encoder = new TextEncoder();
|
||||
const manifestJson = JSON.stringify(manifest);
|
||||
const backendCode = 'export default function() { return "hello"; }';
|
||||
|
||||
// Create simple tar
|
||||
const files = [
|
||||
{ name: "manifest.json", content: manifestJson },
|
||||
{ name: "backend.js", content: backendCode },
|
||||
];
|
||||
|
||||
const blocks: Uint8Array[] = [];
|
||||
|
||||
for (const file of files) {
|
||||
const contentBytes = encoder.encode(file.content);
|
||||
const header = new Uint8Array(512);
|
||||
|
||||
// Name
|
||||
header.set(encoder.encode(file.name), 0);
|
||||
// Mode
|
||||
header.set(encoder.encode("0000644\0"), 100);
|
||||
// UID/GID
|
||||
header.set(encoder.encode("0000000\0"), 108);
|
||||
header.set(encoder.encode("0000000\0"), 116);
|
||||
// Size in octal
|
||||
const sizeOctal = contentBytes.length.toString(8).padStart(11, "0") + "\0";
|
||||
header.set(encoder.encode(sizeOctal), 124);
|
||||
// Mtime
|
||||
header.set(encoder.encode("00000000000\0"), 136);
|
||||
// Type = regular file
|
||||
header[156] = 0x30;
|
||||
// Checksum spaces
|
||||
header.set(encoder.encode(" "), 148);
|
||||
|
||||
let checksum = 0;
|
||||
for (let i = 0; i < 512; i++) checksum += header[i]!;
|
||||
header.set(encoder.encode(checksum.toString(8).padStart(6, "0") + "\0 "), 148);
|
||||
|
||||
blocks.push(header);
|
||||
|
||||
const paddedSize = Math.ceil(contentBytes.length / 512) * 512;
|
||||
const dataBlock = new Uint8Array(paddedSize);
|
||||
dataBlock.set(contentBytes, 0);
|
||||
blocks.push(dataBlock);
|
||||
}
|
||||
|
||||
blocks.push(new Uint8Array(1024)); // end-of-archive
|
||||
|
||||
const totalSize = blocks.reduce((sum, b) => sum + b.length, 0);
|
||||
const tar = new Uint8Array(totalSize);
|
||||
let offset = 0;
|
||||
for (const block of blocks) {
|
||||
tar.set(block, offset);
|
||||
offset += block.length;
|
||||
}
|
||||
|
||||
// Gzip
|
||||
const cs = new CompressionStream("gzip");
|
||||
const writer = cs.writable.getWriter();
|
||||
const reader = cs.readable.getReader();
|
||||
|
||||
const writePromise = writer.write(tar).then(() => writer.close());
|
||||
const chunks: Uint8Array[] = [];
|
||||
let totalLen = 0;
|
||||
|
||||
for (;;) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
totalLen += value.length;
|
||||
}
|
||||
await writePromise;
|
||||
|
||||
const result = new Uint8Array(totalLen);
|
||||
offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function mockPluginDetail(
|
||||
id = "test-seo",
|
||||
latestVersion = "1.0.0",
|
||||
checksum?: string,
|
||||
): MarketplacePluginDetail {
|
||||
return {
|
||||
id,
|
||||
name: "Test SEO",
|
||||
description: "SEO plugin",
|
||||
author: { name: "Test", verified: true, avatarUrl: null },
|
||||
capabilities: ["hooks"],
|
||||
keywords: [],
|
||||
installCount: 10,
|
||||
hasIcon: false,
|
||||
iconUrl: "",
|
||||
createdAt: "2026-01-01T00:00:00Z",
|
||||
updatedAt: "2026-02-01T00:00:00Z",
|
||||
repositoryUrl: null,
|
||||
homepageUrl: null,
|
||||
license: "MIT",
|
||||
latestVersion: {
|
||||
version: latestVersion,
|
||||
minEmDashVersion: null,
|
||||
bundleSize: 1234,
|
||||
checksum: checksum ?? "will-be-computed",
|
||||
changelog: null,
|
||||
readme: null,
|
||||
hasIcon: false,
|
||||
screenshotCount: 0,
|
||||
screenshotUrls: [],
|
||||
capabilities: ["hooks"],
|
||||
auditVerdict: "pass",
|
||||
imageAuditVerdict: "pass",
|
||||
publishedAt: "2026-01-01T00:00:00Z",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe("Marketplace handlers", () => {
|
||||
let db: Kysely<DbSchema>;
|
||||
let sqliteDb: BetterSqlite3.Database;
|
||||
let storage: Storage;
|
||||
let sandboxRunner: ReturnType<typeof createMockSandboxRunner>;
|
||||
let fetchSpy: ReturnType<typeof vi.fn>;
|
||||
|
||||
beforeEach(async () => {
|
||||
sqliteDb = new BetterSqlite3(":memory:");
|
||||
db = new Kysely<DbSchema>({
|
||||
dialect: new SqliteDialect({ database: sqliteDb }),
|
||||
});
|
||||
await runMigrations(db);
|
||||
|
||||
storage = createMockStorage();
|
||||
sandboxRunner = createMockSandboxRunner();
|
||||
fetchSpy = vi.fn();
|
||||
vi.stubGlobal("fetch", fetchSpy);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
sqliteDb.close();
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
// ── Install ────────────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceInstall", () => {
|
||||
it("returns error when marketplace not configured", async () => {
|
||||
const result = await handleMarketplaceInstall(db, storage, sandboxRunner, undefined, "test");
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
|
||||
});
|
||||
|
||||
it("returns error when storage not available", async () => {
|
||||
const result = await handleMarketplaceInstall(
|
||||
db,
|
||||
null,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test",
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("STORAGE_NOT_CONFIGURED");
|
||||
});
|
||||
|
||||
it("returns error when sandbox runner not available", async () => {
|
||||
const result = await handleMarketplaceInstall(db, storage, null, MARKETPLACE_URL, "test");
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("SANDBOX_NOT_AVAILABLE");
|
||||
});
|
||||
|
||||
it("successfully installs a marketplace plugin", async () => {
|
||||
const manifest = mockManifest("test-seo", "1.0.0");
|
||||
const bundleBytes = await createMockBundle(manifest);
|
||||
|
||||
// Mock: getPlugin detail — set checksum to undefined so the check is skipped
|
||||
const detail = mockPluginDetail("test-seo", "1.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
// Mock: downloadBundle
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
// Mock: reportInstall
|
||||
fetchSpy.mockResolvedValueOnce(new Response("OK", { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceInstall(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.pluginId).toBe("test-seo");
|
||||
expect(result.data?.version).toBe("1.0.0");
|
||||
expect(result.data?.capabilities).toEqual(["content:read"]);
|
||||
|
||||
// Verify state was written
|
||||
const repo = new PluginStateRepository(db);
|
||||
const state = await repo.get("test-seo");
|
||||
expect(state?.source).toBe("marketplace");
|
||||
expect(state?.marketplaceVersion).toBe("1.0.0");
|
||||
expect(state?.status).toBe("active");
|
||||
});
|
||||
|
||||
it("rejects install if plugin already installed", async () => {
|
||||
// Pre-install the plugin
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Mock: getPlugin detail (still needed — called before install check... actually, the existing check comes first)
|
||||
const result = await handleMarketplaceInstall(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("ALREADY_INSTALLED");
|
||||
});
|
||||
|
||||
it("rejects when manifest ID doesn't match requested plugin", async () => {
|
||||
const manifest = mockManifest("wrong-id", "1.0.0");
|
||||
const bundleBytes = await createMockBundle(manifest);
|
||||
|
||||
// Clear checksum so we reach the manifest check
|
||||
const detail = mockPluginDetail("test-seo", "1.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceInstall(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MANIFEST_MISMATCH");
|
||||
});
|
||||
|
||||
it("validates checksum against requested pinned version metadata", async () => {
|
||||
const manifest = mockManifest("test-seo", "1.0.0");
|
||||
const bundleBytes = await createMockBundle(manifest);
|
||||
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "different-checksum";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify({
|
||||
items: [
|
||||
{
|
||||
version: "1.0.0",
|
||||
minEmDashVersion: null,
|
||||
bundleSize: 1234,
|
||||
checksum: "",
|
||||
changelog: null,
|
||||
capabilities: ["hooks"],
|
||||
auditVerdict: "pass",
|
||||
imageAuditVerdict: "pass",
|
||||
publishedAt: "2026-01-01T00:00:00Z",
|
||||
},
|
||||
],
|
||||
}),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(new Response("OK", { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceInstall(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
{ version: "1.0.0" },
|
||||
);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Update ─────────────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceUpdate", () => {
|
||||
it("returns error when plugin not found", async () => {
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"nonexistent",
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
|
||||
it("returns error when plugin is not from marketplace", async () => {
|
||||
// Insert a config-sourced plugin
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("config-plugin", "1.0.0", "active");
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"config-plugin",
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
|
||||
it("returns error when already up to date", async () => {
|
||||
// Install v1.0.0
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Mock getPlugin returning same version
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(mockPluginDetail("test-seo", "1.0.0")), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("ALREADY_UP_TO_DATE");
|
||||
});
|
||||
|
||||
it("rejects update on checksum mismatch", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "expected-checksum";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
|
||||
const bundleBytes = await createMockBundle(mockManifest("test-seo", "2.0.0"));
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
{ confirmCapabilityChanges: true },
|
||||
);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("CHECKSUM_MISMATCH");
|
||||
});
|
||||
|
||||
it("rejects update when bundle manifest version mismatches target", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
|
||||
const wrongVersionManifest = mockManifest("test-seo", "9.9.9");
|
||||
const bundleBytes = await createMockBundle(wrongVersionManifest);
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
{ confirmCapabilityChanges: true },
|
||||
);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MANIFEST_VERSION_MISMATCH");
|
||||
});
|
||||
|
||||
it("requires confirmation for capability escalation", async () => {
|
||||
// Install v1.0.0 with only "hooks" capability
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Store old bundle in R2 (needed for capability diff)
|
||||
const oldManifest = mockManifest("test-seo", "1.0.0");
|
||||
const encoder = new TextEncoder();
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/manifest.json",
|
||||
body: encoder.encode(JSON.stringify(oldManifest)),
|
||||
contentType: "application/json",
|
||||
});
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/backend.js",
|
||||
body: encoder.encode("export default {};"),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
// New version has additional capability
|
||||
const newManifest = {
|
||||
...mockManifest("test-seo", "2.0.0"),
|
||||
capabilities: ["content:read", "network:request"],
|
||||
};
|
||||
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
|
||||
|
||||
// Mock getPlugin
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
// Mock downloadBundle
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("CAPABILITY_ESCALATION");
|
||||
expect(result.error?.details?.capabilityChanges).toBeDefined();
|
||||
});
|
||||
|
||||
it("succeeds with confirmCapabilityChanges flag", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Store old bundle
|
||||
const encoder = new TextEncoder();
|
||||
const oldManifest = mockManifest("test-seo", "1.0.0");
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/manifest.json",
|
||||
body: encoder.encode(JSON.stringify(oldManifest)),
|
||||
contentType: "application/json",
|
||||
});
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/backend.js",
|
||||
body: encoder.encode("export default {};"),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
const newManifest = {
|
||||
...mockManifest("test-seo", "2.0.0"),
|
||||
capabilities: ["content:read", "network:request"],
|
||||
};
|
||||
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
|
||||
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
{ confirmCapabilityChanges: true },
|
||||
);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.oldVersion).toBe("1.0.0");
|
||||
expect(result.data?.newVersion).toBe("2.0.0");
|
||||
expect(result.data?.capabilityChanges.added).toContain("network:request");
|
||||
});
|
||||
|
||||
it("treats deprecated → current capability rename as no change", async () => {
|
||||
// Installed version declared the legacy name; new version
|
||||
// declares the canonical name. diffCapabilities normalizes
|
||||
// both sides, so the diff should be empty — no spurious
|
||||
// "capability changed" prompt for a pure rename.
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
const oldManifest = {
|
||||
...mockManifest("test-seo", "1.0.0"),
|
||||
capabilities: ["read:content"],
|
||||
};
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/manifest.json",
|
||||
body: encoder.encode(JSON.stringify(oldManifest)),
|
||||
contentType: "application/json",
|
||||
});
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/backend.js",
|
||||
body: encoder.encode("export default {};"),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
const newManifest = {
|
||||
...mockManifest("test-seo", "2.0.0"),
|
||||
capabilities: ["content:read"],
|
||||
};
|
||||
const bundleBytes = await createMockBundle(newManifest as PluginManifest);
|
||||
|
||||
const detail = mockPluginDetail("test-seo", "2.0.0");
|
||||
detail.latestVersion!.checksum = "";
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(detail), { status: 200 }));
|
||||
fetchSpy.mockResolvedValueOnce(new Response(bundleBytes, { status: 200 }));
|
||||
|
||||
// No `confirmCapabilityChanges` — if the diff were non-empty,
|
||||
// this would fail with CAPABILITY_ESCALATION.
|
||||
const result = await handleMarketplaceUpdate(
|
||||
db,
|
||||
storage,
|
||||
sandboxRunner,
|
||||
MARKETPLACE_URL,
|
||||
"test-seo",
|
||||
);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.capabilityChanges.added).toEqual([]);
|
||||
expect(result.data?.capabilityChanges.removed).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Uninstall ──────────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceUninstall", () => {
|
||||
it("returns error when plugin not found", async () => {
|
||||
const result = await handleMarketplaceUninstall(db, storage, "nonexistent");
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
|
||||
it("returns error when plugin is not from marketplace", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("config-plugin", "1.0.0", "active");
|
||||
|
||||
const result = await handleMarketplaceUninstall(db, storage, "config-plugin");
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
|
||||
it("successfully uninstalls a marketplace plugin", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Store bundle files that should be cleaned up
|
||||
const encoder = new TextEncoder();
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/manifest.json",
|
||||
body: encoder.encode("{}"),
|
||||
contentType: "application/json",
|
||||
});
|
||||
await storage.upload({
|
||||
key: "marketplace/test-seo/1.0.0/backend.js",
|
||||
body: encoder.encode(""),
|
||||
contentType: "application/javascript",
|
||||
});
|
||||
|
||||
const result = await handleMarketplaceUninstall(db, storage, "test-seo");
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.pluginId).toBe("test-seo");
|
||||
expect(result.data?.dataDeleted).toBe(false);
|
||||
|
||||
// Verify state was deleted
|
||||
const state = await repo.get("test-seo");
|
||||
expect(state).toBeNull();
|
||||
});
|
||||
|
||||
it("deletes plugin storage data when deleteData=true", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Insert some plugin storage data
|
||||
await db
|
||||
.insertInto("_plugin_storage")
|
||||
.values({
|
||||
plugin_id: "test-seo",
|
||||
collection: "default",
|
||||
id: "test-key",
|
||||
data: JSON.stringify({ foo: "bar" }),
|
||||
})
|
||||
.execute();
|
||||
|
||||
const result = await handleMarketplaceUninstall(db, storage, "test-seo", {
|
||||
deleteData: true,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.dataDeleted).toBe(true);
|
||||
|
||||
// Verify plugin storage data was deleted
|
||||
const storageRows = await db
|
||||
.selectFrom("_plugin_storage")
|
||||
.selectAll()
|
||||
.where("plugin_id", "=", "test-seo")
|
||||
.execute();
|
||||
expect(storageRows).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
// ── Update check ───────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceUpdateCheck", () => {
|
||||
it("returns error when marketplace not configured", async () => {
|
||||
const result = await handleMarketplaceUpdateCheck(db, undefined);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
|
||||
});
|
||||
|
||||
it("returns empty items when no marketplace plugins installed", async () => {
|
||||
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toEqual([]);
|
||||
});
|
||||
|
||||
it("detects available updates", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// Mock getPlugin returning newer version
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(mockPluginDetail("test-seo", "2.0.0")), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toHaveLength(1);
|
||||
expect(result.data?.items[0]?.hasUpdate).toBe(true);
|
||||
expect(result.data?.items[0]?.installed).toBe("1.0.0");
|
||||
expect(result.data?.items[0]?.latest).toBe("2.0.0");
|
||||
});
|
||||
|
||||
it("reports no update when versions match", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(mockPluginDetail("test-seo", "1.0.0")), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items[0]?.hasUpdate).toBe(false);
|
||||
});
|
||||
|
||||
it("skips plugins that fail to check", async () => {
|
||||
const repo = new PluginStateRepository(db);
|
||||
await repo.upsert("test-seo", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
await repo.upsert("test-analytics", "1.0.0", "active", {
|
||||
source: "marketplace",
|
||||
marketplaceVersion: "1.0.0",
|
||||
});
|
||||
|
||||
// First plugin check fails (404 — delisted)
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify({ error: "Not found" }), { status: 404 }),
|
||||
);
|
||||
// Second plugin check succeeds
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(mockPluginDetail("test-analytics", "2.0.0")), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceUpdateCheck(db, MARKETPLACE_URL);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
// Only the successful check should appear
|
||||
expect(result.data?.items).toHaveLength(1);
|
||||
expect(result.data?.items[0]?.pluginId).toBe("test-analytics");
|
||||
});
|
||||
});
|
||||
|
||||
// ── Search proxy ───────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceSearch", () => {
|
||||
it("returns error when marketplace not configured", async () => {
|
||||
const result = await handleMarketplaceSearch(undefined);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
|
||||
});
|
||||
|
||||
it("proxies search request to marketplace", async () => {
|
||||
fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify({ items: [] }), { status: 200 }));
|
||||
|
||||
const result = await handleMarketplaceSearch(MARKETPLACE_URL, "seo");
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const [url] = fetchSpy.mock.calls[0]!;
|
||||
expect(url).toContain("/api/v1/plugins?q=seo");
|
||||
});
|
||||
});
|
||||
|
||||
// ── GetPlugin proxy ────────────────────────────────────────────
|
||||
|
||||
describe("handleMarketplaceGetPlugin", () => {
|
||||
it("returns error when marketplace not configured", async () => {
|
||||
const result = await handleMarketplaceGetPlugin(undefined, "test-seo");
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("MARKETPLACE_NOT_CONFIGURED");
|
||||
});
|
||||
|
||||
it("returns NOT_FOUND for missing plugin", async () => {
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify({ error: "Not found" }), { status: 404 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceGetPlugin(MARKETPLACE_URL, "nonexistent");
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
|
||||
it("proxies plugin detail from marketplace", async () => {
|
||||
fetchSpy.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(mockPluginDetail()), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await handleMarketplaceGetPlugin(MARKETPLACE_URL, "test-seo");
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
338
packages/core/tests/unit/api/openapi.test.ts
Normal file
338
packages/core/tests/unit/api/openapi.test.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { generateOpenApiDocument } from "../../../src/api/openapi/document.js";
|
||||
|
||||
describe("OpenAPI document generation", () => {
|
||||
it("generates a valid OpenAPI 3.1 document", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
|
||||
expect(doc.openapi).toBe("3.1.0");
|
||||
expect(doc.info.title).toBe("EmDash CMS API");
|
||||
expect(doc.info.version).toBe("0.1.0");
|
||||
});
|
||||
|
||||
it("includes content paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}/publish");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}/schedule");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}/duplicate");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}/compare");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/{id}/translations");
|
||||
expect(paths).toContain("/_emdash/api/content/{collection}/trash");
|
||||
});
|
||||
|
||||
it("includes media paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/media");
|
||||
expect(paths).toContain("/_emdash/api/media/{id}");
|
||||
expect(paths).toContain("/_emdash/api/media/upload-url");
|
||||
expect(paths).toContain("/_emdash/api/media/{id}/confirm");
|
||||
});
|
||||
|
||||
it("includes schema paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/schema/collections");
|
||||
expect(paths).toContain("/_emdash/api/schema/collections/{slug}");
|
||||
expect(paths).toContain("/_emdash/api/schema/collections/{slug}/fields");
|
||||
expect(paths).toContain("/_emdash/api/schema/collections/{slug}/fields/{fieldSlug}");
|
||||
expect(paths).toContain("/_emdash/api/schema/orphans");
|
||||
});
|
||||
|
||||
it("includes comments paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/comments/{collection}/{contentId}");
|
||||
expect(paths).toContain("/_emdash/api/admin/comments");
|
||||
expect(paths).toContain("/_emdash/api/admin/comments/counts");
|
||||
expect(paths).toContain("/_emdash/api/admin/comments/bulk");
|
||||
expect(paths).toContain("/_emdash/api/admin/comments/{id}");
|
||||
});
|
||||
|
||||
it("includes taxonomy paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/taxonomies");
|
||||
expect(paths).toContain("/_emdash/api/taxonomies/{name}/terms");
|
||||
expect(paths).toContain("/_emdash/api/taxonomies/{name}/terms/{slug}");
|
||||
});
|
||||
|
||||
it("includes menu paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/menus");
|
||||
expect(paths).toContain("/_emdash/api/menus/{name}");
|
||||
expect(paths).toContain("/_emdash/api/menus/{name}/items");
|
||||
expect(paths).toContain("/_emdash/api/menus/{name}/reorder");
|
||||
});
|
||||
|
||||
it("includes section paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/sections");
|
||||
expect(paths).toContain("/_emdash/api/sections/{slug}");
|
||||
});
|
||||
|
||||
it("includes widget paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/widget-areas");
|
||||
expect(paths).toContain("/_emdash/api/widget-areas/{name}");
|
||||
expect(paths).toContain("/_emdash/api/widget-areas/{name}/widgets");
|
||||
expect(paths).toContain("/_emdash/api/widget-areas/{name}/widgets/{id}");
|
||||
expect(paths).toContain("/_emdash/api/widget-areas/{name}/reorder");
|
||||
});
|
||||
|
||||
it("includes settings paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/settings");
|
||||
});
|
||||
|
||||
it("includes search paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/search");
|
||||
expect(paths).toContain("/_emdash/api/search/suggest");
|
||||
expect(paths).toContain("/_emdash/api/search/rebuild");
|
||||
expect(paths).toContain("/_emdash/api/search/enable");
|
||||
expect(paths).toContain("/_emdash/api/search/stats");
|
||||
});
|
||||
|
||||
it("includes redirect paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/redirects");
|
||||
expect(paths).toContain("/_emdash/api/redirects/{id}");
|
||||
expect(paths).toContain("/_emdash/api/redirects/404s");
|
||||
expect(paths).toContain("/_emdash/api/redirects/404s/summary");
|
||||
});
|
||||
|
||||
it("includes user paths", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const paths = Object.keys(doc.paths ?? {});
|
||||
|
||||
expect(paths).toContain("/_emdash/api/admin/users");
|
||||
expect(paths).toContain("/_emdash/api/admin/users/{id}");
|
||||
expect(paths).toContain("/_emdash/api/admin/users/{id}/disable");
|
||||
expect(paths).toContain("/_emdash/api/admin/users/{id}/enable");
|
||||
expect(paths).toContain("/_emdash/api/admin/allowed-domains");
|
||||
expect(paths).toContain("/_emdash/api/admin/allowed-domains/{domain}");
|
||||
});
|
||||
|
||||
it("has correct HTTP methods on content collection endpoint", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const collectionPath = doc.paths?.["/_emdash/api/content/{collection}"];
|
||||
|
||||
expect(collectionPath).toBeDefined();
|
||||
expect(collectionPath).toHaveProperty("get");
|
||||
expect(collectionPath).toHaveProperty("post");
|
||||
});
|
||||
|
||||
it("has correct HTTP methods on content item endpoint", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const itemPath = doc.paths?.["/_emdash/api/content/{collection}/{id}"];
|
||||
|
||||
expect(itemPath).toBeDefined();
|
||||
expect(itemPath).toHaveProperty("get");
|
||||
expect(itemPath).toHaveProperty("put");
|
||||
expect(itemPath).toHaveProperty("delete");
|
||||
});
|
||||
|
||||
it("generates unique operation IDs for all operations", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const operationIds: string[] = [];
|
||||
|
||||
for (const pathItem of Object.values(doc.paths ?? {})) {
|
||||
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
|
||||
const op = (pathItem as Record<string, unknown>)?.[method] as
|
||||
| { operationId?: string }
|
||||
| undefined;
|
||||
if (op?.operationId) {
|
||||
operationIds.push(op.operationId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Content operations
|
||||
expect(operationIds).toContain("listContent");
|
||||
expect(operationIds).toContain("createContent");
|
||||
expect(operationIds).toContain("getContent");
|
||||
expect(operationIds).toContain("updateContent");
|
||||
expect(operationIds).toContain("deleteContent");
|
||||
expect(operationIds).toContain("publishContent");
|
||||
expect(operationIds).toContain("duplicateContent");
|
||||
|
||||
// Media operations
|
||||
expect(operationIds).toContain("listMedia");
|
||||
expect(operationIds).toContain("getMedia");
|
||||
expect(operationIds).toContain("deleteMedia");
|
||||
expect(operationIds).toContain("getMediaUploadUrl");
|
||||
|
||||
// Schema operations
|
||||
expect(operationIds).toContain("listCollections");
|
||||
expect(operationIds).toContain("createCollection");
|
||||
expect(operationIds).toContain("listFields");
|
||||
expect(operationIds).toContain("createField");
|
||||
|
||||
// Comments operations
|
||||
expect(operationIds).toContain("listPublicComments");
|
||||
expect(operationIds).toContain("createComment");
|
||||
expect(operationIds).toContain("listAdminComments");
|
||||
expect(operationIds).toContain("bulkCommentAction");
|
||||
|
||||
// Taxonomy operations
|
||||
expect(operationIds).toContain("listTaxonomies");
|
||||
expect(operationIds).toContain("listTerms");
|
||||
expect(operationIds).toContain("createTerm");
|
||||
|
||||
// Menu operations
|
||||
expect(operationIds).toContain("listMenus");
|
||||
expect(operationIds).toContain("createMenu");
|
||||
expect(operationIds).toContain("createMenuItem");
|
||||
|
||||
// Section operations
|
||||
expect(operationIds).toContain("listSections");
|
||||
expect(operationIds).toContain("createSection");
|
||||
|
||||
// Widget operations
|
||||
expect(operationIds).toContain("listWidgetAreas");
|
||||
expect(operationIds).toContain("createWidget");
|
||||
|
||||
// Settings operations
|
||||
expect(operationIds).toContain("getSettings");
|
||||
expect(operationIds).toContain("updateSettings");
|
||||
|
||||
// Search operations
|
||||
expect(operationIds).toContain("search");
|
||||
expect(operationIds).toContain("rebuildSearchIndex");
|
||||
|
||||
// Redirect operations
|
||||
expect(operationIds).toContain("listRedirects");
|
||||
expect(operationIds).toContain("createRedirect");
|
||||
expect(operationIds).toContain("listNotFoundEntries");
|
||||
|
||||
// User operations
|
||||
expect(operationIds).toContain("listUsers");
|
||||
expect(operationIds).toContain("getUser");
|
||||
expect(operationIds).toContain("disableUser");
|
||||
|
||||
// No duplicate operation IDs
|
||||
const uniqueIds = new Set(operationIds);
|
||||
expect(uniqueIds.size).toBe(operationIds.length);
|
||||
});
|
||||
|
||||
it("includes reusable component schemas", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const schemas = doc.components?.schemas ?? {};
|
||||
|
||||
// Content schemas
|
||||
expect(schemas).toHaveProperty("ContentCreateBody");
|
||||
expect(schemas).toHaveProperty("ContentUpdateBody");
|
||||
expect(schemas).toHaveProperty("ContentItem");
|
||||
expect(schemas).toHaveProperty("ContentResponse");
|
||||
expect(schemas).toHaveProperty("ContentListResponse");
|
||||
|
||||
// Media schemas
|
||||
expect(schemas).toHaveProperty("MediaItem");
|
||||
expect(schemas).toHaveProperty("MediaListResponse");
|
||||
|
||||
// Schema schemas
|
||||
expect(schemas).toHaveProperty("Collection");
|
||||
expect(schemas).toHaveProperty("CollectionListResponse");
|
||||
|
||||
// Comment schemas
|
||||
expect(schemas).toHaveProperty("PublicComment");
|
||||
expect(schemas).toHaveProperty("Comment");
|
||||
expect(schemas).toHaveProperty("CommentBulkBody");
|
||||
|
||||
// Taxonomy schemas
|
||||
expect(schemas).toHaveProperty("Term");
|
||||
expect(schemas).toHaveProperty("TermListResponse");
|
||||
|
||||
// Menu schemas
|
||||
expect(schemas).toHaveProperty("MenuWithItems");
|
||||
|
||||
// User schemas
|
||||
expect(schemas).toHaveProperty("User");
|
||||
expect(schemas).toHaveProperty("UserListResponse");
|
||||
});
|
||||
|
||||
it("wraps success responses in { data } envelope", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const listPath = doc.paths?.["/_emdash/api/content/{collection}"];
|
||||
const getResponse = (listPath as Record<string, unknown>)?.get as {
|
||||
responses: Record<string, { content: Record<string, { schema: Record<string, unknown> }> }>;
|
||||
};
|
||||
const schema = getResponse?.responses?.["200"]?.content?.["application/json"]?.schema;
|
||||
|
||||
expect(schema).toBeDefined();
|
||||
// The envelope should have a "data" property
|
||||
expect(schema).toHaveProperty("properties");
|
||||
const props = (schema as Record<string, unknown>).properties as Record<string, unknown>;
|
||||
expect(props).toHaveProperty("data");
|
||||
});
|
||||
|
||||
it("includes error response schemas", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const listPath = doc.paths?.["/_emdash/api/content/{collection}"];
|
||||
const getOp = (listPath as Record<string, unknown>)?.get as {
|
||||
responses: Record<string, unknown>;
|
||||
};
|
||||
|
||||
// Should have auth error responses
|
||||
expect(getOp?.responses).toHaveProperty("401");
|
||||
expect(getOp?.responses).toHaveProperty("403");
|
||||
});
|
||||
|
||||
it("includes security schemes", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const schemes = doc.components?.securitySchemes;
|
||||
|
||||
expect(schemes).toHaveProperty("session");
|
||||
expect(schemes).toHaveProperty("bearer");
|
||||
});
|
||||
|
||||
it("tags all 12 domains", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const tagNames = (doc.tags ?? []).map((t: { name: string }) => t.name);
|
||||
|
||||
expect(tagNames).toContain("Content");
|
||||
expect(tagNames).toContain("Media");
|
||||
expect(tagNames).toContain("Schema");
|
||||
expect(tagNames).toContain("Comments");
|
||||
expect(tagNames).toContain("Taxonomies");
|
||||
expect(tagNames).toContain("Menus");
|
||||
expect(tagNames).toContain("Sections");
|
||||
expect(tagNames).toContain("Widgets");
|
||||
expect(tagNames).toContain("Settings");
|
||||
expect(tagNames).toContain("Search");
|
||||
expect(tagNames).toContain("Redirects");
|
||||
expect(tagNames).toContain("Users");
|
||||
expect(tagNames).toHaveLength(12);
|
||||
});
|
||||
|
||||
it("produces valid JSON output", () => {
|
||||
const doc = generateOpenApiDocument();
|
||||
const json = JSON.stringify(doc);
|
||||
|
||||
// Should not throw
|
||||
const parsed = JSON.parse(json);
|
||||
expect(parsed.openapi).toBe("3.1.0");
|
||||
});
|
||||
});
|
||||
122
packages/core/tests/unit/api/ownership-extraction.test.ts
Normal file
122
packages/core/tests/unit/api/ownership-extraction.test.ts
Normal file
@@ -0,0 +1,122 @@
|
||||
/**
|
||||
* Tests for SEC-07: ownership extraction bugs (#12, #13, #14, #16)
|
||||
*
|
||||
* Verifies that handler response shapes carry authorId correctly
|
||||
* and that ownership-related operations work as expected.
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
handleContentCreate,
|
||||
handleContentGet,
|
||||
handleContentGetIncludingTrashed,
|
||||
handleContentDelete,
|
||||
handleContentDuplicate,
|
||||
handleMediaCreate,
|
||||
} from "../../../src/api/index.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("SEC-07: Ownership extraction", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("#12: handleContentGet returns authorId inside data.item", () => {
|
||||
it("should expose authorId at data.item level, not data level", async () => {
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Owned Post" },
|
||||
authorId: "user_author_123",
|
||||
});
|
||||
expect(created.success).toBe(true);
|
||||
|
||||
const result = await handleContentGet(db, "post", created.data!.item.id);
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
// The route pattern extracts: existing.data.item.authorId
|
||||
// If authorId were only on data (wrong), ownership checks would always fail
|
||||
const data = result.data as Record<string, unknown>;
|
||||
const item = data.item as Record<string, unknown>;
|
||||
|
||||
expect(item.authorId).toBe("user_author_123");
|
||||
// data level should NOT have authorId directly
|
||||
expect(data.authorId).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should expose authorId at data.item level for trashed items", async () => {
|
||||
const created = await handleContentCreate(db, "post", {
|
||||
data: { title: "Trashed Post" },
|
||||
authorId: "user_trash_owner",
|
||||
});
|
||||
expect(created.success).toBe(true);
|
||||
await handleContentDelete(db, "post", created.data!.item.id);
|
||||
|
||||
const result = await handleContentGetIncludingTrashed(db, "post", created.data!.item.id);
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const data = result.data as Record<string, unknown>;
|
||||
const item = data.item as Record<string, unknown>;
|
||||
|
||||
expect(item.authorId).toBe("user_trash_owner");
|
||||
expect(data.authorId).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("#14: handleContentDuplicate uses caller's authorId", () => {
|
||||
it("should set the duplicate's authorId to the provided caller ID", async () => {
|
||||
const original = await handleContentCreate(db, "post", {
|
||||
data: { title: "Original Post" },
|
||||
authorId: "original_author",
|
||||
});
|
||||
expect(original.success).toBe(true);
|
||||
|
||||
// Duplicate as a different user
|
||||
const dup = await handleContentDuplicate(db, "post", original.data!.item.id, "caller_user");
|
||||
expect(dup.success).toBe(true);
|
||||
expect(dup.data?.item.authorId).toBe("caller_user");
|
||||
});
|
||||
|
||||
it("should fall back to original authorId when caller ID not provided", async () => {
|
||||
const original = await handleContentCreate(db, "post", {
|
||||
data: { title: "Fallback Post" },
|
||||
authorId: "original_author",
|
||||
});
|
||||
expect(original.success).toBe(true);
|
||||
|
||||
const dup = await handleContentDuplicate(db, "post", original.data!.item.id);
|
||||
expect(dup.success).toBe(true);
|
||||
expect(dup.data?.item.authorId).toBe("original_author");
|
||||
});
|
||||
});
|
||||
|
||||
describe("#16: handleMediaCreate persists authorId", () => {
|
||||
it("should store authorId on created media item", async () => {
|
||||
const result = await handleMediaCreate(db, {
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: "test_key_123.jpg",
|
||||
authorId: "media_uploader",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.authorId).toBe("media_uploader");
|
||||
});
|
||||
|
||||
it("should set authorId to null when not provided", async () => {
|
||||
const result = await handleMediaCreate(db, {
|
||||
filename: "orphan.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: "test_key_orphan.jpg",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.authorId).toBeNull();
|
||||
});
|
||||
});
|
||||
});
|
||||
170
packages/core/tests/unit/api/public-url.test.ts
Normal file
170
packages/core/tests/unit/api/public-url.test.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import { afterEach, beforeEach, describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
getPublicOrigin,
|
||||
getPublicUrl,
|
||||
getEnvAllowedOrigins,
|
||||
_resetEnvCache,
|
||||
} from "../../../src/api/public-url.js";
|
||||
import type { EmDashConfig } from "../../../src/astro/integration/runtime.js";
|
||||
|
||||
// Snapshot env vars we'll mutate, and restore after every test.
|
||||
const origEmdashSiteUrl = process.env.EMDASH_SITE_URL;
|
||||
const origSiteUrl = process.env.SITE_URL;
|
||||
const origAllowedOrigins = process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
|
||||
afterEach(() => {
|
||||
_resetEnvCache();
|
||||
// Restore original env state (delete if originally absent)
|
||||
if (origEmdashSiteUrl === undefined) delete process.env.EMDASH_SITE_URL;
|
||||
else process.env.EMDASH_SITE_URL = origEmdashSiteUrl;
|
||||
if (origSiteUrl === undefined) delete process.env.SITE_URL;
|
||||
else process.env.SITE_URL = origSiteUrl;
|
||||
if (origAllowedOrigins === undefined) delete process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
else process.env.EMDASH_ALLOWED_ORIGINS = origAllowedOrigins;
|
||||
});
|
||||
|
||||
// Ensure clean state before every test (no cache, no test env vars).
|
||||
beforeEach(() => {
|
||||
_resetEnvCache();
|
||||
delete process.env.EMDASH_SITE_URL;
|
||||
delete process.env.SITE_URL;
|
||||
delete process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
});
|
||||
|
||||
describe("getPublicOrigin()", () => {
|
||||
it("returns config.siteUrl when set", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config: EmDashConfig = { siteUrl: "https://mysite.example.com" };
|
||||
expect(getPublicOrigin(url, config)).toBe("https://mysite.example.com");
|
||||
});
|
||||
|
||||
it("returns url.origin when config has no siteUrl", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config: EmDashConfig = {};
|
||||
expect(getPublicOrigin(url, config)).toBe("http://localhost:4321");
|
||||
});
|
||||
|
||||
it("returns url.origin when config is undefined", () => {
|
||||
const url = new URL("https://example.com:8443/setup");
|
||||
expect(getPublicOrigin(url)).toBe("https://example.com:8443");
|
||||
});
|
||||
|
||||
it("returns url.origin when config.siteUrl is undefined", () => {
|
||||
const url = new URL("http://127.0.0.1:4321/api");
|
||||
expect(getPublicOrigin(url, { siteUrl: undefined })).toBe("http://127.0.0.1:4321");
|
||||
});
|
||||
|
||||
it("does not return empty string siteUrl (falsy)", () => {
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
// Empty string should fall through to url.origin
|
||||
expect(getPublicOrigin(url, { siteUrl: "" })).toBe("http://localhost:4321");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPublicOrigin() env var fallback", () => {
|
||||
it("falls back to EMDASH_SITE_URL when config has no siteUrl", () => {
|
||||
process.env.EMDASH_SITE_URL = "https://env.example.com";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("https://env.example.com");
|
||||
});
|
||||
|
||||
it("falls back to SITE_URL when EMDASH_SITE_URL is absent", () => {
|
||||
process.env.SITE_URL = "https://site-url.example.com";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("https://site-url.example.com");
|
||||
});
|
||||
|
||||
it("prefers EMDASH_SITE_URL over SITE_URL", () => {
|
||||
process.env.EMDASH_SITE_URL = "https://emdash.example.com";
|
||||
process.env.SITE_URL = "https://site.example.com";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("https://emdash.example.com");
|
||||
});
|
||||
|
||||
it("normalizes env var to origin (strips path)", () => {
|
||||
process.env.EMDASH_SITE_URL = "https://env.example.com/some/path";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("https://env.example.com");
|
||||
});
|
||||
|
||||
it("falls through to url.origin when env var is invalid URL", () => {
|
||||
process.env.EMDASH_SITE_URL = "not-a-url";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("http://localhost:4321");
|
||||
});
|
||||
|
||||
it("config.siteUrl takes precedence over env var", () => {
|
||||
process.env.EMDASH_SITE_URL = "https://env.example.com";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
const config: EmDashConfig = { siteUrl: "https://config.example.com" };
|
||||
expect(getPublicOrigin(url, config)).toBe("https://config.example.com");
|
||||
});
|
||||
|
||||
it("cache is invalidated by _resetEnvCache()", () => {
|
||||
process.env.EMDASH_SITE_URL = "https://first.example.com";
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicOrigin(url, {})).toBe("https://first.example.com");
|
||||
|
||||
_resetEnvCache();
|
||||
process.env.EMDASH_SITE_URL = "https://second.example.com";
|
||||
expect(getPublicOrigin(url, {})).toBe("https://second.example.com");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getEnvAllowedOrigins()", () => {
|
||||
it("returns [] when EMDASH_ALLOWED_ORIGINS is unset", () => {
|
||||
expect(getEnvAllowedOrigins()).toEqual([]);
|
||||
});
|
||||
|
||||
it("parses a comma-separated list into origins", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://example.com,https://preview.example.com";
|
||||
expect(getEnvAllowedOrigins()).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("trims whitespace around each entry", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = " https://example.com , https://preview.example.com ";
|
||||
expect(getEnvAllowedOrigins()).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("normalizes each entry to its origin (strips path/query)", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://example.com/x?y=1";
|
||||
expect(getEnvAllowedOrigins()).toEqual(["https://example.com"]);
|
||||
});
|
||||
|
||||
it("throws on entries with non-http(s) protocols", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "file:///etc/passwd,https://example.com";
|
||||
expect(() => getEnvAllowedOrigins()).toThrow(/EMDASH_ALLOWED_ORIGINS.*must be http or https/);
|
||||
});
|
||||
|
||||
it("throws on unparseable entries", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "not-a-url,https://example.com";
|
||||
expect(() => getEnvAllowedOrigins()).toThrow(/EMDASH_ALLOWED_ORIGINS.*invalid URL/);
|
||||
});
|
||||
|
||||
it("cache is invalidated by _resetEnvCache()", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://first.example.com";
|
||||
expect(getEnvAllowedOrigins()).toEqual(["https://first.example.com"]);
|
||||
|
||||
_resetEnvCache();
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://second.example.com";
|
||||
expect(getEnvAllowedOrigins()).toEqual(["https://second.example.com"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPublicUrl()", () => {
|
||||
it("builds full URL from siteUrl + path", () => {
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
const config: EmDashConfig = { siteUrl: "https://mysite.example.com" };
|
||||
expect(getPublicUrl(url, config, "/_emdash/admin/login")).toBe(
|
||||
"https://mysite.example.com/_emdash/admin/login",
|
||||
);
|
||||
});
|
||||
|
||||
it("builds full URL from request origin when no siteUrl", () => {
|
||||
const url = new URL("http://localhost:4321/x");
|
||||
expect(getPublicUrl(url, undefined, "/_emdash/admin/login")).toBe(
|
||||
"http://localhost:4321/_emdash/admin/login",
|
||||
);
|
||||
});
|
||||
});
|
||||
35
packages/core/tests/unit/api/redirect.test.ts
Normal file
35
packages/core/tests/unit/api/redirect.test.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { isSafeRedirect } from "#api/redirect.js";
|
||||
|
||||
describe("isSafeRedirect", () => {
|
||||
it("accepts simple relative paths", () => {
|
||||
expect(isSafeRedirect("/")).toBe(true);
|
||||
expect(isSafeRedirect("/admin")).toBe(true);
|
||||
expect(isSafeRedirect("/_emdash/admin")).toBe(true);
|
||||
expect(isSafeRedirect("/foo/bar?baz=1")).toBe(true);
|
||||
});
|
||||
|
||||
it("rejects protocol-relative URLs (double slash)", () => {
|
||||
expect(isSafeRedirect("//evil.com")).toBe(false);
|
||||
expect(isSafeRedirect("//evil.com/path")).toBe(false);
|
||||
});
|
||||
|
||||
it("rejects backslash bypass (/\\evil.com normalizes to //evil.com)", () => {
|
||||
expect(isSafeRedirect("/\\evil.com")).toBe(false);
|
||||
expect(isSafeRedirect("/foo\\bar")).toBe(false);
|
||||
expect(isSafeRedirect("\\evil.com")).toBe(false);
|
||||
});
|
||||
|
||||
it("rejects URLs that do not start with /", () => {
|
||||
expect(isSafeRedirect("https://evil.com")).toBe(false);
|
||||
expect(isSafeRedirect("http://evil.com")).toBe(false);
|
||||
expect(isSafeRedirect("evil.com")).toBe(false);
|
||||
expect(isSafeRedirect("")).toBe(false);
|
||||
});
|
||||
|
||||
it("rejects null and undefined", () => {
|
||||
expect(isSafeRedirect(null)).toBe(false);
|
||||
expect(isSafeRedirect(undefined)).toBe(false);
|
||||
});
|
||||
});
|
||||
133
packages/core/tests/unit/api/rev.test.ts
Normal file
133
packages/core/tests/unit/api/rev.test.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
/**
|
||||
* Unit tests for _rev token generation and validation.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { encodeRev, decodeRev, validateRev } from "../../../src/api/rev.js";
|
||||
import type { ContentItem } from "../../../src/database/repositories/types.js";
|
||||
|
||||
function makeItem(overrides: Partial<ContentItem> = {}): ContentItem {
|
||||
return {
|
||||
id: "item_1",
|
||||
type: "posts",
|
||||
slug: "test",
|
||||
status: "draft",
|
||||
data: {},
|
||||
authorId: null,
|
||||
createdAt: "2026-01-01T00:00:00.000Z",
|
||||
updatedAt: "2026-01-15T12:30:00.000Z",
|
||||
publishedAt: null,
|
||||
scheduledAt: null,
|
||||
liveRevisionId: null,
|
||||
draftRevisionId: null,
|
||||
version: 3,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("encodeRev", () => {
|
||||
it("produces a base64-encoded string", () => {
|
||||
const item = makeItem();
|
||||
const rev = encodeRev(item);
|
||||
|
||||
expect(rev).toBeTruthy();
|
||||
// Should be valid base64
|
||||
expect(() => atob(rev)).not.toThrow();
|
||||
});
|
||||
|
||||
it("encodes version and updatedAt", () => {
|
||||
const item = makeItem({ version: 5, updatedAt: "2026-02-14T10:00:00.000Z" });
|
||||
const rev = encodeRev(item);
|
||||
const decoded = atob(rev);
|
||||
|
||||
expect(decoded).toBe("5:2026-02-14T10:00:00.000Z");
|
||||
});
|
||||
|
||||
it("produces different revs for different versions", () => {
|
||||
const rev1 = encodeRev(makeItem({ version: 1 }));
|
||||
const rev2 = encodeRev(makeItem({ version: 2 }));
|
||||
expect(rev1).not.toBe(rev2);
|
||||
});
|
||||
|
||||
it("produces different revs for different updatedAt", () => {
|
||||
const rev1 = encodeRev(makeItem({ updatedAt: "2026-01-01T00:00:00.000Z" }));
|
||||
const rev2 = encodeRev(makeItem({ updatedAt: "2026-01-02T00:00:00.000Z" }));
|
||||
expect(rev1).not.toBe(rev2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("decodeRev", () => {
|
||||
it("decodes a valid rev", () => {
|
||||
const rev = btoa("5:2026-02-14T10:00:00.000Z");
|
||||
const result = decodeRev(rev);
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.version).toBe(5);
|
||||
expect(result!.updatedAt).toBe("2026-02-14T10:00:00.000Z");
|
||||
});
|
||||
|
||||
it("returns null for invalid base64", () => {
|
||||
expect(decodeRev("not-valid-base64!!!")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for missing colon", () => {
|
||||
expect(decodeRev(btoa("nocolon"))).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for non-numeric version", () => {
|
||||
expect(decodeRev(btoa("abc:2026-01-01"))).toBeNull();
|
||||
});
|
||||
|
||||
it("round-trips with encodeRev", () => {
|
||||
const item = makeItem({ version: 7, updatedAt: "2026-03-01T08:15:30.000Z" });
|
||||
const rev = encodeRev(item);
|
||||
const decoded = decodeRev(rev);
|
||||
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded!.version).toBe(7);
|
||||
expect(decoded!.updatedAt).toBe("2026-03-01T08:15:30.000Z");
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateRev", () => {
|
||||
it("returns valid when no rev is provided", () => {
|
||||
const result = validateRev(undefined, makeItem());
|
||||
expect(result.valid).toBe(true);
|
||||
});
|
||||
|
||||
it("returns valid when rev matches", () => {
|
||||
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
|
||||
const rev = encodeRev(item);
|
||||
|
||||
const result = validateRev(rev, item);
|
||||
expect(result.valid).toBe(true);
|
||||
});
|
||||
|
||||
it("returns invalid when version mismatches", () => {
|
||||
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
|
||||
const staleRev = btoa("2:2026-01-15T12:30:00.000Z"); // Version 2, but item is at 3
|
||||
|
||||
const result = validateRev(staleRev, item);
|
||||
expect(result.valid).toBe(false);
|
||||
if (!result.valid) {
|
||||
expect(result.message).toContain("modified");
|
||||
}
|
||||
});
|
||||
|
||||
it("returns invalid when updatedAt mismatches", () => {
|
||||
const item = makeItem({ version: 3, updatedAt: "2026-01-15T12:30:00.000Z" });
|
||||
const staleRev = btoa("3:2026-01-14T00:00:00.000Z"); // Right version, wrong timestamp
|
||||
|
||||
const result = validateRev(staleRev, item);
|
||||
expect(result.valid).toBe(false);
|
||||
});
|
||||
|
||||
it("returns invalid for malformed rev", () => {
|
||||
const result = validateRev("garbage", makeItem());
|
||||
expect(result.valid).toBe(false);
|
||||
if (!result.valid) {
|
||||
expect(result.message).toContain("Malformed");
|
||||
}
|
||||
});
|
||||
});
|
||||
230
packages/core/tests/unit/api/revision-handlers.test.ts
Normal file
230
packages/core/tests/unit/api/revision-handlers.test.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
handleRevisionList,
|
||||
handleRevisionGet,
|
||||
handleRevisionRestore,
|
||||
} from "../../../src/api/index.js";
|
||||
import { ContentRepository } from "../../../src/database/repositories/content.js";
|
||||
import { RevisionRepository } from "../../../src/database/repositories/revision.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { createPostFixture } from "../../utils/fixtures.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Revision Handlers", () => {
|
||||
let db: Kysely<Database>;
|
||||
let contentRepo: ContentRepository;
|
||||
let revisionRepo: RevisionRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
contentRepo = new ContentRepository(db);
|
||||
revisionRepo = new RevisionRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("handleRevisionList", () => {
|
||||
it("should return empty list when no revisions exist", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
|
||||
const result = await handleRevisionList(db, "post", content.id, {});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toEqual([]);
|
||||
expect(result.data?.total).toBe(0);
|
||||
});
|
||||
|
||||
it("should return revisions for a content entry", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
|
||||
// Create some revisions with small delay to ensure distinct ULIDs
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "Version 1", content: "First version" },
|
||||
});
|
||||
// Small delay to ensure ULID timestamp differs
|
||||
await new Promise((resolve) => setTimeout(resolve, 2));
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "Version 2", content: "Second version" },
|
||||
});
|
||||
|
||||
const result = await handleRevisionList(db, "post", content.id, {});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toHaveLength(2);
|
||||
expect(result.data?.total).toBe(2);
|
||||
// Should be newest first
|
||||
expect(result.data?.items[0].data.title).toBe("Version 2");
|
||||
expect(result.data?.items[1].data.title).toBe("Version 1");
|
||||
});
|
||||
|
||||
it("should respect limit parameter", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
|
||||
// Create 5 revisions
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: `Version ${i}` },
|
||||
});
|
||||
}
|
||||
|
||||
const result = await handleRevisionList(db, "post", content.id, {
|
||||
limit: 3,
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toHaveLength(3);
|
||||
expect(result.data?.total).toBe(5); // Total still reflects all revisions
|
||||
});
|
||||
|
||||
it("should not return revisions from other entries", async () => {
|
||||
const content1 = await contentRepo.create(createPostFixture());
|
||||
const content2 = await contentRepo.create({
|
||||
...createPostFixture(),
|
||||
slug: "another-post",
|
||||
});
|
||||
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content1.id,
|
||||
data: { title: "Content 1 revision" },
|
||||
});
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content2.id,
|
||||
data: { title: "Content 2 revision" },
|
||||
});
|
||||
|
||||
const result = await handleRevisionList(db, "post", content1.id, {});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.items).toHaveLength(1);
|
||||
expect(result.data?.items[0].data.title).toBe("Content 1 revision");
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleRevisionGet", () => {
|
||||
it("should return a revision by ID", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "Test Revision" },
|
||||
});
|
||||
|
||||
const result = await handleRevisionGet(db, revision.id);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.id).toBe(revision.id);
|
||||
expect(result.data?.item.data.title).toBe("Test Revision");
|
||||
});
|
||||
|
||||
it("should return NOT_FOUND for non-existent revision", async () => {
|
||||
const result = await handleRevisionGet(db, "nonexistent-id");
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleRevisionRestore", () => {
|
||||
const callerUserId = "user_caller_123";
|
||||
|
||||
it("should restore content to a previous revision", async () => {
|
||||
const content = await contentRepo.create({
|
||||
...createPostFixture(),
|
||||
data: { title: "Original", content: "Original content" },
|
||||
});
|
||||
|
||||
// Create a revision with the original state
|
||||
const originalRevision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "Original", content: "Original content" },
|
||||
});
|
||||
|
||||
// Update the content
|
||||
await contentRepo.update("post", content.id, {
|
||||
data: { title: "Updated", content: "Updated content" },
|
||||
});
|
||||
|
||||
// Restore to original revision
|
||||
const result = await handleRevisionRestore(db, originalRevision.id, callerUserId);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.data.title).toBe("Original");
|
||||
expect(result.data?.item.data.content).toBe("Original content");
|
||||
});
|
||||
|
||||
it("should create a new revision when restoring", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "To restore" },
|
||||
});
|
||||
|
||||
const beforeCount = await revisionRepo.countByEntry("post", content.id);
|
||||
|
||||
await handleRevisionRestore(db, revision.id, callerUserId);
|
||||
|
||||
const afterCount = await revisionRepo.countByEntry("post", content.id);
|
||||
expect(afterCount).toBe(beforeCount + 1);
|
||||
});
|
||||
|
||||
it("should attribute the new revision to the caller", async () => {
|
||||
const content = await contentRepo.create(createPostFixture());
|
||||
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "To restore" },
|
||||
authorId: "original_author",
|
||||
});
|
||||
|
||||
await handleRevisionRestore(db, revision.id, callerUserId);
|
||||
|
||||
// The newest revision (restore record) should be attributed to the caller
|
||||
const latestRevision = await revisionRepo.findLatest("post", content.id);
|
||||
expect(latestRevision).not.toBeNull();
|
||||
expect(latestRevision!.authorId).toBe(callerUserId);
|
||||
});
|
||||
|
||||
it("should handle revision data containing _slug", async () => {
|
||||
const content = await contentRepo.create({
|
||||
...createPostFixture(),
|
||||
data: { title: "Original" },
|
||||
});
|
||||
|
||||
// Revision data includes _slug (added by runtime when slug changes)
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: content.id,
|
||||
data: { title: "With slug change", _slug: "new-slug" },
|
||||
});
|
||||
|
||||
const result = await handleRevisionRestore(db, revision.id, callerUserId);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.item.data.title).toBe("With slug change");
|
||||
expect(result.data?.item.slug).toBe("new-slug");
|
||||
});
|
||||
|
||||
it("should return NOT_FOUND for non-existent revision", async () => {
|
||||
const result = await handleRevisionRestore(db, "nonexistent-id", callerUserId);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
});
|
||||
});
|
||||
});
|
||||
226
packages/core/tests/unit/api/schemas.test.ts
Normal file
226
packages/core/tests/unit/api/schemas.test.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
contentCreateBody,
|
||||
contentUpdateBody,
|
||||
httpUrl,
|
||||
mediaUploadUrlBody,
|
||||
DEFAULT_MAX_UPLOAD_SIZE,
|
||||
} from "../../../src/api/schemas/index.js";
|
||||
|
||||
describe("contentCreateBody schema", () => {
|
||||
it("accepts status 'draft'", () => {
|
||||
const result = contentCreateBody.parse({ data: { title: "Hi" }, status: "draft" });
|
||||
expect(result.status).toBe("draft");
|
||||
});
|
||||
|
||||
it("accepts omitted status", () => {
|
||||
const result = contentCreateBody.parse({ data: { title: "Hi" } });
|
||||
expect(result.status).toBeUndefined();
|
||||
});
|
||||
|
||||
it("rejects status 'published'", () => {
|
||||
expect(() => contentCreateBody.parse({ data: { title: "Hi" }, status: "published" })).toThrow();
|
||||
});
|
||||
|
||||
it("rejects status 'scheduled'", () => {
|
||||
expect(() => contentCreateBody.parse({ data: { title: "Hi" }, status: "scheduled" })).toThrow();
|
||||
});
|
||||
|
||||
it("preserves publishedAt and createdAt when valid ISO 8601 datetimes are provided", () => {
|
||||
const result = contentCreateBody.parse({
|
||||
data: { title: "Hi" },
|
||||
publishedAt: "2019-03-15T10:30:00.000Z",
|
||||
createdAt: "2019-03-15T10:30:00.000Z",
|
||||
});
|
||||
expect(result.publishedAt).toBe("2019-03-15T10:30:00.000Z");
|
||||
expect(result.createdAt).toBe("2019-03-15T10:30:00.000Z");
|
||||
});
|
||||
|
||||
it("accepts offset-suffixed ISO datetimes", () => {
|
||||
const result = contentCreateBody.parse({
|
||||
data: { title: "Hi" },
|
||||
publishedAt: "2019-03-15T10:30:00+00:00",
|
||||
});
|
||||
expect(result.publishedAt).toBe("2019-03-15T10:30:00+00:00");
|
||||
});
|
||||
|
||||
it("rejects malformed datetime strings", () => {
|
||||
expect(() =>
|
||||
contentCreateBody.parse({ data: { title: "Hi" }, publishedAt: "yesterday" }),
|
||||
).toThrow();
|
||||
expect(() =>
|
||||
contentCreateBody.parse({ data: { title: "Hi" }, createdAt: "2019-03-15" }),
|
||||
).toThrow();
|
||||
});
|
||||
|
||||
it("accepts null to explicitly clear the field", () => {
|
||||
const result = contentCreateBody.parse({ data: { title: "Hi" }, publishedAt: null });
|
||||
expect(result.publishedAt).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("contentUpdateBody schema", () => {
|
||||
it("should pass through skipRevision when present", () => {
|
||||
const input = {
|
||||
data: { title: "Hello" },
|
||||
skipRevision: true,
|
||||
};
|
||||
const result = contentUpdateBody.parse(input);
|
||||
expect(result.skipRevision).toBe(true);
|
||||
});
|
||||
|
||||
it("should accept updates without skipRevision", () => {
|
||||
const input = {
|
||||
data: { title: "Hello" },
|
||||
};
|
||||
const result = contentUpdateBody.parse(input);
|
||||
expect(result.skipRevision).toBeUndefined();
|
||||
});
|
||||
|
||||
it("accepts status 'draft'", () => {
|
||||
const result = contentUpdateBody.parse({ data: { title: "Hi" }, status: "draft" });
|
||||
expect(result.status).toBe("draft");
|
||||
});
|
||||
|
||||
it("accepts omitted status", () => {
|
||||
const result = contentUpdateBody.parse({ data: { title: "Hi" } });
|
||||
expect(result.status).toBeUndefined();
|
||||
});
|
||||
|
||||
it("rejects status 'published'", () => {
|
||||
expect(() => contentUpdateBody.parse({ data: { title: "Hi" }, status: "published" })).toThrow();
|
||||
});
|
||||
|
||||
it("rejects status 'scheduled'", () => {
|
||||
expect(() => contentUpdateBody.parse({ data: { title: "Hi" }, status: "scheduled" })).toThrow();
|
||||
});
|
||||
|
||||
it("preserves publishedAt when a valid ISO 8601 datetime is provided", () => {
|
||||
const result = contentUpdateBody.parse({
|
||||
data: { title: "Hi" },
|
||||
publishedAt: "2019-03-15T10:30:00.000Z",
|
||||
});
|
||||
expect(result.publishedAt).toBe("2019-03-15T10:30:00.000Z");
|
||||
});
|
||||
|
||||
it("rejects malformed publishedAt strings", () => {
|
||||
expect(() =>
|
||||
contentUpdateBody.parse({ data: { title: "Hi" }, publishedAt: "yesterday" }),
|
||||
).toThrow();
|
||||
});
|
||||
|
||||
it("strips createdAt — treat created_at as immutable on update", () => {
|
||||
const result = contentUpdateBody.parse({
|
||||
data: { title: "Hi" },
|
||||
createdAt: "2019-03-15T10:30:00.000Z",
|
||||
} as Parameters<typeof contentUpdateBody.parse>[0]);
|
||||
expect("createdAt" in result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("httpUrl validator", () => {
|
||||
it("accepts http URLs", () => {
|
||||
expect(httpUrl.parse("http://example.com")).toBe("http://example.com");
|
||||
});
|
||||
|
||||
it("accepts https URLs", () => {
|
||||
expect(httpUrl.parse("https://example.com/path?q=1")).toBe("https://example.com/path?q=1");
|
||||
});
|
||||
|
||||
it("rejects javascript: URIs", () => {
|
||||
expect(() => httpUrl.parse("javascript:alert(1)")).toThrow();
|
||||
});
|
||||
|
||||
it("rejects data: URIs", () => {
|
||||
expect(() => httpUrl.parse("data:text/html,<script>alert(1)</script>")).toThrow();
|
||||
});
|
||||
|
||||
it("rejects ftp: URIs", () => {
|
||||
expect(() => httpUrl.parse("ftp://example.com")).toThrow();
|
||||
});
|
||||
|
||||
it("rejects empty string", () => {
|
||||
expect(() => httpUrl.parse("")).toThrow();
|
||||
});
|
||||
|
||||
it("rejects non-URL strings", () => {
|
||||
expect(() => httpUrl.parse("not a url")).toThrow();
|
||||
});
|
||||
|
||||
it("is case-insensitive for scheme", () => {
|
||||
expect(httpUrl.parse("HTTPS://EXAMPLE.COM")).toBe("HTTPS://EXAMPLE.COM");
|
||||
});
|
||||
});
|
||||
|
||||
describe("mediaUploadUrlBody schema factory", () => {
|
||||
it("DEFAULT_MAX_UPLOAD_SIZE is 50 MB", () => {
|
||||
expect(DEFAULT_MAX_UPLOAD_SIZE).toBe(50 * 1024 * 1024);
|
||||
});
|
||||
|
||||
it("rejects size above the configured limit", () => {
|
||||
const schema = mediaUploadUrlBody(1_000);
|
||||
expect(() =>
|
||||
schema.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 1_001 }),
|
||||
).toThrow();
|
||||
});
|
||||
|
||||
it("accepts size equal to the configured limit", () => {
|
||||
const schema = mediaUploadUrlBody(1_000);
|
||||
const result = schema.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 1_000 });
|
||||
expect(result.size).toBe(1_000);
|
||||
});
|
||||
|
||||
it("accepts size below the configured limit", () => {
|
||||
const schema = mediaUploadUrlBody(1_000);
|
||||
const result = schema.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 500 });
|
||||
expect(result.size).toBe(500);
|
||||
});
|
||||
|
||||
it("each call returns an independent schema with its own limit", () => {
|
||||
const strict = mediaUploadUrlBody(100);
|
||||
const loose = mediaUploadUrlBody(1_000_000);
|
||||
expect(() =>
|
||||
strict.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 500 }),
|
||||
).toThrow();
|
||||
expect(() =>
|
||||
loose.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 500 }),
|
||||
).not.toThrow();
|
||||
});
|
||||
|
||||
it("throws when maxSize is NaN", () => {
|
||||
expect(() => mediaUploadUrlBody(NaN)).toThrow(/maxUploadSize/);
|
||||
});
|
||||
|
||||
it("throws when maxSize is 0", () => {
|
||||
expect(() => mediaUploadUrlBody(0)).toThrow(/maxUploadSize/);
|
||||
});
|
||||
|
||||
it("throws when maxSize is negative", () => {
|
||||
expect(() => mediaUploadUrlBody(-1024)).toThrow(/maxUploadSize/);
|
||||
});
|
||||
|
||||
it("error message uses whole MB, not fractional", () => {
|
||||
const schema = mediaUploadUrlBody(75_000_000);
|
||||
let errorMessage = "";
|
||||
try {
|
||||
schema.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 75_000_001 });
|
||||
} catch (e) {
|
||||
errorMessage = String(e);
|
||||
}
|
||||
expect(errorMessage).not.toBe("");
|
||||
expect(errorMessage).not.toMatch(/\d+\.\d+MB/);
|
||||
});
|
||||
|
||||
it("error message does not overstate the limit in MB", () => {
|
||||
// 75_000_000 bytes / 1024 / 1024 ≈ 71.5 MB; floor gives 71, round gives 72
|
||||
const schema = mediaUploadUrlBody(75_000_000);
|
||||
let errorMessage = "";
|
||||
try {
|
||||
schema.parse({ filename: "a.jpg", contentType: "image/jpeg", size: 75_000_001 });
|
||||
} catch (e) {
|
||||
errorMessage = String(e);
|
||||
}
|
||||
expect(errorMessage).toContain("71MB");
|
||||
});
|
||||
});
|
||||
363
packages/core/tests/unit/astro/content-routes-authz.test.ts
Normal file
363
packages/core/tests/unit/astro/content-routes-authz.test.ts
Normal file
@@ -0,0 +1,363 @@
|
||||
/**
|
||||
* Content read endpoint authorization.
|
||||
*
|
||||
* content:read is granted to SUBSCRIBER so member-only published content can
|
||||
* be read via the admin API. Drafts, scheduled, trashed items, and editor
|
||||
* views (revisions, compare, preview-url) are gated on content:read_drafts
|
||||
* (CONTRIBUTOR+):
|
||||
*
|
||||
* - GET /content/:c forces status=published for SUBSCRIBER, ignoring any
|
||||
* caller-supplied status filter.
|
||||
* - GET /content/:c/:id returns 404 to SUBSCRIBER for non-published items
|
||||
* (404 to avoid leaking existence via status code).
|
||||
* - /compare, /revisions, /trash, /preview-url require content:read_drafts.
|
||||
* - /translations filters non-published locales out for SUBSCRIBER.
|
||||
*/
|
||||
|
||||
import { Role, type RoleLevel } from "@emdash-cms/auth";
|
||||
import type { APIContext } from "astro";
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { GET as getItem } from "../../../src/astro/routes/api/content/[collection]/[id].js";
|
||||
import { GET as getCompare } from "../../../src/astro/routes/api/content/[collection]/[id]/compare.js";
|
||||
import { POST as postPreviewUrl } from "../../../src/astro/routes/api/content/[collection]/[id]/preview-url.js";
|
||||
import { GET as getRevisions } from "../../../src/astro/routes/api/content/[collection]/[id]/revisions.js";
|
||||
import { GET as getTranslations } from "../../../src/astro/routes/api/content/[collection]/[id]/translations.js";
|
||||
import { GET as getList } from "../../../src/astro/routes/api/content/[collection]/index.js";
|
||||
import { GET as getTrash } from "../../../src/astro/routes/api/content/[collection]/trash.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Test helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Minimal user shape the routes read from `locals.user`. */
interface StubUser {
  id: string;
  role: RoleLevel;
}

// Fixture users at the three role levels these authz tests exercise.
const subscriber: StubUser = { id: "u-sub", role: Role.SUBSCRIBER };
const contributor: StubUser = { id: "u-con", role: Role.CONTRIBUTOR };
const editor: StubUser = { id: "u-edit", role: Role.EDITOR };
|
||||
|
||||
/**
 * Stand-in for a content item row as returned by the content handlers.
 * Only the fields the routes and assertions in this file touch are modeled.
 */
interface StubItem {
  id: string;
  type: string;
  slug: string | null;
  // "draft" | "published" | "scheduled" in these tests; kept as string here.
  status: string;
  data: Record<string, unknown>;
  authorId: string | null;
  primaryBylineId: string | null;
  createdAt: string;
  updatedAt: string;
  publishedAt: string | null;
  scheduledAt: string | null;
  liveRevisionId: string | null;
  draftRevisionId: string | null;
  version: number;
  locale: string | null;
  translationGroup: string | null;
}
|
||||
|
||||
function makeItem(partial: Partial<StubItem> & { id: string; status: string }): StubItem {
|
||||
return {
|
||||
type: "post",
|
||||
slug: partial.id,
|
||||
data: {},
|
||||
authorId: null,
|
||||
primaryBylineId: null,
|
||||
createdAt: "2026-01-01T00:00:00Z",
|
||||
updatedAt: "2026-01-01T00:00:00Z",
|
||||
publishedAt: partial.status === "published" ? "2026-01-01T00:00:00Z" : null,
|
||||
scheduledAt: null,
|
||||
liveRevisionId: null,
|
||||
draftRevisionId: null,
|
||||
version: 1,
|
||||
locale: null,
|
||||
translationGroup: null,
|
||||
...partial,
|
||||
};
|
||||
}
|
||||
|
||||
function buildEmdash(
|
||||
opts: {
|
||||
listItems?: StubItem[];
|
||||
getItem?: StubItem | null;
|
||||
translations?: Array<{
|
||||
id: string;
|
||||
status: string;
|
||||
locale: string | null;
|
||||
slug: string | null;
|
||||
updatedAt: string;
|
||||
}>;
|
||||
trashItems?: StubItem[];
|
||||
revisions?: Array<{ id: string }>;
|
||||
compare?: { hasChanges: boolean; live: unknown; draft: unknown };
|
||||
} = {},
|
||||
) {
|
||||
const handleContentList = vi.fn(async (_collection: string, params: { status?: string }) => {
|
||||
const items = params.status
|
||||
? (opts.listItems ?? []).filter((i) => i.status === params.status)
|
||||
: (opts.listItems ?? []);
|
||||
return { success: true as const, data: { items, nextCursor: undefined } };
|
||||
});
|
||||
|
||||
const handleContentGet = vi.fn(async (_collection: string, _id: string) => {
|
||||
if (!opts.getItem) {
|
||||
return { success: false as const, error: { code: "NOT_FOUND", message: "not found" } };
|
||||
}
|
||||
return { success: true as const, data: { item: opts.getItem, _rev: "rev1" } };
|
||||
});
|
||||
|
||||
const handleContentTranslations = vi.fn(async () => ({
|
||||
success: true as const,
|
||||
data: { translationGroup: "tg-1", translations: opts.translations ?? [] },
|
||||
}));
|
||||
|
||||
const handleContentListTrashed = vi.fn(async () => ({
|
||||
success: true as const,
|
||||
data: { items: opts.trashItems ?? [], nextCursor: undefined },
|
||||
}));
|
||||
|
||||
const handleRevisionList = vi.fn(async () => ({
|
||||
success: true as const,
|
||||
data: { items: opts.revisions ?? [] },
|
||||
}));
|
||||
|
||||
const handleContentCompare = vi.fn(async () => ({
|
||||
success: true as const,
|
||||
data: opts.compare ?? { hasChanges: false, live: null, draft: null },
|
||||
}));
|
||||
|
||||
return {
|
||||
handleContentList,
|
||||
handleContentGet,
|
||||
handleContentTranslations,
|
||||
handleContentListTrashed,
|
||||
handleRevisionList,
|
||||
handleContentCompare,
|
||||
};
|
||||
}
|
||||
|
||||
function ctx(opts: {
|
||||
user: StubUser | null;
|
||||
emdash: ReturnType<typeof buildEmdash>;
|
||||
params?: Record<string, string>;
|
||||
url?: string;
|
||||
request?: Request;
|
||||
}): APIContext {
|
||||
const url = new URL(opts.url ?? "http://localhost/");
|
||||
return {
|
||||
params: opts.params ?? { collection: "post" },
|
||||
url,
|
||||
request: opts.request ?? new Request(url),
|
||||
locals: {
|
||||
user: opts.user,
|
||||
emdash: opts.emdash,
|
||||
},
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- minimal stub for tests
|
||||
} as unknown as APIContext;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// LIST endpoint
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("GET /content/:collection — subscriber drafts leak", () => {
|
||||
const items = [
|
||||
makeItem({ id: "draft-1", status: "draft" }),
|
||||
makeItem({ id: "pub-1", status: "published" }),
|
||||
makeItem({ id: "sched-1", status: "scheduled" }),
|
||||
];
|
||||
|
||||
it("forces status=published filter for SUBSCRIBER", async () => {
|
||||
const emdash = buildEmdash({ listItems: items });
|
||||
const res = await getList(ctx({ user: subscriber, emdash }));
|
||||
expect(res.status).toBe(200);
|
||||
expect(emdash.handleContentList).toHaveBeenCalledWith(
|
||||
"post",
|
||||
expect.objectContaining({ status: "published" }),
|
||||
);
|
||||
const body = (await res.json()) as { data: { items: StubItem[] } };
|
||||
expect(body.data.items.map((i) => i.id)).toEqual(["pub-1"]);
|
||||
});
|
||||
|
||||
it("rejects subscriber attempt to override status filter to draft", async () => {
|
||||
const emdash = buildEmdash({ listItems: items });
|
||||
const res = await getList(
|
||||
ctx({
|
||||
user: subscriber,
|
||||
emdash,
|
||||
url: "http://localhost/?status=draft",
|
||||
}),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
// The route must not honour ?status=draft for SUBSCRIBER — should still
|
||||
// be forced to published.
|
||||
expect(emdash.handleContentList).toHaveBeenCalledWith(
|
||||
"post",
|
||||
expect.objectContaining({ status: "published" }),
|
||||
);
|
||||
const body = (await res.json()) as { data: { items: StubItem[] } };
|
||||
expect(body.data.items.every((i) => i.status === "published")).toBe(true);
|
||||
});
|
||||
|
||||
it("returns full set for CONTRIBUTOR (has read_drafts)", async () => {
|
||||
const emdash = buildEmdash({ listItems: items });
|
||||
const res = await getList(ctx({ user: contributor, emdash }));
|
||||
expect(res.status).toBe(200);
|
||||
// status param is undefined (caller-controlled), not forced
|
||||
const call = emdash.handleContentList.mock.calls[0]?.[1];
|
||||
expect(call?.status).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// GET single item
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("GET /content/:collection/:id — subscriber drafts leak", () => {
|
||||
it("returns 404 to SUBSCRIBER fetching a draft", async () => {
|
||||
const emdash = buildEmdash({ getItem: makeItem({ id: "p1", status: "draft" }) });
|
||||
const res = await getItem(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(404);
|
||||
});
|
||||
|
||||
it("returns 404 to SUBSCRIBER fetching a scheduled item", async () => {
|
||||
const emdash = buildEmdash({ getItem: makeItem({ id: "p1", status: "scheduled" }) });
|
||||
const res = await getItem(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(404);
|
||||
});
|
||||
|
||||
it("allows SUBSCRIBER to fetch a published item", async () => {
|
||||
const emdash = buildEmdash({ getItem: makeItem({ id: "p1", status: "published" }) });
|
||||
const res = await getItem(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("allows CONTRIBUTOR to fetch a draft", async () => {
|
||||
const emdash = buildEmdash({ getItem: makeItem({ id: "p1", status: "draft" }) });
|
||||
const res = await getItem(
|
||||
ctx({ user: contributor, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Editor-only views — must require content:read_drafts
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("editor-only content views require content:read_drafts", () => {
|
||||
it("denies SUBSCRIBER on /compare", async () => {
|
||||
const emdash = buildEmdash({ compare: { hasChanges: false, live: null, draft: null } });
|
||||
const res = await getCompare(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(403);
|
||||
expect(emdash.handleContentCompare).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("allows CONTRIBUTOR on /compare", async () => {
|
||||
const emdash = buildEmdash({ compare: { hasChanges: false, live: null, draft: null } });
|
||||
const res = await getCompare(
|
||||
ctx({ user: contributor, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("denies SUBSCRIBER on /revisions", async () => {
|
||||
const emdash = buildEmdash({ revisions: [] });
|
||||
const res = await getRevisions(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(403);
|
||||
expect(emdash.handleRevisionList).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("denies SUBSCRIBER on /trash", async () => {
|
||||
const emdash = buildEmdash({ trashItems: [] });
|
||||
const res = await getTrash(ctx({ user: subscriber, emdash }));
|
||||
expect(res.status).toBe(403);
|
||||
expect(emdash.handleContentListTrashed).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("denies SUBSCRIBER on /preview-url POST", async () => {
|
||||
const emdash = buildEmdash({ getItem: makeItem({ id: "p1", status: "published" }) });
|
||||
const url = "http://localhost/";
|
||||
const res = await postPreviewUrl(
|
||||
ctx({
|
||||
user: subscriber,
|
||||
emdash,
|
||||
params: { collection: "post", id: "p1" },
|
||||
url,
|
||||
request: new Request(url, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: "{}",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(res.status).toBe(403);
|
||||
expect(emdash.handleContentGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("allows EDITOR on /trash", async () => {
|
||||
const emdash = buildEmdash({ trashItems: [] });
|
||||
const res = await getTrash(ctx({ user: editor, emdash }));
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Translations endpoint — must status-filter for SUBSCRIBER
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("GET /content/:collection/:id/translations", () => {
|
||||
const translations = [
|
||||
{
|
||||
id: "t-en",
|
||||
locale: "en",
|
||||
slug: "p1",
|
||||
status: "published",
|
||||
updatedAt: "2026-01-01T00:00:00Z",
|
||||
},
|
||||
{ id: "t-fr", locale: "fr", slug: "p1", status: "draft", updatedAt: "2026-01-01T00:00:00Z" },
|
||||
{
|
||||
id: "t-de",
|
||||
locale: "de",
|
||||
slug: "p1",
|
||||
status: "scheduled",
|
||||
updatedAt: "2026-01-01T00:00:00Z",
|
||||
},
|
||||
];
|
||||
|
||||
it("filters non-published translations for SUBSCRIBER", async () => {
|
||||
const emdash = buildEmdash({ translations });
|
||||
const res = await getTranslations(
|
||||
ctx({ user: subscriber, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
const body = (await res.json()) as {
|
||||
data: { translations: Array<{ id: string; status: string }> };
|
||||
};
|
||||
expect(body.data.translations.map((t) => t.id)).toEqual(["t-en"]);
|
||||
});
|
||||
|
||||
it("returns all translations for CONTRIBUTOR", async () => {
|
||||
const emdash = buildEmdash({ translations });
|
||||
const res = await getTranslations(
|
||||
ctx({ user: contributor, emdash, params: { collection: "post", id: "p1" } }),
|
||||
);
|
||||
expect(res.status).toBe(200);
|
||||
const body = (await res.json()) as {
|
||||
data: { translations: Array<{ id: string; status: string }> };
|
||||
};
|
||||
expect(body.data.translations).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,143 @@
|
||||
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
generateConfigModule,
|
||||
generateDialectModule,
|
||||
generateSeedModule,
|
||||
} from "../../../../src/astro/integration/virtual-modules.js";
|
||||
|
||||
describe("generateConfigModule", () => {
|
||||
it("round-trips the serialisable config shape via default export", () => {
|
||||
const source = generateConfigModule({
|
||||
siteUrl: "https://example.com",
|
||||
trustedProxyHeaders: ["x-real-ip", "fly-client-ip"],
|
||||
maxUploadSize: 52_428_800,
|
||||
});
|
||||
// The virtual module is `export default <JSON>` — eval by stripping
|
||||
// the prefix and parsing.
|
||||
const prefix = "export default ";
|
||||
expect(source.startsWith(prefix)).toBe(true);
|
||||
const json = source.slice(prefix.length).replace(/;$/, "");
|
||||
const parsed = JSON.parse(json);
|
||||
expect(parsed.trustedProxyHeaders).toEqual(["x-real-ip", "fly-client-ip"]);
|
||||
expect(parsed.siteUrl).toBe("https://example.com");
|
||||
});
|
||||
});
|
||||
|
||||
describe("generateDialectModule", () => {
|
||||
it("emits undefined createDialect and null stub when no entrypoint is configured", () => {
|
||||
const out = generateDialectModule({ supportsRequestScope: false });
|
||||
expect(out).toContain("export const createDialect = undefined");
|
||||
expect(out).toContain("export const createRequestScopedDb = (_opts) => null");
|
||||
});
|
||||
|
||||
it("emits a null stub for adapters that don't support request scoping", () => {
|
||||
const out = generateDialectModule({
|
||||
entrypoint: "some-adapter/dialect",
|
||||
type: "sqlite",
|
||||
supportsRequestScope: false,
|
||||
});
|
||||
expect(out).toContain(`import { createDialect as _createDialect } from "some-adapter/dialect"`);
|
||||
expect(out).toContain("export const createRequestScopedDb = (_opts) => null");
|
||||
expect(out).not.toContain(`export { createRequestScopedDb } from`);
|
||||
});
|
||||
|
||||
it("re-exports createRequestScopedDb from the adapter when supportsRequestScope is true", () => {
|
||||
const out = generateDialectModule({
|
||||
entrypoint: "@emdash-cms/cloudflare/db/d1",
|
||||
type: "sqlite",
|
||||
supportsRequestScope: true,
|
||||
});
|
||||
expect(out).toContain(`export { createRequestScopedDb } from "@emdash-cms/cloudflare/db/d1"`);
|
||||
expect(out).not.toContain("= () => null");
|
||||
expect(out).not.toContain("= (_opts) => null");
|
||||
});
|
||||
|
||||
it("threads the dialect type through", () => {
|
||||
const out = generateDialectModule({
|
||||
entrypoint: "emdash/db/postgres",
|
||||
type: "postgres",
|
||||
supportsRequestScope: false,
|
||||
});
|
||||
expect(out).toContain(`export const dialectType = "postgres"`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("generateSeedModule", () => {
|
||||
let projectRoot: string;
|
||||
|
||||
beforeEach(() => {
|
||||
projectRoot = mkdtempSync(join(tmpdir(), "emdash-seed-test-"));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
const sampleSeed = (name: string) => ({
|
||||
version: "1",
|
||||
meta: { name },
|
||||
collections: [],
|
||||
});
|
||||
|
||||
it("prefers .emdash/seed.json over package.json#emdash.seed and seed/seed.json", () => {
|
||||
mkdirSync(join(projectRoot, ".emdash"));
|
||||
writeFileSync(
|
||||
join(projectRoot, ".emdash", "seed.json"),
|
||||
JSON.stringify(sampleSeed("dot-emdash")),
|
||||
);
|
||||
|
||||
writeFileSync(
|
||||
join(projectRoot, "package.json"),
|
||||
JSON.stringify({ name: "x", emdash: { seed: "custom-seed.json" } }),
|
||||
);
|
||||
writeFileSync(join(projectRoot, "custom-seed.json"), JSON.stringify(sampleSeed("pkg-pointer")));
|
||||
|
||||
mkdirSync(join(projectRoot, "seed"));
|
||||
writeFileSync(
|
||||
join(projectRoot, "seed", "seed.json"),
|
||||
JSON.stringify(sampleSeed("conventional")),
|
||||
);
|
||||
|
||||
const out = generateSeedModule(projectRoot);
|
||||
expect(out).toContain(`"name":"dot-emdash"`);
|
||||
expect(out).toContain("export const seed = userSeed;");
|
||||
});
|
||||
|
||||
it("uses package.json#emdash.seed when .emdash/seed.json is absent", () => {
|
||||
writeFileSync(
|
||||
join(projectRoot, "package.json"),
|
||||
JSON.stringify({ name: "x", emdash: { seed: "seed/seed.json" } }),
|
||||
);
|
||||
mkdirSync(join(projectRoot, "seed"));
|
||||
writeFileSync(join(projectRoot, "seed", "seed.json"), JSON.stringify(sampleSeed("via-pkg")));
|
||||
|
||||
const out = generateSeedModule(projectRoot);
|
||||
expect(out).toContain(`"name":"via-pkg"`);
|
||||
});
|
||||
|
||||
it("falls back to seed/seed.json when no pointer is configured", () => {
|
||||
writeFileSync(join(projectRoot, "package.json"), JSON.stringify({ name: "x" }));
|
||||
mkdirSync(join(projectRoot, "seed"));
|
||||
writeFileSync(
|
||||
join(projectRoot, "seed", "seed.json"),
|
||||
JSON.stringify(sampleSeed("conventional-fallback")),
|
||||
);
|
||||
|
||||
const out = generateSeedModule(projectRoot);
|
||||
expect(out).toContain(`"name":"conventional-fallback"`);
|
||||
expect(out).toContain("export const seed = userSeed;");
|
||||
});
|
||||
|
||||
it("falls through to the default seed when no user seed is found", () => {
|
||||
writeFileSync(join(projectRoot, "package.json"), JSON.stringify({ name: "x" }));
|
||||
|
||||
const out = generateSeedModule(projectRoot);
|
||||
expect(out).toContain("export const userSeed = null;");
|
||||
expect(out).toContain("export const seed = ");
|
||||
});
|
||||
});
|
||||
82
packages/core/tests/unit/astro/manifest-route.test.ts
Normal file
82
packages/core/tests/unit/astro/manifest-route.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* Manifest route admin branding.
|
||||
*
|
||||
* The admin branding (logo, siteName, favicon) configured via the EmDash
|
||||
* integration must be reflected in `/_emdash/api/manifest` so the React SPA
|
||||
* can render the custom logo and site name. The route reads the branding
|
||||
* from the per-request config on `locals.emdash.config.admin` (the same
|
||||
* source `admin.astro` uses), not from a build-time global.
|
||||
*
|
||||
* Regression test for issue #835.
|
||||
*/
|
||||
|
||||
import type { APIContext } from "astro";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { GET as getManifest } from "../../../src/astro/routes/api/manifest.js";
|
||||
|
||||
/** JSON envelope shape returned by GET /_emdash/api/manifest. */
interface ManifestEnvelope {
  data: {
    // Present only when admin branding is configured on the integration.
    admin?: { logo?: string; siteName?: string; favicon?: string };
    authMode: string;
    signupEnabled?: boolean;
    collections?: Record<string, unknown>;
    plugins?: Record<string, unknown>;
    taxonomies?: unknown[];
    version?: string;
  };
}
|
||||
|
||||
function makeContext(
|
||||
adminBranding?: { logo?: string; siteName?: string; favicon?: string },
|
||||
manifest?: unknown,
|
||||
): Parameters<typeof getManifest>[0] {
|
||||
const locals = {
|
||||
emdash: adminBranding
|
||||
? {
|
||||
// db is intentionally undefined so the signup-enabled query is skipped.
|
||||
config: { admin: adminBranding },
|
||||
getManifest: async () => manifest ?? null,
|
||||
}
|
||||
: undefined,
|
||||
};
|
||||
|
||||
return { locals } as unknown as APIContext;
|
||||
}
|
||||
|
||||
describe("manifest route admin branding", () => {
|
||||
it("returns admin branding from locals.emdash.config.admin", async () => {
|
||||
const branding = {
|
||||
logo: "/logo.png",
|
||||
siteName: "My Site",
|
||||
favicon: "/favicon.ico",
|
||||
};
|
||||
|
||||
const response = await getManifest(makeContext(branding));
|
||||
expect(response.status).toBe(200);
|
||||
const body = (await response.json()) as ManifestEnvelope;
|
||||
expect(body.data.admin).toEqual(branding);
|
||||
});
|
||||
|
||||
it("omits the admin field when no branding is configured", async () => {
|
||||
const response = await getManifest(makeContext());
|
||||
expect(response.status).toBe(200);
|
||||
const body = (await response.json()) as ManifestEnvelope;
|
||||
expect(body.data.admin).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns admin branding even when getManifest() resolves to a built manifest", async () => {
|
||||
const branding = { logo: "/brand.svg", siteName: "Brandname" };
|
||||
const ctx = makeContext(branding, {
|
||||
version: "test",
|
||||
hash: "test",
|
||||
collections: {},
|
||||
plugins: {},
|
||||
taxonomies: [],
|
||||
});
|
||||
|
||||
const response = await getManifest(ctx);
|
||||
const body = (await response.json()) as ManifestEnvelope;
|
||||
expect(body.data.admin).toEqual(branding);
|
||||
});
|
||||
});
|
||||
289
packages/core/tests/unit/astro/middleware-prerender.test.ts
Normal file
289
packages/core/tests/unit/astro/middleware-prerender.test.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
import { beforeEach, describe, it, expect, vi } from "vitest";
|
||||
|
||||
vi.mock("astro:middleware", () => ({
|
||||
defineMiddleware: (handler: unknown) => handler,
|
||||
}));
|
||||
|
||||
// vi.mock factories are hoisted above normal `const` declarations; use
|
||||
// vi.hoisted so the marker object is available both to the mock factory and
|
||||
// to assertions below.
|
||||
const { DB_CONFIG_MARKER } = vi.hoisted(() => ({
|
||||
DB_CONFIG_MARKER: { binding: "DB", session: "auto" },
|
||||
}));
|
||||
|
||||
vi.mock(
|
||||
"virtual:emdash/config",
|
||||
() => ({
|
||||
default: {
|
||||
database: { config: DB_CONFIG_MARKER },
|
||||
auth: { mode: "none" },
|
||||
},
|
||||
}),
|
||||
{ virtual: true },
|
||||
);
|
||||
|
||||
vi.mock(
|
||||
"virtual:emdash/dialect",
|
||||
() => ({
|
||||
createDialect: vi.fn(),
|
||||
createRequestScopedDb: vi.fn().mockReturnValue(null),
|
||||
}),
|
||||
{ virtual: true },
|
||||
);
|
||||
|
||||
vi.mock("virtual:emdash/media-providers", () => ({ mediaProviders: [] }), { virtual: true });
|
||||
vi.mock("virtual:emdash/plugins", () => ({ plugins: [] }), { virtual: true });
|
||||
vi.mock(
|
||||
"virtual:emdash/sandbox-runner",
|
||||
() => ({
|
||||
createSandboxRunner: null,
|
||||
sandboxEnabled: false,
|
||||
}),
|
||||
{ virtual: true },
|
||||
);
|
||||
vi.mock("virtual:emdash/sandboxed-plugins", () => ({ sandboxedPlugins: [] }), { virtual: true });
|
||||
vi.mock("virtual:emdash/storage", () => ({ createStorage: null }), { virtual: true });
|
||||
vi.mock("virtual:emdash/wait-until", () => ({ waitUntil: undefined }), { virtual: true });
|
||||
|
||||
vi.mock("../../../src/loader.js", () => ({
|
||||
getDb: vi.fn(async () => ({
|
||||
selectFrom: () => ({
|
||||
selectAll: () => ({
|
||||
limit: () => ({
|
||||
execute: async () => [],
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
})),
|
||||
}));
|
||||
|
||||
import { createRequestScopedDb } from "virtual:emdash/dialect";
|
||||
|
||||
import onRequest from "../../../src/astro/middleware.js";
|
||||
import { getRequestContext } from "../../../src/request-context.js";
|
||||
|
||||
describe("astro middleware prerendered routes", () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(createRequestScopedDb).mockReset().mockReturnValue(null);
|
||||
});
|
||||
|
||||
it("does not access context.session on prerendered public runtime routes", async () => {
|
||||
const cookies = {
|
||||
get: vi.fn(() => undefined),
|
||||
};
|
||||
|
||||
const context: Record<string, unknown> = {
|
||||
request: new Request("https://example.com/robots.txt"),
|
||||
url: new URL("https://example.com/robots.txt"),
|
||||
cookies,
|
||||
locals: {},
|
||||
redirect: vi.fn(),
|
||||
isPrerendered: true,
|
||||
};
|
||||
|
||||
Object.defineProperty(context, "session", {
|
||||
get() {
|
||||
throw new Error("context.session should not be accessed during prerender");
|
||||
},
|
||||
});
|
||||
|
||||
const response = await onRequest(
|
||||
context as Parameters<typeof onRequest>[0],
|
||||
async () => new Response("ok"),
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it("does not access context.session when prerendering public pages", async () => {
|
||||
const cookies = {
|
||||
get: vi.fn(() => undefined),
|
||||
};
|
||||
const redirect = vi.fn(
|
||||
(location: string) => new Response(null, { status: 302, headers: { Location: location } }),
|
||||
);
|
||||
|
||||
const context: Record<string, unknown> = {
|
||||
request: new Request("https://example.com/"),
|
||||
url: new URL("https://example.com/"),
|
||||
cookies,
|
||||
locals: {},
|
||||
redirect,
|
||||
isPrerendered: true,
|
||||
};
|
||||
|
||||
Object.defineProperty(context, "session", {
|
||||
get() {
|
||||
throw new Error("context.session should not be accessed during prerender");
|
||||
},
|
||||
});
|
||||
|
||||
const response = await onRequest(
|
||||
context as Parameters<typeof onRequest>[0],
|
||||
async () => new Response("ok"),
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(redirect).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("astro middleware anonymous session reads", () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(createRequestScopedDb).mockReset().mockReturnValue(null);
|
||||
});
|
||||
|
||||
it("does not read the Astro session when no astro-session cookie is present", async () => {
|
||||
// Regression test for #733: on Cloudflare Workers the Astro session
|
||||
// backend is KV, so calling session.get() on every anonymous public
|
||||
// request produces a flood of KV read misses. The middleware must
|
||||
// skip the session lookup entirely when no astro-session cookie is set.
|
||||
const cookies = {
|
||||
get: vi.fn((name: string) => {
|
||||
if (name === "astro-session") return undefined;
|
||||
return undefined;
|
||||
}),
|
||||
set: vi.fn(),
|
||||
};
|
||||
const sessionGet = vi.fn(async () => null);
|
||||
const astroSession = { get: sessionGet };
|
||||
|
||||
const context: Record<string, unknown> = {
|
||||
request: new Request("https://example.com/"),
|
||||
url: new URL("https://example.com/"),
|
||||
cookies,
|
||||
locals: {},
|
||||
redirect: vi.fn(),
|
||||
isPrerendered: false,
|
||||
session: astroSession,
|
||||
};
|
||||
|
||||
const response = await onRequest(
|
||||
context as Parameters<typeof onRequest>[0],
|
||||
async () => new Response("ok"),
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(sessionGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("reads the Astro session when an astro-session cookie is present", async () => {
|
||||
const cookies = {
|
||||
get: vi.fn((name: string) => {
|
||||
if (name === "astro-session") return { value: "abc123" };
|
||||
return undefined;
|
||||
}),
|
||||
set: vi.fn(),
|
||||
};
|
||||
const sessionGet = vi.fn(async () => null);
|
||||
const astroSession = { get: sessionGet };
|
||||
|
||||
const context: Record<string, unknown> = {
|
||||
request: new Request("https://example.com/", {
|
||||
headers: { cookie: "astro-session=abc123" },
|
||||
}),
|
||||
url: new URL("https://example.com/"),
|
||||
cookies,
|
||||
locals: {},
|
||||
redirect: vi.fn(),
|
||||
isPrerendered: false,
|
||||
session: astroSession,
|
||||
};
|
||||
|
||||
const response = await onRequest(
|
||||
context as Parameters<typeof onRequest>[0],
|
||||
async () => new Response("ok"),
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(sessionGet).toHaveBeenCalledWith("user");
|
||||
});
|
||||
});
|
||||
|
||||
// Verifies that the Astro middleware asks the adapter for a request-scoped
// db on anonymous public-page requests, exposes it through ALS for the
// duration of next(), commits it, and tears the context down afterwards.
describe("astro middleware request-scoped db", () => {
  beforeEach(() => {
    // Default: adapter declines to provide a scoped db.
    vi.mocked(createRequestScopedDb).mockReset().mockReturnValue(null);
  });

  it("asks the adapter for a scoped db on anonymous public pages and exposes it via ALS", async () => {
    const commit = vi.fn();
    const scopedDb = { _marker: "scoped" };
    vi.mocked(createRequestScopedDb).mockReturnValue({
      db: scopedDb as never,
      commit,
    });

    // Anonymous visitor: no auth cookie, no session user.
    const cookies = {
      get: vi.fn(() => undefined),
      set: vi.fn(),
    };
    const astroSession = {
      get: vi.fn(async () => null),
    };

    const context: Record<string, unknown> = {
      request: new Request("https://example.com/"),
      url: new URL("https://example.com/"),
      cookies,
      locals: {},
      redirect: vi.fn(),
      isPrerendered: false,
      session: astroSession,
    };

    let dbSeenByNext: unknown;
    const response = await onRequest(context as Parameters<typeof onRequest>[0], async () => {
      // Captured inside next() so we observe the ALS value mid-request.
      dbSeenByNext = getRequestContext()?.db;
      return new Response("ok");
    });

    expect(response.status).toBe(200);
    expect(createRequestScopedDb).toHaveBeenCalledTimes(1);
    const opts = vi.mocked(createRequestScopedDb).mock.calls[0]?.[0];
    // Opts shape matches the RequestScopedDbOpts contract declared in
    // virtual-modules.d.ts. The `config` field name must match exactly —
    // it's what the D1 adapter reads; a rename silently breaks D1 sessions.
    expect(opts).toMatchObject({
      config: DB_CONFIG_MARKER,
      isAuthenticated: false,
      isWrite: false,
      cookies,
    });
    expect(dbSeenByNext).toBe(scopedDb);
    expect(commit).toHaveBeenCalledTimes(1);
    // ALS must be fully torn down after the middleware returns; otherwise
    // a refactor to enterWith() could silently leak request state into
    // other async work on the same worker.
    expect(getRequestContext()).toBeUndefined();
  });

  it("forces isWrite true for POST requests on public pages", async () => {
    const commit = vi.fn();
    vi.mocked(createRequestScopedDb).mockReturnValue({
      db: { _marker: "scoped" } as never,
      commit,
    });

    const cookies = { get: vi.fn(() => undefined), set: vi.fn() };
    const astroSession = { get: vi.fn(async () => null) };

    const context: Record<string, unknown> = {
      request: new Request("https://example.com/", { method: "POST" }),
      url: new URL("https://example.com/"),
      cookies,
      locals: {},
      redirect: vi.fn(),
      isPrerendered: false,
      session: astroSession,
    };

    await onRequest(context as Parameters<typeof onRequest>[0], async () => new Response("ok"));

    // A non-GET method must flip isWrite even for anonymous visitors.
    const opts = vi.mocked(createRequestScopedDb).mock.calls[0]?.[0];
    expect(opts).toMatchObject({
      config: DB_CONFIG_MARKER,
      isAuthenticated: false,
      isWrite: true,
    });
  });
});
|
||||
179
packages/core/tests/unit/astro/middleware-redirect.test.ts
Normal file
179
packages/core/tests/unit/astro/middleware-redirect.test.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
/**
|
||||
* Regression tests for issue #808: redirect middleware silently no-oped for
|
||||
* unauthenticated public visitors because `locals.emdash.db` is intentionally
|
||||
* absent on the public-visitor branch of runtime init. The fix routes the
|
||||
* lookup through `getDb()` (ALS-aware, falls back to singleton).
|
||||
*/
|
||||
import type { Kysely } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
// astro:middleware is a virtual module; replace defineMiddleware with an
// identity function so the handler can be invoked directly in tests.
vi.mock("astro:middleware", () => ({
  defineMiddleware: (handler: unknown) => handler,
}));

// vi.hoisted ensures the mock fn exists before vi.mock factories run
// (vi.mock calls are hoisted above regular declarations).
const { getDbMock } = vi.hoisted(() => ({
  getDbMock: vi.fn(),
}));

// Route the middleware's ALS-aware db lookup through a controllable stub.
vi.mock("../../../src/loader.js", () => ({
  getDb: getDbMock,
}));
|
||||
|
||||
import { onRequest } from "../../../src/astro/middleware/redirect.js";
|
||||
import { RedirectRepository } from "../../../src/database/repositories/redirect.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { invalidateRedirectCache } from "../../../src/redirects/cache.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
type MiddlewareContext = Parameters<typeof onRequest>[0];
|
||||
|
||||
interface BuildContextOpts {
|
||||
pathname: string;
|
||||
emdashDb?: unknown;
|
||||
}
|
||||
|
||||
function buildContext({ pathname, emdashDb }: BuildContextOpts): {
|
||||
context: MiddlewareContext;
|
||||
redirect: ReturnType<typeof vi.fn>;
|
||||
} {
|
||||
const redirect = vi.fn(
|
||||
(location: string, status: number) =>
|
||||
new Response(null, { status, headers: { Location: location } }),
|
||||
);
|
||||
const url = new URL(`https://example.com${pathname}`);
|
||||
const locals = emdashDb !== undefined ? { emdash: { db: emdashDb } } : {};
|
||||
const ctx = {
|
||||
url,
|
||||
request: new Request(url.toString()),
|
||||
locals,
|
||||
redirect,
|
||||
};
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- minimal Astro-shaped object for the middleware under test
|
||||
return { context: ctx as unknown as MiddlewareContext, redirect };
|
||||
}
|
||||
|
||||
// End-to-end middleware tests against a real in-memory database; the cache
// is invalidated per test so each case controls its own warm/cold state.
describe("redirect middleware — issue #808", () => {
  let db: Kysely<Database>;

  beforeEach(async () => {
    invalidateRedirectCache();
    db = await setupTestDatabase();
    const repo = new RedirectRepository(db);
    // One exact-match redirect and one pattern redirect.
    await repo.create({ source: "/old", destination: "/new", type: 301 });
    await repo.create({
      source: "/legacy/[slug]",
      destination: "/posts/[slug]",
      type: 301,
      isPattern: true,
    });
    getDbMock.mockReset();
    getDbMock.mockResolvedValue(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  // Narrows the middleware's Response | void return so assertions can
  // safely read status/headers.
  async function runMiddleware(
    context: MiddlewareContext,
    next: () => Promise<Response>,
  ): Promise<Response> {
    const result = await onRequest(context, next);
    if (!(result instanceof Response)) {
      throw new Error("Middleware returned void; expected a Response");
    }
    return result;
  }

  it("fires for an unauthenticated visitor on a public path (no locals.emdash.db)", async () => {
    const { context, redirect } = buildContext({ pathname: "/old" });

    const next = vi.fn(async () => new Response("not found", { status: 404 }));
    const response = await runMiddleware(context, next);

    // The fix: the middleware falls back to getDb() when locals has no db.
    expect(getDbMock).toHaveBeenCalledTimes(1);
    expect(redirect).toHaveBeenCalledWith("/new", 301);
    expect(response.status).toBe(301);
    expect(response.headers.get("Location")).toBe("/new");
    expect(next).not.toHaveBeenCalled();
  });

  it("fires pattern matches for unauthenticated visitors", async () => {
    const { context, redirect } = buildContext({ pathname: "/legacy/hello" });

    const next = vi.fn(async () => new Response("not found", { status: 404 }));
    const response = await runMiddleware(context, next);

    expect(redirect).toHaveBeenCalledWith("/posts/hello", 301);
    expect(response.status).toBe(301);
  });

  it("still uses locals.emdash.db when present (authenticated/edit-mode/preview path)", async () => {
    const { context, redirect } = buildContext({ pathname: "/old", emdashDb: db });

    const next = vi.fn(async () => new Response("not found", { status: 404 }));
    const response = await runMiddleware(context, next);

    // When locals.emdash.db is provided, getDb() must not be called.
    expect(getDbMock).not.toHaveBeenCalled();
    expect(redirect).toHaveBeenCalledWith("/new", 301);
    expect(response.status).toBe(301);
  });

  it("skips silently when no database is available at all", async () => {
    getDbMock.mockRejectedValueOnce(new Error("EmDash database not configured"));
    const { context, redirect } = buildContext({ pathname: "/old" });

    const next = vi.fn(async () => new Response("ok"));
    const response = await runMiddleware(context, next);

    // Best-effort: with no db, the request flows through untouched.
    expect(redirect).not.toHaveBeenCalled();
    expect(next).toHaveBeenCalledTimes(1);
    expect(response.status).toBe(200);
  });

  it("warms the redirect cache from one query and reuses it across requests", async () => {
    const findAllEnabled = vi.spyOn(RedirectRepository.prototype, "findAllEnabled");

    // First request: cache cold, should issue exactly one query.
    const first = buildContext({ pathname: "/old" });
    const next1 = vi.fn(async () => new Response("not found", { status: 404 }));
    const r1 = await runMiddleware(first.context, next1);
    expect(r1.status).toBe(301);
    expect(findAllEnabled).toHaveBeenCalledTimes(1);

    // Second request (exact match): cache warm, no further queries.
    const second = buildContext({ pathname: "/old" });
    const next2 = vi.fn(async () => new Response("not found", { status: 404 }));
    const r2 = await runMiddleware(second.context, next2);
    expect(r2.status).toBe(301);
    expect(findAllEnabled).toHaveBeenCalledTimes(1);

    // Third request (pattern match): still warm, no further queries.
    const third = buildContext({ pathname: "/legacy/hello" });
    const next3 = vi.fn(async () => new Response("not found", { status: 404 }));
    const r3 = await runMiddleware(third.context, next3);
    expect(r3.status).toBe(301);
    expect(third.redirect).toHaveBeenCalledWith("/posts/hello", 301);
    expect(findAllEnabled).toHaveBeenCalledTimes(1);

    // Fourth request (no match): still warm, but next() runs and a 404 is logged.
    const fourth = buildContext({ pathname: "/nope" });
    const next4 = vi.fn(async () => new Response("not found", { status: 404 }));
    await runMiddleware(fourth.context, next4);
    expect(findAllEnabled).toHaveBeenCalledTimes(1);

    findAllEnabled.mockRestore();
  });

  it("does not intercept /_emdash routes", async () => {
    const { context, redirect } = buildContext({ pathname: "/_emdash/admin" });

    const next = vi.fn(async () => new Response("ok"));
    await runMiddleware(context, next);

    // Admin routes bypass the redirect layer entirely — no db access at all.
    expect(getDbMock).not.toHaveBeenCalled();
    expect(redirect).not.toHaveBeenCalled();
    expect(next).toHaveBeenCalledTimes(1);
  });
});
|
||||
61
packages/core/tests/unit/astro/routes.test.ts
Normal file
61
packages/core/tests/unit/astro/routes.test.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { injectCoreRoutes } from "../../../src/astro/integration/routes.js";
|
||||
import { GET as getMediaFile } from "../../../src/astro/routes/api/media/file/[...key].js";
|
||||
|
||||
function mockMediaContext(key: string | undefined) {
|
||||
const download = vi.fn().mockResolvedValue({
|
||||
body: new Uint8Array([1, 2, 3]),
|
||||
contentType: "image/png",
|
||||
size: 3,
|
||||
});
|
||||
|
||||
return {
|
||||
context: {
|
||||
params: { key },
|
||||
locals: {
|
||||
emdash: {
|
||||
storage: { download },
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof getMediaFile>[0],
|
||||
download,
|
||||
};
|
||||
}
|
||||
|
||||
// Ensures the injected media route uses an Astro catch-all ([...key]) so
// storage keys containing slashes resolve to the same handler.
describe("core media route injection", () => {
  it("uses a catch-all media file route so storage keys can contain slashes", () => {
    const routes: Array<{ pattern: string; entrypoint: string }> = [];
    injectCoreRoutes((route) => {
      routes.push({
        ...route,
        // Normalize Windows path separators so the assertion is portable.
        entrypoint: route.entrypoint.replaceAll("\\", "/"),
      });
    });

    expect(routes).toContainEqual(
      expect.objectContaining({
        pattern: "/_emdash/api/media/file/[...key]",
        entrypoint: expect.stringContaining("api/media/file/[...key].ts"),
      }),
    );
  });
});
|
||||
|
||||
// Behavior of the catch-all handler itself: slash-containing keys pass
// straight to storage; a missing key short-circuits to 404.
describe("media file catch-all route", () => {
  it("passes slash-containing keys through to storage.download", async () => {
    const { context, download } = mockMediaContext("nested/path/file.png");

    const response = await getMediaFile(context);
    expect(response.status).toBe(200);
    expect(download).toHaveBeenCalledWith("nested/path/file.png");
  });

  it("returns not found when the catch-all key is missing", async () => {
    const { context, download } = mockMediaContext(undefined);

    const response = await getMediaFile(context);
    expect(response.status).toBe(404);
    // Storage must not be touched for an unroutable key.
    expect(download).not.toHaveBeenCalled();
  });
});
|
||||
157
packages/core/tests/unit/astro/signup-rate-limit.test.ts
Normal file
157
packages/core/tests/unit/astro/signup-rate-limit.test.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
/**
|
||||
* Rate-limit enforcement on POST /_emdash/api/auth/signup/request.
|
||||
*
|
||||
* The signup request route must be rate-limited per IP, mirroring the
|
||||
* existing protection on magic-link/send. Without a limit, a caller on
|
||||
* Cloudflare can trigger unlimited signup verification emails for any
|
||||
* allowed domain.
|
||||
*
|
||||
* Tests drive the route handler directly with a real in-memory SQLite
|
||||
* database (so checkRateLimit actually persists) and a stubbed email
|
||||
* pipeline to observe send counts.
|
||||
*/
|
||||
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import type { AuthAdapter } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { APIContext } from "astro";
|
||||
import type { Kysely } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { POST as signupRequest } from "../../../src/astro/routes/api/auth/signup/request.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Simulate a Cloudflare request so getClientIp returns a value. Without the
|
||||
// `cf` marker, the rate limiter short-circuits with null-IP and nothing is
|
||||
// enforced.
|
||||
function cfRequest(url: string, body: unknown): Request {
|
||||
const req = new Request(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"content-type": "application/json",
|
||||
"cf-connecting-ip": "198.51.100.7",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- test harness
|
||||
(req as unknown as { cf: Record<string, unknown> }).cf = { country: "US" };
|
||||
return req;
|
||||
}
|
||||
|
||||
interface StubEmail {
|
||||
send: ReturnType<typeof vi.fn>;
|
||||
isAvailable: () => boolean;
|
||||
}
|
||||
|
||||
function buildEmail(): StubEmail {
|
||||
return {
|
||||
send: vi.fn().mockResolvedValue(undefined),
|
||||
isAvailable: () => true,
|
||||
};
|
||||
}
|
||||
|
||||
function ctx(opts: {
|
||||
db: Kysely<Database>;
|
||||
email: StubEmail;
|
||||
body: { email: string };
|
||||
}): APIContext {
|
||||
const url = "http://localhost/_emdash/api/auth/signup/request";
|
||||
return {
|
||||
request: cfRequest(url, opts.body),
|
||||
locals: {
|
||||
emdash: {
|
||||
db: opts.db,
|
||||
email: opts.email,
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- minimal stub for tests
|
||||
} as unknown as APIContext;
|
||||
}
|
||||
|
||||
// Rate-limit behavior of the signup request route, exercised with a real
// in-memory database so checkRateLimit actually persists per-IP counters.
describe("POST /auth/signup/request rate limiting", () => {
  let db: Kysely<Database>;
  let adapter: AuthAdapter;

  beforeEach(async () => {
    db = await setupTestDatabase();
    adapter = createKyselyAdapter(db);
    // Allow the test domain so the route reaches the email-send path.
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  it("sends email on the first request from an IP", async () => {
    const email = buildEmail();
    const res = await signupRequest(ctx({ db, email, body: { email: "a@allowed.com" } }));
    expect(res.status).toBe(200);
    expect(email.send).toHaveBeenCalledTimes(1);
  });

  it("stops sending emails after the per-IP limit is exceeded", async () => {
    const email = buildEmail();

    // Use 4 distinct addresses so each one would normally send — if the
    // limit is absent, the stub is called 4 times. With the fix, it's 3.
    for (const local of ["a", "b", "c", "d"]) {
      await signupRequest(ctx({ db, email, body: { email: `${local}@allowed.com` } }));
    }

    // Matches magic-link/send: 3 requests per 5 minutes per IP.
    expect(email.send).toHaveBeenCalledTimes(3);
  });

  it("always returns 200 to avoid revealing the rate limit", async () => {
    const email = buildEmail();
    const responses = [];
    for (const local of ["a", "b", "c", "d", "e"]) {
      responses.push(
        await signupRequest(ctx({ db, email, body: { email: `${local}@allowed.com` } })),
      );
    }

    // All responses are 200 with the generic success envelope. The rate
    // limit is invisible to the caller (which also keeps signup
    // indistinguishable from disallowed-domain).
    expect(responses.every((r) => r.status === 200)).toBe(true);
  });

  it("tracks the limit per IP, not globally", async () => {
    const email = buildEmail();
    const url = "http://localhost/_emdash/api/auth/signup/request";

    // Like cfRequest, but with a caller-chosen source IP.
    function req(ip: string, addr: string): Request {
      const r = new Request(url, {
        method: "POST",
        headers: {
          "content-type": "application/json",
          "cf-connecting-ip": ip,
        },
        body: JSON.stringify({ email: addr }),
      });
      // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- test harness
      (r as unknown as { cf: Record<string, unknown> }).cf = { country: "US" };
      return r;
    }

    function makeCtx(request: Request): APIContext {
      return {
        request,
        locals: { emdash: { db, email } },
        // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- minimal stub
      } as unknown as APIContext;
    }

    // Exhaust IP A
    for (const local of ["a", "b", "c", "d"]) {
      await signupRequest(makeCtx(req("198.51.100.7", `${local}@allowed.com`)));
    }
    expect(email.send).toHaveBeenCalledTimes(3);

    // IP B still gets through
    await signupRequest(makeCtx(req("198.51.100.8", "x@allowed.com")));
    expect(email.send).toHaveBeenCalledTimes(4);
  });
});
|
||||
113
packages/core/tests/unit/astro/virtual-modules-sandbox.test.ts
Normal file
113
packages/core/tests/unit/astro/virtual-modules-sandbox.test.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { mkdtemp, rm, writeFile, mkdir } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { PluginDescriptor } from "../../../src/astro/integration/runtime.js";
|
||||
import { generateSandboxedPluginsModule } from "../../../src/astro/integration/virtual-modules.js";
|
||||
|
||||
function descriptor(overrides: Partial<PluginDescriptor> = {}): PluginDescriptor {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
entrypoint: "@test/plugin/sandbox",
|
||||
format: "standard",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
adminPages: [],
|
||||
adminWidgets: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// Exercises module generation against a fake project tree on disk: built JS
// entrypoints are embedded; unbuilt TS sources are rejected with a clear error.
describe("generateSandboxedPluginsModule", () => {
  let tmpDir: string;

  beforeEach(async () => {
    tmpDir = await mkdtemp(join(tmpdir(), "emdash-vm-test-"));
  });

  afterEach(async () => {
    await rm(tmpDir, { recursive: true, force: true });
  });

  // Lays out a minimal project root + node_modules/@test/plugin whose
  // package.json maps the "./sandbox" export to `exportPath`.
  async function setupFakeProject(exportPath: string, content: string) {
    // Create a fake project root with package.json
    await writeFile(join(tmpDir, "package.json"), JSON.stringify({ name: "test-project" }));

    // Create the plugin package inside node_modules
    const pluginDir = join(tmpDir, "node_modules", "@test", "plugin");
    await mkdir(pluginDir, { recursive: true });

    // Determine the directory for the export file
    const fileParts = exportPath.split("/");
    if (fileParts.length > 1) {
      const dir = join(pluginDir, ...fileParts.slice(0, -1));
      await mkdir(dir, { recursive: true });
    }

    await writeFile(join(pluginDir, exportPath), content);
    await writeFile(
      join(pluginDir, "package.json"),
      JSON.stringify({
        name: "@test/plugin",
        exports: { "./sandbox": `./${exportPath}` },
      }),
    );
  }

  it("returns empty module when no plugins configured", () => {
    const result = generateSandboxedPluginsModule([], tmpDir);
    expect(result).toContain("export const sandboxedPlugins = []");
  });

  it("embeds pre-built JavaScript successfully", async () => {
    await setupFakeProject("dist/sandbox-entry.mjs", "export default { hooks: {} };");

    const result = generateSandboxedPluginsModule(
      [descriptor({ entrypoint: "@test/plugin/sandbox" })],
      tmpDir,
    );

    // The generated module carries the plugin id and the inlined source.
    expect(result).toContain("sandboxedPlugins");
    expect(result).toContain("test-plugin");
    expect(result).toContain("export default { hooks: {} };");
  });

  it("throws for .ts source files", async () => {
    await setupFakeProject("src/sandbox-entry.ts", "export default {};");

    expect(() =>
      generateSandboxedPluginsModule([descriptor({ entrypoint: "@test/plugin/sandbox" })], tmpDir),
    ).toThrow(/unbuilt source/);
  });

  it("throws for .tsx source files", async () => {
    await setupFakeProject("src/sandbox-entry.tsx", "export default {};");

    expect(() =>
      generateSandboxedPluginsModule([descriptor({ entrypoint: "@test/plugin/sandbox" })], tmpDir),
    ).toThrow(/unbuilt source/);
  });

  it("throws for .mts source files", async () => {
    await setupFakeProject("src/sandbox-entry.mts", "export default {};");

    expect(() =>
      generateSandboxedPluginsModule([descriptor({ entrypoint: "@test/plugin/sandbox" })], tmpDir),
    ).toThrow(/unbuilt source/);
  });

  it("includes plugin id in error message", async () => {
    await setupFakeProject("src/sandbox-entry.ts", "export default {};");

    expect(() =>
      generateSandboxedPluginsModule(
        [descriptor({ id: "my-broken-plugin", entrypoint: "@test/plugin/sandbox" })],
        tmpDir,
      ),
    ).toThrow(/my-broken-plugin/);
  });
});
|
||||
140
packages/core/tests/unit/astro/vite-config.test.ts
Normal file
140
packages/core/tests/unit/astro/vite-config.test.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import { basename } from "node:path";
|
||||
|
||||
import type { AstroConfig } from "astro";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { createViteConfig } from "../../../src/astro/integration/vite-config.js";
|
||||
|
||||
// The @emdash-cms/admin alias must point at raw source only for local
// monorepo dev; any external project — or any non-dev command — gets the
// built dist.
describe("createViteConfig admin aliasing", () => {
  const monorepoDemoRoot = new URL("../../../../../demos/simple/", import.meta.url);
  const externalProjectRoot = new URL("file:///workspace/emdash-site/");
  // A sibling of the monorepo sharing a path prefix — must NOT be treated
  // as inside the monorepo.
  const siblingProjectRoot = new URL("../../../../../../emdash-site/", import.meta.url);
  const adminSourcePattern = /[/\\]packages[/\\]admin[/\\]src$/;
  const adminDistPattern = /[/\\]packages[/\\]admin[/\\]dist$/;

  function buildConfig(root: URL, command: "dev" | "build" | "preview" | "sync" = "dev") {
    return createViteConfig(
      {
        serializableConfig: {},
        resolvedConfig: {} as never,
        pluginDescriptors: [],
        astroConfig: {
          root,
          adapter: { name: "@astrojs/node" },
        } as AstroConfig,
      },
      command,
    );
  }

  // Extracts the replacement path of the @emdash-cms/admin alias entry,
  // throwing if the alias is missing so failures are explicit.
  function getAdminAliasReplacement(config: ReturnType<typeof createViteConfig>) {
    const aliases = Array.isArray(config.resolve?.alias) ? config.resolve.alias : [];
    const adminAlias = aliases.find(
      (alias) =>
        typeof alias === "object" &&
        alias !== null &&
        "find" in alias &&
        alias.find === "@emdash-cms/admin" &&
        "replacement" in alias,
    );

    if (!adminAlias || typeof adminAlias.replacement !== "string") {
      throw new Error("Missing @emdash-cms/admin alias");
    }

    return adminAlias.replacement;
  }

  it("uses raw admin source for local monorepo dev", () => {
    const config = buildConfig(monorepoDemoRoot);
    const replacement = getAdminAliasReplacement(config);

    expect(basename(replacement)).toBe("src");
    expect(replacement).toMatch(adminSourcePattern);
  });

  it("uses built admin dist for external app dev", () => {
    const config = buildConfig(externalProjectRoot);
    const replacement = getAdminAliasReplacement(config);

    expect(basename(replacement)).toBe("dist");
    expect(replacement).toMatch(adminDistPattern);
  });

  it("uses built admin dist for sibling paths with a matching prefix", () => {
    const config = buildConfig(siblingProjectRoot);
    const replacement = getAdminAliasReplacement(config);

    expect(basename(replacement)).toBe("dist");
    expect(replacement).toMatch(adminDistPattern);
  });

  it("uses built admin dist outside dev", () => {
    const config = buildConfig(monorepoDemoRoot, "build");
    const replacement = getAdminAliasReplacement(config);

    expect(basename(replacement)).toBe("dist");
    expect(replacement).toMatch(adminDistPattern);
  });
});
|
||||
|
||||
describe("createViteConfig use-sync-external-store shim aliasing", () => {
  const externalProjectRoot = new URL("file:///workspace/emdash-site/");

  function buildConfig(adapter: string) {
    return createViteConfig(
      {
        serializableConfig: {},
        resolvedConfig: {} as never,
        pluginDescriptors: [],
        astroConfig: {
          root: externalProjectRoot,
          adapter: { name: adapter },
        } as AstroConfig,
      },
      "dev",
    );
  }

  // Finds a single alias entry by its `find` key.
  function getAlias(config: ReturnType<typeof createViteConfig>, find: string) {
    const aliases = Array.isArray(config.resolve?.alias) ? config.resolve.alias : [];
    return aliases.find(
      (alias) =>
        typeof alias === "object" && alias !== null && "find" in alias && alias.find === find,
    );
  }

  // Regression: with pnpm + React 18+, @tiptap/react pulls in
  // `use-sync-external-store/shim` (CJS). Vite can't pre-bundle from the
  // virtual store, so browsers get raw CJS and InlinePortableTextEditor
  // fails to hydrate. The aliases redirect the shim to the main package,
  // which delegates to React's built-in hook on React >=18.
  for (const adapter of ["@astrojs/node", "@astrojs/cloudflare"] as const) {
    it(`redirects use-sync-external-store/shim to the main package on ${adapter}`, () => {
      const config = buildConfig(adapter);

      const indexAlias = getAlias(config, "use-sync-external-store/shim/index.js");
      const shimAlias = getAlias(config, "use-sync-external-store/shim");

      expect(indexAlias).toMatchObject({ replacement: "use-sync-external-store" });
      expect(shimAlias).toMatchObject({ replacement: "use-sync-external-store" });
    });

    it(`lists the more-specific shim alias before the directory alias on ${adapter}`, () => {
      const config = buildConfig(adapter);
      const aliases = Array.isArray(config.resolve?.alias) ? config.resolve.alias : [];

      const findIndex = (find: string) =>
        aliases.findIndex(
          (alias) =>
            typeof alias === "object" && alias !== null && "find" in alias && alias.find === find,
        );

      const indexIdx = findIndex("use-sync-external-store/shim/index.js");
      const shimIdx = findIndex("use-sync-external-store/shim");

      // Vite matches aliases in order, so the /index.js entry must come
      // first or it would never match.
      expect(indexIdx).toBeGreaterThanOrEqual(0);
      expect(shimIdx).toBeGreaterThan(indexIdx);
    });
  }
});
|
||||
308
packages/core/tests/unit/auth/allowed-domains.test.ts
Normal file
308
packages/core/tests/unit/auth/allowed-domains.test.ts
Normal file
@@ -0,0 +1,308 @@
|
||||
import type { AuthAdapter } from "@emdash-cms/auth";
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Allowed Domains Management", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("getAllowedDomains", () => {
|
||||
it("should return empty array when no domains exist", async () => {
|
||||
const domains = await adapter.getAllowedDomains();
|
||||
expect(domains).toEqual([]);
|
||||
});
|
||||
|
||||
it("should return all allowed domains", async () => {
|
||||
await adapter.createAllowedDomain("acme.com", Role.AUTHOR);
|
||||
await adapter.createAllowedDomain("partner.org", Role.CONTRIBUTOR);
|
||||
await adapter.createAllowedDomain("editors.net", Role.EDITOR);
|
||||
|
||||
const domains = await adapter.getAllowedDomains();
|
||||
|
||||
expect(domains).toHaveLength(3);
|
||||
const domainNames = domains.map((d) => d.domain);
|
||||
expect(domainNames).toContain("acme.com");
|
||||
expect(domainNames).toContain("partner.org");
|
||||
expect(domainNames).toContain("editors.net");
|
||||
});
|
||||
|
||||
it("should include both enabled and disabled domains", async () => {
|
||||
await adapter.createAllowedDomain("enabled.com", Role.AUTHOR);
|
||||
await adapter.createAllowedDomain("disabled.com", Role.AUTHOR);
|
||||
await adapter.updateAllowedDomain("disabled.com", false);
|
||||
|
||||
const domains = await adapter.getAllowedDomains();
|
||||
|
||||
expect(domains).toHaveLength(2);
|
||||
const enabled = domains.find((d) => d.domain === "enabled.com");
|
||||
const disabled = domains.find((d) => d.domain === "disabled.com");
|
||||
|
||||
expect(enabled?.enabled).toBe(true);
|
||||
expect(disabled?.enabled).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllowedDomain", () => {
|
||||
it("should return null for non-existent domain", async () => {
|
||||
const domain = await adapter.getAllowedDomain("nonexistent.com");
|
||||
expect(domain).toBeNull();
|
||||
});
|
||||
|
||||
it("should return domain with all properties", async () => {
|
||||
await adapter.createAllowedDomain("example.com", Role.EDITOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("example.com");
|
||||
|
||||
expect(domain).not.toBeNull();
|
||||
expect(domain?.domain).toBe("example.com");
|
||||
expect(domain?.defaultRole).toBe(Role.EDITOR);
|
||||
expect(domain?.enabled).toBe(true);
|
||||
expect(domain?.createdAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should be case-insensitive for domain lookup (normalizes to lowercase)", async () => {
|
||||
await adapter.createAllowedDomain("example.com", Role.AUTHOR);
|
||||
|
||||
// Lowercase should work
|
||||
const lower = await adapter.getAllowedDomain("example.com");
|
||||
expect(lower).not.toBeNull();
|
||||
|
||||
// Uppercase should also work (domains are normalized to lowercase)
|
||||
const upper = await adapter.getAllowedDomain("EXAMPLE.COM");
|
||||
expect(upper).not.toBeNull();
|
||||
expect(upper?.domain).toBe("example.com"); // stored as lowercase
|
||||
});
|
||||
});
|
||||
|
||||
describe("createAllowedDomain", () => {
|
||||
it("should create a new allowed domain", async () => {
|
||||
const domain = await adapter.createAllowedDomain("newdomain.com", Role.AUTHOR);
|
||||
|
||||
expect(domain.domain).toBe("newdomain.com");
|
||||
expect(domain.defaultRole).toBe(Role.AUTHOR);
|
||||
expect(domain.enabled).toBe(true);
|
||||
expect(domain.createdAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should create domain with specified role", async () => {
|
||||
await adapter.createAllowedDomain("subscribers.com", Role.SUBSCRIBER);
|
||||
await adapter.createAllowedDomain("contributors.com", Role.CONTRIBUTOR);
|
||||
await adapter.createAllowedDomain("authors.com", Role.AUTHOR);
|
||||
await adapter.createAllowedDomain("editors.com", Role.EDITOR);
|
||||
await adapter.createAllowedDomain("admins.com", Role.ADMIN);
|
||||
|
||||
expect((await adapter.getAllowedDomain("subscribers.com"))?.defaultRole).toBe(
|
||||
Role.SUBSCRIBER,
|
||||
);
|
||||
expect((await adapter.getAllowedDomain("contributors.com"))?.defaultRole).toBe(
|
||||
Role.CONTRIBUTOR,
|
||||
);
|
||||
expect((await adapter.getAllowedDomain("authors.com"))?.defaultRole).toBe(Role.AUTHOR);
|
||||
expect((await adapter.getAllowedDomain("editors.com"))?.defaultRole).toBe(Role.EDITOR);
|
||||
expect((await adapter.getAllowedDomain("admins.com"))?.defaultRole).toBe(Role.ADMIN);
|
||||
});
|
||||
|
||||
it("should throw error for duplicate domain", async () => {
|
||||
await adapter.createAllowedDomain("duplicate.com", Role.AUTHOR);
|
||||
|
||||
await expect(adapter.createAllowedDomain("duplicate.com", Role.EDITOR)).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should set enabled to true by default", async () => {
|
||||
const domain = await adapter.createAllowedDomain("enabled-default.com", Role.AUTHOR);
|
||||
expect(domain.enabled).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateAllowedDomain", () => {
|
||||
it("should toggle domain enabled status", async () => {
|
||||
await adapter.createAllowedDomain("toggle.com", Role.AUTHOR);
|
||||
|
||||
// Disable
|
||||
await adapter.updateAllowedDomain("toggle.com", false);
|
||||
let domain = await adapter.getAllowedDomain("toggle.com");
|
||||
expect(domain?.enabled).toBe(false);
|
||||
|
||||
// Re-enable
|
||||
await adapter.updateAllowedDomain("toggle.com", true);
|
||||
domain = await adapter.getAllowedDomain("toggle.com");
|
||||
expect(domain?.enabled).toBe(true);
|
||||
});
|
||||
|
||||
it("should update default role", async () => {
|
||||
await adapter.createAllowedDomain("role-change.com", Role.AUTHOR);
|
||||
|
||||
await adapter.updateAllowedDomain("role-change.com", true, Role.EDITOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("role-change.com");
|
||||
expect(domain?.defaultRole).toBe(Role.EDITOR);
|
||||
});
|
||||
|
||||
it("should update both enabled and role at once", async () => {
|
||||
await adapter.createAllowedDomain("both.com", Role.AUTHOR);
|
||||
|
||||
await adapter.updateAllowedDomain("both.com", false, Role.CONTRIBUTOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("both.com");
|
||||
expect(domain?.enabled).toBe(false);
|
||||
expect(domain?.defaultRole).toBe(Role.CONTRIBUTOR);
|
||||
});
|
||||
|
||||
it("should preserve role when only updating enabled", async () => {
|
||||
await adapter.createAllowedDomain("preserve.com", Role.EDITOR);
|
||||
|
||||
await adapter.updateAllowedDomain("preserve.com", false);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("preserve.com");
|
||||
expect(domain?.enabled).toBe(false);
|
||||
expect(domain?.defaultRole).toBe(Role.EDITOR);
|
||||
});
|
||||
|
||||
it("should preserve createdAt when updating", async () => {
|
||||
const created = await adapter.createAllowedDomain("timestamp.com", Role.AUTHOR);
|
||||
const originalCreatedAt = created.createdAt;
|
||||
|
||||
// Small delay
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
await adapter.updateAllowedDomain("timestamp.com", false, Role.EDITOR);
|
||||
|
||||
const updated = await adapter.getAllowedDomain("timestamp.com");
|
||||
expect(updated?.createdAt.getTime()).toBe(originalCreatedAt.getTime());
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteAllowedDomain", () => {
|
||||
it("should delete an existing domain", async () => {
|
||||
await adapter.createAllowedDomain("todelete.com", Role.AUTHOR);
|
||||
|
||||
await adapter.deleteAllowedDomain("todelete.com");
|
||||
|
||||
const domain = await adapter.getAllowedDomain("todelete.com");
|
||||
expect(domain).toBeNull();
|
||||
});
|
||||
|
||||
it("should not affect other domains", async () => {
|
||||
await adapter.createAllowedDomain("keep.com", Role.AUTHOR);
|
||||
await adapter.createAllowedDomain("delete.com", Role.AUTHOR);
|
||||
|
||||
await adapter.deleteAllowedDomain("delete.com");
|
||||
|
||||
const kept = await adapter.getAllowedDomain("keep.com");
|
||||
const deleted = await adapter.getAllowedDomain("delete.com");
|
||||
|
||||
expect(kept).not.toBeNull();
|
||||
expect(deleted).toBeNull();
|
||||
});
|
||||
|
||||
it("should be idempotent (no error on non-existent)", async () => {
|
||||
// Deleting non-existent domain should not throw
|
||||
await expect(adapter.deleteAllowedDomain("nonexistent.com")).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Domain Management Flow", () => {
|
||||
it("should support full CRUD flow", async () => {
|
||||
// Create
|
||||
const created = await adapter.createAllowedDomain("company.com", Role.AUTHOR);
|
||||
expect(created.domain).toBe("company.com");
|
||||
expect(created.enabled).toBe(true);
|
||||
|
||||
// Read
|
||||
let domain = await adapter.getAllowedDomain("company.com");
|
||||
expect(domain?.domain).toBe("company.com");
|
||||
|
||||
// Update - change role
|
||||
await adapter.updateAllowedDomain("company.com", true, Role.EDITOR);
|
||||
domain = await adapter.getAllowedDomain("company.com");
|
||||
expect(domain?.defaultRole).toBe(Role.EDITOR);
|
||||
|
||||
// Update - disable
|
||||
await adapter.updateAllowedDomain("company.com", false);
|
||||
domain = await adapter.getAllowedDomain("company.com");
|
||||
expect(domain?.enabled).toBe(false);
|
||||
|
||||
// List
|
||||
const all = await adapter.getAllowedDomains();
|
||||
expect(all).toHaveLength(1);
|
||||
|
||||
// Delete
|
||||
await adapter.deleteAllowedDomain("company.com");
|
||||
domain = await adapter.getAllowedDomain("company.com");
|
||||
expect(domain).toBeNull();
|
||||
|
||||
// List after delete
|
||||
const afterDelete = await adapter.getAllowedDomains();
|
||||
expect(afterDelete).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should handle multiple domains correctly", async () => {
|
||||
// Create multiple domains
|
||||
await adapter.createAllowedDomain("first.com", Role.SUBSCRIBER);
|
||||
await adapter.createAllowedDomain("second.com", Role.CONTRIBUTOR);
|
||||
await adapter.createAllowedDomain("third.com", Role.AUTHOR);
|
||||
|
||||
// Verify all exist
|
||||
let domains = await adapter.getAllowedDomains();
|
||||
expect(domains).toHaveLength(3);
|
||||
|
||||
// Disable one
|
||||
await adapter.updateAllowedDomain("second.com", false);
|
||||
|
||||
// Delete another
|
||||
await adapter.deleteAllowedDomain("first.com");
|
||||
|
||||
// Verify state
|
||||
domains = await adapter.getAllowedDomains();
|
||||
expect(domains).toHaveLength(2);
|
||||
|
||||
const second = domains.find((d) => d.domain === "second.com");
|
||||
const third = domains.find((d) => d.domain === "third.com");
|
||||
|
||||
expect(second?.enabled).toBe(false);
|
||||
expect(third?.enabled).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Edge Cases", () => {
|
||||
it("should handle subdomains correctly", async () => {
|
||||
await adapter.createAllowedDomain("sub.domain.com", Role.AUTHOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("sub.domain.com");
|
||||
expect(domain).not.toBeNull();
|
||||
|
||||
// Parent domain should not match
|
||||
const parent = await adapter.getAllowedDomain("domain.com");
|
||||
expect(parent).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle domains with hyphens", async () => {
|
||||
await adapter.createAllowedDomain("my-company.com", Role.AUTHOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain("my-company.com");
|
||||
expect(domain?.domain).toBe("my-company.com");
|
||||
});
|
||||
|
||||
it("should handle long domain names", async () => {
|
||||
const longDomain = "very-long-subdomain.another-part.yet-another.example.com";
|
||||
await adapter.createAllowedDomain(longDomain, Role.AUTHOR);
|
||||
|
||||
const domain = await adapter.getAllowedDomain(longDomain);
|
||||
expect(domain?.domain).toBe(longDomain);
|
||||
});
|
||||
});
|
||||
});
|
||||
211
packages/core/tests/unit/auth/allowed-origins.test.ts
Normal file
211
packages/core/tests/unit/auth/allowed-origins.test.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { _resetEnvCache } from "../../../src/api/public-url.js";
|
||||
import {
|
||||
getConfiguredAllowedOrigins,
|
||||
validateAllowedOrigins,
|
||||
validateOriginShape,
|
||||
type TaggedOrigin,
|
||||
} from "../../../src/auth/allowed-origins.js";
|
||||
|
||||
const origEnvAllowed = process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
|
||||
beforeEach(() => {
|
||||
_resetEnvCache();
|
||||
delete process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
_resetEnvCache();
|
||||
if (origEnvAllowed === undefined) delete process.env.EMDASH_ALLOWED_ORIGINS;
|
||||
else process.env.EMDASH_ALLOWED_ORIGINS = origEnvAllowed;
|
||||
});
|
||||
|
||||
function tag(
|
||||
origin: string,
|
||||
source: TaggedOrigin["source"] = "config.allowedOrigins",
|
||||
): TaggedOrigin {
|
||||
return { origin, source };
|
||||
}
|
||||
|
||||
describe("getConfiguredAllowedOrigins()", () => {
|
||||
it("returns [] when neither config nor env supplies origins", () => {
|
||||
expect(getConfiguredAllowedOrigins(undefined)).toEqual([]);
|
||||
expect(getConfiguredAllowedOrigins({})).toEqual([]);
|
||||
});
|
||||
|
||||
it("tags config entries as config.allowedOrigins", () => {
|
||||
const tagged = getConfiguredAllowedOrigins({
|
||||
allowedOrigins: ["https://preview.example.com"],
|
||||
});
|
||||
expect(tagged).toEqual([
|
||||
{ origin: "https://preview.example.com", source: "config.allowedOrigins" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("tags env entries as EMDASH_ALLOWED_ORIGINS", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://preview.example.com";
|
||||
const tagged = getConfiguredAllowedOrigins({});
|
||||
expect(tagged).toEqual([
|
||||
{ origin: "https://preview.example.com", source: "EMDASH_ALLOWED_ORIGINS" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("merges config first, then env (config wins on dedupe by tag-of-first-occurrence)", () => {
|
||||
process.env.EMDASH_ALLOWED_ORIGINS = "https://staging.example.com";
|
||||
const tagged = getConfiguredAllowedOrigins({
|
||||
allowedOrigins: ["https://preview.example.com"],
|
||||
});
|
||||
expect(tagged).toEqual([
|
||||
{ origin: "https://preview.example.com", source: "config.allowedOrigins" },
|
||||
{ origin: "https://staging.example.com", source: "EMDASH_ALLOWED_ORIGINS" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("filters falsy config entries", () => {
|
||||
const tagged = getConfiguredAllowedOrigins({
|
||||
allowedOrigins: ["", "https://preview.example.com"],
|
||||
});
|
||||
expect(tagged).toEqual([
|
||||
{ origin: "https://preview.example.com", source: "config.allowedOrigins" },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateOriginShape()", () => {
|
||||
it("returns [] for empty input", () => {
|
||||
expect(validateOriginShape([])).toEqual([]);
|
||||
});
|
||||
|
||||
it("normalizes to URL.origin form (path/query stripped)", () => {
|
||||
expect(validateOriginShape([tag("https://example.com/admin?x=1")])).toEqual([
|
||||
"https://example.com",
|
||||
]);
|
||||
});
|
||||
|
||||
it("dedupes duplicate origins", () => {
|
||||
expect(validateOriginShape([tag("https://example.com"), tag("https://example.com/x")])).toEqual(
|
||||
["https://example.com"],
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects unparseable URLs with source attribution", () => {
|
||||
expect(() => validateOriginShape([tag("not-a-url")])).toThrow(
|
||||
/EmDash config error in config\.allowedOrigins:.*invalid URL/,
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects non-http(s) protocols", () => {
|
||||
expect(() => validateOriginShape([tag("ftp://example.com", "EMDASH_ALLOWED_ORIGINS")])).toThrow(
|
||||
/EmDash config error in EMDASH_ALLOWED_ORIGINS:.*must be http or https.*ftp:/,
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects hostnames with trailing dots", () => {
|
||||
expect(() => validateOriginShape([tag("https://example.com.")])).toThrow(/trailing dot/);
|
||||
});
|
||||
|
||||
it("rejects hostnames with empty labels", () => {
|
||||
// "foo..example.com" parses with hostname "foo..example.com"
|
||||
expect(() => validateOriginShape([tag("https://foo..example.com")])).toThrow(/empty labels/);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateAllowedOrigins() — Rule A and Rule B", () => {
|
||||
it("returns [] when input is empty (no Rule A check fires)", () => {
|
||||
expect(validateAllowedOrigins(undefined, [])).toEqual([]);
|
||||
expect(validateAllowedOrigins("https://example.com", [])).toEqual([]);
|
||||
});
|
||||
|
||||
it("throws Rule A when origins are non-empty but siteUrl is missing", () => {
|
||||
expect(() => validateAllowedOrigins(undefined, [tag("https://preview.example.com")])).toThrow(
|
||||
/allowedOrigins is set.*but siteUrl is not/,
|
||||
);
|
||||
});
|
||||
|
||||
it("accepts an exact-hostname-match entry (apex listed alongside apex siteUrl)", () => {
|
||||
expect(validateAllowedOrigins("https://example.com", [tag("https://example.com")])).toEqual([
|
||||
"https://example.com",
|
||||
]);
|
||||
});
|
||||
|
||||
it("accepts a true subdomain", () => {
|
||||
expect(
|
||||
validateAllowedOrigins("https://example.com", [tag("https://preview.example.com")]),
|
||||
).toEqual(["https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("rejects a sibling/unrelated domain", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://example.com", [tag("https://other-site.com")]),
|
||||
).toThrow(/not a subdomain of siteUrl/);
|
||||
});
|
||||
|
||||
it("rejects a suffix-attacker (example.com.evil.com)", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://example.com", [tag("https://example.com.evil.com")]),
|
||||
).toThrow(/not a subdomain of siteUrl/);
|
||||
});
|
||||
|
||||
it("rejects a prefix-attacker (fakeexample.com)", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://example.com", [tag("https://fakeexample.com")]),
|
||||
).toThrow(/not a subdomain of siteUrl/);
|
||||
});
|
||||
|
||||
it("rejects apex when siteHost is itself a subdomain", () => {
|
||||
// rpId would be app.example.com — the browser refuses apex assertions for it
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://app.example.com", [tag("https://example.com")]),
|
||||
).toThrow(/not a subdomain of siteUrl/);
|
||||
});
|
||||
|
||||
it("rejects siteUrl with a trailing-dot hostname when allowedOrigins is non-empty", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://example.com.", [tag("https://preview.example.com")]),
|
||||
).toThrow(/trailing-dot hostname.*Remove the trailing dot/);
|
||||
});
|
||||
|
||||
it("rejects IP-literal siteUrl (IPv4) when allowedOrigins is non-empty", () => {
|
||||
// IP-literal check fires before Rule B in the validator, so the entry shape
|
||||
// itself doesn't need to relate to the IP — any parseable origin triggers it.
|
||||
expect(() =>
|
||||
validateAllowedOrigins("http://127.0.0.1:4321", [tag("https://preview.example.com")]),
|
||||
).toThrow(/IP-literal hostname/);
|
||||
});
|
||||
|
||||
it("rejects IP-literal siteUrl (IPv6) when allowedOrigins is non-empty", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("http://[::1]:4321", [tag("http://x.example.com")]),
|
||||
).toThrow(/IP-literal hostname/);
|
||||
});
|
||||
|
||||
it("allows IP-literal siteUrl when allowedOrigins is empty (single-origin dev)", () => {
|
||||
expect(validateAllowedOrigins("http://127.0.0.1:4321", [])).toEqual([]);
|
||||
});
|
||||
|
||||
it("accepts mixed config + env tagged origins", () => {
|
||||
const result = validateAllowedOrigins("https://example.com", [
|
||||
tag("https://preview.example.com", "config.allowedOrigins"),
|
||||
tag("https://staging.example.com", "EMDASH_ALLOWED_ORIGINS"),
|
||||
]);
|
||||
expect(result).toEqual(["https://preview.example.com", "https://staging.example.com"]);
|
||||
});
|
||||
|
||||
it("attributes Rule B errors to the source of the offending entry", () => {
|
||||
expect(() =>
|
||||
validateAllowedOrigins("https://example.com", [
|
||||
tag("https://preview.example.com", "config.allowedOrigins"),
|
||||
tag("https://other-site.com", "EMDASH_ALLOWED_ORIGINS"),
|
||||
]),
|
||||
).toThrow(/EmDash config error in EMDASH_ALLOWED_ORIGINS.*not a subdomain/);
|
||||
});
|
||||
|
||||
it("dedupes when config and env list the same origin", () => {
|
||||
const result = validateAllowedOrigins("https://example.com", [
|
||||
tag("https://preview.example.com", "config.allowedOrigins"),
|
||||
tag("https://preview.example.com", "EMDASH_ALLOWED_ORIGINS"),
|
||||
]);
|
||||
expect(result).toEqual(["https://preview.example.com"]);
|
||||
});
|
||||
});
|
||||
224
packages/core/tests/unit/auth/api-tokens.test.ts
Normal file
224
packages/core/tests/unit/auth/api-tokens.test.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* Unit tests for API token generation, hashing, and scope utilities.
|
||||
*/
|
||||
|
||||
import { Role, scopesForRole, clampScopes } from "@emdash-cms/auth";
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
generatePrefixedToken,
|
||||
hashApiToken,
|
||||
validateScopes,
|
||||
hasScope,
|
||||
TOKEN_PREFIXES,
|
||||
VALID_SCOPES,
|
||||
} from "../../../src/auth/api-tokens.js";
|
||||
|
||||
// Regex patterns for token validation
|
||||
const PAT_PREFIX_REGEX = /^ec_pat_/;
|
||||
const OAUTH_ACCESS_PREFIX_REGEX = /^ec_oat_/;
|
||||
const OAUTH_REFRESH_PREFIX_REGEX = /^ec_ort_/;
|
||||
const BASE64URL_INVALID_CHARS_REGEX = /[+/=]/;
|
||||
const BASE64URL_VALID_REGEX = /^[A-Za-z0-9_-]+$/;
|
||||
|
||||
describe("generatePrefixedToken", () => {
|
||||
it("generates a PAT with ec_pat_ prefix", () => {
|
||||
const { raw, hash, prefix } = generatePrefixedToken(TOKEN_PREFIXES.PAT);
|
||||
|
||||
expect(raw).toMatch(PAT_PREFIX_REGEX);
|
||||
expect(raw.length).toBeGreaterThan(20);
|
||||
expect(hash).toBeTruthy();
|
||||
expect(hash).not.toBe(raw);
|
||||
expect(prefix).toMatch(PAT_PREFIX_REGEX);
|
||||
expect(prefix.length).toBe(TOKEN_PREFIXES.PAT.length + 4);
|
||||
});
|
||||
|
||||
it("generates an OAuth access token with ec_oat_ prefix", () => {
|
||||
const { raw } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
|
||||
expect(raw).toMatch(OAUTH_ACCESS_PREFIX_REGEX);
|
||||
});
|
||||
|
||||
it("generates an OAuth refresh token with ec_ort_ prefix", () => {
|
||||
const { raw } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
|
||||
expect(raw).toMatch(OAUTH_REFRESH_PREFIX_REGEX);
|
||||
});
|
||||
|
||||
it("generates unique tokens each time", () => {
|
||||
const tokens = new Set<string>();
|
||||
for (let i = 0; i < 50; i++) {
|
||||
const { raw } = generatePrefixedToken("ec_pat_");
|
||||
tokens.add(raw);
|
||||
}
|
||||
expect(tokens.size).toBe(50);
|
||||
});
|
||||
|
||||
it("generates unique hashes for different tokens", () => {
|
||||
const { hash: hash1 } = generatePrefixedToken("ec_pat_");
|
||||
const { hash: hash2 } = generatePrefixedToken("ec_pat_");
|
||||
expect(hash1).not.toBe(hash2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("hashApiToken", () => {
|
||||
it("produces a deterministic hash", () => {
|
||||
const hash1 = hashApiToken("ec_pat_abc123");
|
||||
const hash2 = hashApiToken("ec_pat_abc123");
|
||||
expect(hash1).toBe(hash2);
|
||||
});
|
||||
|
||||
it("produces different hashes for different tokens", () => {
|
||||
const hash1 = hashApiToken("ec_pat_abc123");
|
||||
const hash2 = hashApiToken("ec_pat_def456");
|
||||
expect(hash1).not.toBe(hash2);
|
||||
});
|
||||
|
||||
it("hashes the full prefixed token", () => {
|
||||
// Same suffix but different prefix should produce different hashes
|
||||
const hash1 = hashApiToken("ec_pat_abc123");
|
||||
const hash2 = hashApiToken("ec_oat_abc123");
|
||||
expect(hash1).not.toBe(hash2);
|
||||
});
|
||||
|
||||
it("produces URL-safe base64 output", () => {
|
||||
const hash = hashApiToken("ec_pat_test");
|
||||
// Should not contain +, /, or = (standard base64 chars)
|
||||
expect(hash).not.toMatch(BASE64URL_INVALID_CHARS_REGEX);
|
||||
// Should only contain base64url chars
|
||||
expect(hash).toMatch(BASE64URL_VALID_REGEX);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateScopes", () => {
|
||||
it("returns empty array for valid scopes", () => {
|
||||
const invalid = validateScopes(["content:read", "media:write"]);
|
||||
expect(invalid).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns invalid scopes", () => {
|
||||
const invalid = validateScopes(["content:read", "invalid:scope", "admin"]);
|
||||
expect(invalid).toEqual(["invalid:scope"]);
|
||||
});
|
||||
|
||||
it("handles empty array", () => {
|
||||
expect(validateScopes([])).toEqual([]);
|
||||
});
|
||||
|
||||
it("accepts all valid scopes", () => {
|
||||
const invalid = validateScopes([...VALID_SCOPES]);
|
||||
expect(invalid).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("hasScope", () => {
|
||||
it("returns true when scope is present", () => {
|
||||
expect(hasScope(["content:read", "media:write"], "content:read")).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false when scope is missing", () => {
|
||||
expect(hasScope(["content:read"], "content:write")).toBe(false);
|
||||
});
|
||||
|
||||
it("admin scope grants access to everything", () => {
|
||||
expect(hasScope(["admin"], "content:read")).toBe(true);
|
||||
expect(hasScope(["admin"], "schema:write")).toBe(true);
|
||||
expect(hasScope(["admin"], "media:write")).toBe(true);
|
||||
});
|
||||
|
||||
it("handles empty scopes", () => {
|
||||
expect(hasScope([], "content:read")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// scopesForRole — maps roles to maximum allowed scopes
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("scopesForRole", () => {
|
||||
it("SUBSCRIBER gets only read scopes for content and media", () => {
|
||||
const scopes = scopesForRole(Role.SUBSCRIBER);
|
||||
expect(scopes).toContain("content:read");
|
||||
expect(scopes).toContain("media:read");
|
||||
expect(scopes).not.toContain("content:write");
|
||||
expect(scopes).not.toContain("media:write");
|
||||
expect(scopes).not.toContain("schema:read");
|
||||
expect(scopes).not.toContain("schema:write");
|
||||
expect(scopes).not.toContain("admin");
|
||||
});
|
||||
|
||||
it("CONTRIBUTOR gets content and media read/write", () => {
|
||||
const scopes = scopesForRole(Role.CONTRIBUTOR);
|
||||
expect(scopes).toContain("content:read");
|
||||
expect(scopes).toContain("content:write");
|
||||
expect(scopes).toContain("media:read");
|
||||
expect(scopes).toContain("media:write");
|
||||
expect(scopes).not.toContain("schema:read");
|
||||
expect(scopes).not.toContain("schema:write");
|
||||
expect(scopes).not.toContain("admin");
|
||||
});
|
||||
|
||||
it("EDITOR gets content, media, and schema:read", () => {
|
||||
const scopes = scopesForRole(Role.EDITOR);
|
||||
expect(scopes).toContain("content:read");
|
||||
expect(scopes).toContain("content:write");
|
||||
expect(scopes).toContain("media:read");
|
||||
expect(scopes).toContain("media:write");
|
||||
expect(scopes).toContain("schema:read");
|
||||
expect(scopes).not.toContain("schema:write");
|
||||
expect(scopes).not.toContain("admin");
|
||||
});
|
||||
|
||||
it("ADMIN gets all scopes including admin and schema:write", () => {
|
||||
const scopes = scopesForRole(Role.ADMIN);
|
||||
expect(scopes).toContain("content:read");
|
||||
expect(scopes).toContain("content:write");
|
||||
expect(scopes).toContain("media:read");
|
||||
expect(scopes).toContain("media:write");
|
||||
expect(scopes).toContain("schema:read");
|
||||
expect(scopes).toContain("schema:write");
|
||||
expect(scopes).toContain("admin");
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// clampScopes — intersects requested scopes with role-allowed scopes
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("clampScopes", () => {
|
||||
it("strips admin scope from non-admin role", () => {
|
||||
const result = clampScopes(["content:read", "admin"], Role.CONTRIBUTOR);
|
||||
expect(result).toEqual(["content:read"]);
|
||||
});
|
||||
|
||||
it("strips schema:write from editor role", () => {
|
||||
const result = clampScopes(["schema:read", "schema:write"], Role.EDITOR);
|
||||
expect(result).toEqual(["schema:read"]);
|
||||
});
|
||||
|
||||
it("preserves all scopes for admin role", () => {
|
||||
const all = [
|
||||
"content:read",
|
||||
"content:write",
|
||||
"media:read",
|
||||
"media:write",
|
||||
"schema:read",
|
||||
"schema:write",
|
||||
"admin",
|
||||
];
|
||||
const result = clampScopes(all, Role.ADMIN);
|
||||
expect(result).toEqual(all);
|
||||
});
|
||||
|
||||
it("returns empty array when no scopes survive clamping", () => {
|
||||
const result = clampScopes(["admin", "schema:write"], Role.SUBSCRIBER);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("handles empty input", () => {
|
||||
expect(clampScopes([], Role.ADMIN)).toEqual([]);
|
||||
});
|
||||
|
||||
it("strips schema:read from contributor role", () => {
|
||||
const result = clampScopes(["content:read", "schema:read"], Role.CONTRIBUTOR);
|
||||
expect(result).toEqual(["content:read"]);
|
||||
});
|
||||
});
|
||||
214
packages/core/tests/unit/auth/challenge-store.test.ts
Normal file
214
packages/core/tests/unit/auth/challenge-store.test.ts
Normal file
@@ -0,0 +1,214 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, vi, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
createChallengeStore,
|
||||
cleanupExpiredChallenges,
|
||||
} from "../../../src/auth/challenge-store.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("ChallengeStore", () => {
|
||||
let db: Kysely<Database>;
|
||||
let store: ReturnType<typeof createChallengeStore>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
store = createChallengeStore(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
describe("set()", () => {
|
||||
it("stores challenge with expiry", async () => {
|
||||
const challenge = "test-challenge-123";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
userId: "user-1",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.type).toBe("registration");
|
||||
expect(result?.userId).toBe("user-1");
|
||||
expect(result?.expiresAt).toBe(expiresAt);
|
||||
});
|
||||
|
||||
it("stores challenge without userId", async () => {
|
||||
const challenge = "auth-challenge-456";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "authentication",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.type).toBe("authentication");
|
||||
expect(result?.userId).toBeUndefined();
|
||||
});
|
||||
|
||||
it("updates existing challenge on conflict", async () => {
|
||||
const challenge = "update-test";
|
||||
const expiresAt1 = Date.now() + 5 * 60 * 1000;
|
||||
const expiresAt2 = Date.now() + 10 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
userId: "user-1",
|
||||
expiresAt: expiresAt1,
|
||||
});
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "authentication",
|
||||
userId: "user-2",
|
||||
expiresAt: expiresAt2,
|
||||
});
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result?.type).toBe("authentication");
|
||||
expect(result?.userId).toBe("user-2");
|
||||
expect(result?.expiresAt).toBe(expiresAt2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("get()", () => {
|
||||
it("returns stored challenge", async () => {
|
||||
const challenge = "get-test";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
userId: "user-abc",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).toEqual({
|
||||
type: "registration",
|
||||
userId: "user-abc",
|
||||
expiresAt,
|
||||
});
|
||||
});
|
||||
|
||||
it("returns null for non-existent challenge", async () => {
|
||||
const result = await store.get("does-not-exist");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for expired challenges and deletes them", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const challenge = "expired-test";
|
||||
const expiresAt = Date.now() + 60 * 1000; // 1 minute
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "registration",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
// Advance time past expiry
|
||||
vi.advanceTimersByTime(61 * 1000);
|
||||
|
||||
const result = await store.get(challenge);
|
||||
expect(result).toBeNull();
|
||||
|
||||
// Verify it was deleted
|
||||
vi.useRealTimers();
|
||||
const afterDelete = await db
|
||||
.selectFrom("auth_challenges")
|
||||
.selectAll()
|
||||
.where("challenge", "=", challenge)
|
||||
.executeTakeFirst();
|
||||
expect(afterDelete).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("delete()", () => {
|
||||
it("removes challenge", async () => {
|
||||
const challenge = "delete-test";
|
||||
const expiresAt = Date.now() + 5 * 60 * 1000;
|
||||
|
||||
await store.set(challenge, {
|
||||
type: "authentication",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
// Verify it exists
|
||||
const before = await store.get(challenge);
|
||||
expect(before).not.toBeNull();
|
||||
|
||||
// Delete it
|
||||
await store.delete(challenge);
|
||||
|
||||
// Verify it's gone
|
||||
const after = await store.get(challenge);
|
||||
expect(after).toBeNull();
|
||||
});
|
||||
|
||||
it("does not throw when deleting non-existent challenge", async () => {
|
||||
await expect(store.delete("non-existent")).resolves.not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("cleanupExpiredChallenges()", () => {
|
||||
it("removes only expired entries", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const now = Date.now();
|
||||
|
||||
// Create some challenges with different expiry times
|
||||
await store.set("expired-1", {
|
||||
type: "registration",
|
||||
expiresAt: now + 30 * 1000, // expires in 30s
|
||||
});
|
||||
await store.set("expired-2", {
|
||||
type: "authentication",
|
||||
expiresAt: now + 60 * 1000, // expires in 60s
|
||||
});
|
||||
await store.set("valid-1", {
|
||||
type: "registration",
|
||||
expiresAt: now + 5 * 60 * 1000, // expires in 5 minutes
|
||||
});
|
||||
await store.set("valid-2", {
|
||||
type: "authentication",
|
||||
expiresAt: now + 10 * 60 * 1000, // expires in 10 minutes
|
||||
});
|
||||
|
||||
// Advance time by 90 seconds (past first two, but not last two)
|
||||
vi.advanceTimersByTime(90 * 1000);
|
||||
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(2);
|
||||
|
||||
// Verify only valid ones remain
|
||||
vi.useRealTimers();
|
||||
const remaining = await db.selectFrom("auth_challenges").select("challenge").execute();
|
||||
|
||||
expect(remaining.map((r) => r.challenge).toSorted()).toEqual(["valid-1", "valid-2"]);
|
||||
});
|
||||
|
||||
it("returns 0 when no expired challenges", async () => {
|
||||
const expiresAt = Date.now() + 10 * 60 * 1000;
|
||||
|
||||
await store.set("valid", {
|
||||
type: "registration",
|
||||
expiresAt,
|
||||
});
|
||||
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(0);
|
||||
});
|
||||
|
||||
it("handles empty table", async () => {
|
||||
const deleted = await cleanupExpiredChallenges(db);
|
||||
expect(deleted).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
124
packages/core/tests/unit/auth/discovery-endpoints.test.ts
Normal file
124
packages/core/tests/unit/auth/discovery-endpoints.test.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
/**
|
||||
* Unit tests for OAuth discovery endpoint response shapes.
|
||||
*
|
||||
* These endpoints are public, unauthenticated, and return JSON metadata
|
||||
* that MCP clients use to discover OAuth endpoints. The response shapes
|
||||
* are contractual — changing them breaks MCP client compatibility.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { GET as getAuthorizationServer } from "../../../src/astro/routes/api/well-known/oauth-authorization-server.js";
|
||||
// We import the GET handlers directly — they're plain functions that take
|
||||
// an Astro-like context and return a Response.
|
||||
import { GET as getProtectedResource } from "../../../src/astro/routes/api/well-known/oauth-protected-resource.js";
|
||||
import { VALID_SCOPES } from "../../../src/auth/api-tokens.js";
|
||||
|
||||
/** Minimal mock of what the route handlers actually use from the Astro context. */
|
||||
function mockContext(origin = "https://example.com") {
|
||||
return {
|
||||
url: new URL("/.well-known/test", origin),
|
||||
locals: { emdash: undefined },
|
||||
} as unknown as Parameters<typeof getProtectedResource>[0];
|
||||
}
|
||||
|
||||
describe("Protected Resource Metadata (RFC 9728)", () => {
|
||||
it("returns correct resource and authorization_servers", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.resource).toBe("https://example.com/_emdash/api/mcp");
|
||||
expect(body.authorization_servers).toEqual(["https://example.com/_emdash"]);
|
||||
});
|
||||
|
||||
it("includes all valid scopes", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
const body = (await response.json()) as { scopes_supported: string[] };
|
||||
expect(body.scopes_supported).toEqual([...VALID_SCOPES]);
|
||||
});
|
||||
|
||||
it("advertises header-based bearer method", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
const body = (await response.json()) as { bearer_methods_supported: string[] };
|
||||
expect(body.bearer_methods_supported).toEqual(["header"]);
|
||||
});
|
||||
|
||||
it("sets CORS and cache headers", async () => {
|
||||
const response = await getProtectedResource(mockContext());
|
||||
expect(response.headers.get("Access-Control-Allow-Origin")).toBe("*");
|
||||
expect(response.headers.get("Cache-Control")).toContain("public");
|
||||
});
|
||||
|
||||
it("uses the request origin for URLs", async () => {
|
||||
const response = await getProtectedResource(mockContext("https://cms.mysite.com"));
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.resource).toBe("https://cms.mysite.com/_emdash/api/mcp");
|
||||
expect(body.authorization_servers).toEqual(["https://cms.mysite.com/_emdash"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Authorization Server Metadata (RFC 8414)", () => {
|
||||
it("returns correct issuer and endpoints", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.issuer).toBe("https://example.com/_emdash");
|
||||
expect(body.authorization_endpoint).toBe("https://example.com/_emdash/oauth/authorize");
|
||||
expect(body.token_endpoint).toBe("https://example.com/_emdash/api/oauth/token");
|
||||
expect(body.device_authorization_endpoint).toBe(
|
||||
"https://example.com/_emdash/api/oauth/device/code",
|
||||
);
|
||||
});
|
||||
|
||||
it("supports authorization_code, refresh_token, and device_code grants", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { grant_types_supported: string[] };
|
||||
expect(body.grant_types_supported).toContain("authorization_code");
|
||||
expect(body.grant_types_supported).toContain("refresh_token");
|
||||
expect(body.grant_types_supported).toContain("urn:ietf:params:oauth:grant-type:device_code");
|
||||
});
|
||||
|
||||
it("requires S256 code challenge method only", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { code_challenge_methods_supported: string[] };
|
||||
expect(body.code_challenge_methods_supported).toEqual(["S256"]);
|
||||
});
|
||||
|
||||
it("only supports code response type", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { response_types_supported: string[] };
|
||||
expect(body.response_types_supported).toEqual(["code"]);
|
||||
});
|
||||
|
||||
it("supports public clients (no auth method)", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { token_endpoint_auth_methods_supported: string[] };
|
||||
expect(body.token_endpoint_auth_methods_supported).toEqual(["none"]);
|
||||
});
|
||||
|
||||
it("advertises dynamic client registration", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { registration_endpoint: string };
|
||||
expect(body.registration_endpoint).toBe("https://example.com/_emdash/api/oauth/register");
|
||||
});
|
||||
|
||||
it("includes all valid scopes", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as { scopes_supported: string[] };
|
||||
expect(body.scopes_supported).toEqual([...VALID_SCOPES]);
|
||||
});
|
||||
|
||||
it("sets CORS and cache headers", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
expect(response.headers.get("Access-Control-Allow-Origin")).toBe("*");
|
||||
expect(response.headers.get("Cache-Control")).toContain("public");
|
||||
});
|
||||
|
||||
it("does not advertise unsupported client_id metadata documents", async () => {
|
||||
const response = await getAuthorizationServer(mockContext());
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body).not.toHaveProperty("client_id_metadata_document_supported");
|
||||
});
|
||||
});
|
||||
322
packages/core/tests/unit/auth/invite.test.ts
Normal file
322
packages/core/tests/unit/auth/invite.test.ts
Normal file
@@ -0,0 +1,322 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import {
|
||||
Role,
|
||||
createInvite,
|
||||
createInviteToken,
|
||||
validateInvite,
|
||||
completeInvite,
|
||||
InviteError,
|
||||
escapeHtml,
|
||||
generateToken,
|
||||
} from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Regex patterns for token validation
|
||||
const TOKEN_PARAM_REGEX = /token=/;
|
||||
const TOKEN_EXTRACT_REGEX = /token=([a-zA-Z0-9_-]+)/;
|
||||
|
||||
describe("Invite", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let adminId: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
|
||||
// Create an admin user (required for the invitedBy FK)
|
||||
const admin = await adapter.createUser({
|
||||
email: "admin@example.com",
|
||||
name: "Admin",
|
||||
role: Role.ADMIN,
|
||||
emailVerified: true,
|
||||
});
|
||||
adminId = admin.id;
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("createInviteToken", () => {
|
||||
it("should create a token and return url + email", async () => {
|
||||
const result = await createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"new@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.email).toBe("new@example.com");
|
||||
expect(result.url).toContain("https://example.com");
|
||||
expect(result.url).toContain("/admin/invite/accept?token=");
|
||||
expect(result.url).toMatch(TOKEN_PARAM_REGEX);
|
||||
// Should NOT have a token field on the result
|
||||
expect("token" in result).toBe(false);
|
||||
});
|
||||
|
||||
it("should preserve baseUrl path prefix in invite URL", async () => {
|
||||
const result = await createInviteToken(
|
||||
{ baseUrl: "https://example.com/_emdash" },
|
||||
adapter,
|
||||
"path@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.url).toContain("https://example.com/_emdash/admin/invite/accept");
|
||||
});
|
||||
|
||||
it("should throw user_exists if email is already registered", async () => {
|
||||
await adapter.createUser({
|
||||
email: "existing@example.com",
|
||||
name: "Existing",
|
||||
role: Role.AUTHOR,
|
||||
emailVerified: true,
|
||||
});
|
||||
|
||||
await expect(
|
||||
createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"existing@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
),
|
||||
).rejects.toThrow(InviteError);
|
||||
|
||||
try {
|
||||
await createInviteToken(
|
||||
{ baseUrl: "https://example.com" },
|
||||
adapter,
|
||||
"existing@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(InviteError);
|
||||
expect((error as InviteError).code).toBe("user_exists");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("createInvite", () => {
|
||||
let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
|
||||
let sentEmails: Array<EmailMessage>;
|
||||
|
||||
beforeEach(() => {
|
||||
sentEmails = [];
|
||||
mockEmailSend = vi.fn(async (email: EmailMessage) => {
|
||||
sentEmails.push(email);
|
||||
});
|
||||
});
|
||||
|
||||
it("should send email when email sender is provided", async () => {
|
||||
const result = await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"invite@example.com",
|
||||
Role.EDITOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(mockEmailSend).toHaveBeenCalledOnce();
|
||||
expect(sentEmails).toHaveLength(1);
|
||||
expect(sentEmails[0]!.to).toBe("invite@example.com");
|
||||
expect(sentEmails[0]!.subject).toContain("Test Site");
|
||||
expect(sentEmails[0]!.html).toContain("Accept Invite");
|
||||
expect(sentEmails[0]!.text).toContain(result.url);
|
||||
});
|
||||
|
||||
it("should return url without sending email when no sender", async () => {
|
||||
const result = await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
// No email sender — copy-link fallback
|
||||
},
|
||||
adapter,
|
||||
"noemail@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(result.url).toContain("https://example.com");
|
||||
expect(result.url).toMatch(TOKEN_PARAM_REGEX);
|
||||
expect(result.email).toBe("noemail@example.com");
|
||||
});
|
||||
|
||||
it("should HTML-escape siteName in email HTML body", async () => {
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: '<script>alert("xss")</script>',
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"xss@example.com",
|
||||
Role.AUTHOR,
|
||||
adminId,
|
||||
);
|
||||
|
||||
expect(sentEmails).toHaveLength(1);
|
||||
const html = sentEmails[0]!.html!;
|
||||
// HTML body should be escaped
|
||||
expect(html).not.toContain("<script>");
|
||||
expect(html).toContain("<script>");
|
||||
// Plain text subject should NOT be escaped (it's not HTML)
|
||||
expect(sentEmails[0]!.subject).toContain("<script>");
|
||||
});
|
||||
});
|
||||
|
||||
describe("validateInvite", () => {
|
||||
let capturedToken: string | null;
|
||||
|
||||
beforeEach(() => {
|
||||
capturedToken = null;
|
||||
});
|
||||
|
||||
async function createTestInvite(email: string, role: number = Role.AUTHOR): Promise<string> {
|
||||
const mockSend = vi.fn(async (msg: EmailMessage) => {
|
||||
const match = msg.text.match(TOKEN_EXTRACT_REGEX);
|
||||
capturedToken = match ? (match[1] ?? null) : null;
|
||||
});
|
||||
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test",
|
||||
email: mockSend,
|
||||
},
|
||||
adapter,
|
||||
email,
|
||||
role,
|
||||
adminId,
|
||||
);
|
||||
|
||||
if (!capturedToken) throw new Error("Token not captured from email");
|
||||
return capturedToken;
|
||||
}
|
||||
|
||||
it("should validate a valid token and return email + role", async () => {
|
||||
const token = await createTestInvite("valid@example.com", Role.EDITOR);
|
||||
|
||||
const result = await validateInvite(adapter, token);
|
||||
|
||||
expect(result.email).toBe("valid@example.com");
|
||||
expect(result.role).toBe(Role.EDITOR);
|
||||
});
|
||||
|
||||
it("should throw invalid_token for a nonexistent token", async () => {
|
||||
// Use a valid base64url token that doesn't exist in the DB
|
||||
const fakeToken = generateToken();
|
||||
|
||||
await expect(validateInvite(adapter, fakeToken)).rejects.toThrow(InviteError);
|
||||
|
||||
try {
|
||||
await validateInvite(adapter, fakeToken);
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(InviteError);
|
||||
expect((error as InviteError).code).toBe("invalid_token");
|
||||
}
|
||||
});
|
||||
|
||||
it("should throw invalid_token for an already-used token", async () => {
|
||||
const token = await createTestInvite("used@example.com");
|
||||
|
||||
// Complete the invite (consumes the token)
|
||||
await completeInvite(adapter, token, { name: "Used User" });
|
||||
|
||||
// Token should now be invalid
|
||||
await expect(validateInvite(adapter, token)).rejects.toThrow(InviteError);
|
||||
});
|
||||
});
|
||||
|
||||
describe("completeInvite", () => {
|
||||
async function createTestInvite(email: string, role: number = Role.AUTHOR): Promise<string> {
|
||||
let token: string | null = null;
|
||||
const mockSend = vi.fn(async (msg: EmailMessage) => {
|
||||
const match = msg.text.match(TOKEN_EXTRACT_REGEX);
|
||||
token = match ? (match[1] ?? null) : null;
|
||||
});
|
||||
|
||||
await createInvite(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test",
|
||||
email: mockSend,
|
||||
},
|
||||
adapter,
|
||||
email,
|
||||
role,
|
||||
adminId,
|
||||
);
|
||||
|
||||
if (!token) throw new Error("Token not captured from email");
|
||||
return token;
|
||||
}
|
||||
|
||||
it("should create user with correct email and role", async () => {
|
||||
const token = await createTestInvite("new@example.com", Role.EDITOR);
|
||||
|
||||
const user = await completeInvite(adapter, token, { name: "New User" });
|
||||
|
||||
expect(user.email).toBe("new@example.com");
|
||||
expect(user.role).toBe(Role.EDITOR);
|
||||
expect(user.name).toBe("New User");
|
||||
expect(user.emailVerified).toBe(true);
|
||||
});
|
||||
|
||||
it("should delete token after use (single-use)", async () => {
|
||||
const token = await createTestInvite("oneuse@example.com");
|
||||
|
||||
await completeInvite(adapter, token, { name: "One Use" });
|
||||
|
||||
// Second use should fail
|
||||
await expect(completeInvite(adapter, token, { name: "Second Use" })).rejects.toThrow(
|
||||
InviteError,
|
||||
);
|
||||
});
|
||||
|
||||
it("should throw invalid_token for nonexistent token", async () => {
|
||||
const fakeToken = generateToken();
|
||||
|
||||
await expect(completeInvite(adapter, fakeToken, { name: "Fake" })).rejects.toThrow(
|
||||
InviteError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("escapeHtml", () => {
|
||||
it("should escape angle brackets", () => {
|
||||
expect(escapeHtml("<script>")).toBe("<script>");
|
||||
});
|
||||
|
||||
it("should escape ampersands", () => {
|
||||
expect(escapeHtml("a & b")).toBe("a & b");
|
||||
});
|
||||
|
||||
it("should escape double quotes", () => {
|
||||
expect(escapeHtml('"hello"')).toBe(""hello"");
|
||||
});
|
||||
|
||||
it("should handle strings with no special characters", () => {
|
||||
expect(escapeHtml("My Site")).toBe("My Site");
|
||||
});
|
||||
|
||||
it("should handle empty string", () => {
|
||||
expect(escapeHtml("")).toBe("");
|
||||
});
|
||||
});
|
||||
});
|
||||
53
packages/core/tests/unit/auth/magic-link.test.ts
Normal file
53
packages/core/tests/unit/auth/magic-link.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import { Role, sendMagicLink } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Magic Link", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
|
||||
let sentEmails: Array<EmailMessage>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
sentEmails = [];
|
||||
mockEmailSend = vi.fn(async (email: EmailMessage) => {
|
||||
sentEmails.push(email);
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("sends verify links through the injected EmDash auth route", async () => {
|
||||
await adapter.createUser({
|
||||
email: "author@example.com",
|
||||
name: "Author",
|
||||
role: Role.AUTHOR,
|
||||
emailVerified: true,
|
||||
});
|
||||
|
||||
await sendMagicLink(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
siteName: "Test Site",
|
||||
email: mockEmailSend,
|
||||
},
|
||||
adapter,
|
||||
"author@example.com",
|
||||
);
|
||||
|
||||
expect(mockEmailSend).toHaveBeenCalledOnce();
|
||||
expect(sentEmails[0]!.text).toContain(
|
||||
"https://example.com/_emdash/api/auth/magic-link/verify?token=",
|
||||
);
|
||||
});
|
||||
});
|
||||
188
packages/core/tests/unit/auth/mcp-discovery-post.test.ts
Normal file
188
packages/core/tests/unit/auth/mcp-discovery-post.test.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("virtual:emdash/auth", () => ({ authenticate: vi.fn() }));
|
||||
vi.mock("virtual:emdash/config", () => ({ default: {} }));
|
||||
vi.mock("astro:middleware", () => ({
|
||||
defineMiddleware: (handler: unknown) => handler,
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth", () => ({
|
||||
TOKEN_PREFIXES: {},
|
||||
generatePrefixedToken: vi.fn(),
|
||||
hashPrefixedToken: vi.fn(),
|
||||
VALID_SCOPES: [],
|
||||
validateScopes: vi.fn(),
|
||||
hasScope: vi.fn(() => false),
|
||||
computeS256Challenge: vi.fn(),
|
||||
Role: { ADMIN: 50 },
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth/adapters/kysely", () => ({
|
||||
createKyselyAdapter: vi.fn(() => ({
|
||||
getUserById: vi.fn(async (id: string) => ({
|
||||
id,
|
||||
email: "admin@test.com",
|
||||
name: "Admin",
|
||||
role: 50,
|
||||
disabled: 0,
|
||||
})),
|
||||
getUserByEmail: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
type AuthMiddlewareModule = typeof import("../../../src/astro/middleware/auth.js");
|
||||
|
||||
let onRequest: AuthMiddlewareModule["onRequest"];
|
||||
|
||||
beforeAll(async () => {
|
||||
({ onRequest } = await import("../../../src/astro/middleware/auth.js"));
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
async function runAuthMiddleware(opts: {
|
||||
pathname: string;
|
||||
method?: string;
|
||||
headers?: HeadersInit;
|
||||
sessionUserId?: string | null;
|
||||
siteUrl?: string;
|
||||
}) {
|
||||
const url = new URL(opts.pathname, "https://example.com");
|
||||
const session = {
|
||||
get: vi.fn().mockResolvedValue(opts.sessionUserId ? { id: opts.sessionUserId } : null),
|
||||
set: vi.fn(),
|
||||
destroy: vi.fn(),
|
||||
};
|
||||
const next = vi.fn(async () => new Response("ok"));
|
||||
const response = await onRequest(
|
||||
{
|
||||
url,
|
||||
request: new Request(url, {
|
||||
method: opts.method ?? "POST",
|
||||
headers: opts.headers,
|
||||
body: JSON.stringify({
|
||||
jsonrpc: "2.0",
|
||||
id: 1,
|
||||
method: "initialize",
|
||||
params: {
|
||||
protocolVersion: "2025-03-26",
|
||||
capabilities: {},
|
||||
clientInfo: { name: "debug", version: "1.0" },
|
||||
},
|
||||
}),
|
||||
}),
|
||||
locals: {
|
||||
emdash: {
|
||||
db: {},
|
||||
config: opts.siteUrl ? { siteUrl: opts.siteUrl } : {},
|
||||
},
|
||||
},
|
||||
session,
|
||||
redirect: (location: string) =>
|
||||
new Response(null, {
|
||||
status: 302,
|
||||
headers: { Location: location },
|
||||
}),
|
||||
} as Parameters<AuthMiddlewareModule["onRequest"]>[0],
|
||||
next,
|
||||
);
|
||||
|
||||
return { response, next, session };
|
||||
}
|
||||
|
||||
describe("MCP discovery auth middleware", () => {
|
||||
it("returns 401 with discovery metadata for unauthenticated MCP POST requests", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" },
|
||||
});
|
||||
});
|
||||
|
||||
it("does not read the session for anonymous MCP POST discovery requests", async () => {
|
||||
const { response, next, session } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(session.get).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("uses the configured public origin for anonymous MCP POST discovery responses", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
siteUrl: "https://public.example.com",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://public.example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
});
|
||||
|
||||
it("returns 401 with discovery metadata for invalid bearer tokens on MCP POST", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: {
|
||||
Authorization: "Bearer invalid",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(response.headers.get("WWW-Authenticate")).toBe(
|
||||
'Bearer resource_metadata="https://example.com/.well-known/oauth-protected-resource"',
|
||||
);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "INVALID_TOKEN", message: "Invalid or expired token" },
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects MCP POST requests that only have session auth", async () => {
|
||||
const { response, next, session } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/mcp",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"X-EmDash-Request": "1",
|
||||
},
|
||||
sessionUserId: "user_1",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(401);
|
||||
expect(session.get).not.toHaveBeenCalled();
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" },
|
||||
});
|
||||
});
|
||||
|
||||
it("still rejects non-MCP API POST requests without the CSRF header", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/content/posts",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(403);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "CSRF_REJECTED", message: "Missing required header" },
|
||||
});
|
||||
});
|
||||
});
|
||||
131
packages/core/tests/unit/auth/oauth-register-route.test.ts
Normal file
131
packages/core/tests/unit/auth/oauth-register-route.test.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { POST as registerClient } from "../../../src/astro/routes/api/oauth/register.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("oauth register route", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("returns RFC 7591-style errors for malformed JSON", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: "{",
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Request body must be valid JSON",
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects unsupported token endpoint auth methods", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
redirect_uris: ["http://127.0.0.1:9999/callback"],
|
||||
token_endpoint_auth_method: "client_secret_basic",
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Only token_endpoint_auth_method=none is supported",
|
||||
});
|
||||
});
|
||||
|
||||
it("rejects redirect URIs that the authorize flow would later refuse", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
redirect_uris: ["http://example.com/callback"],
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: "invalid_client_metadata",
|
||||
error_description: "Invalid redirect URI: HTTP redirect URIs are only allowed for localhost",
|
||||
});
|
||||
});
|
||||
|
||||
it("registers public clients with loopback redirect URIs", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/oauth/register", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
client_name: "Harness Test",
|
||||
redirect_uris: ["http://127.0.0.1:9999/callback"],
|
||||
token_endpoint_auth_method: "none",
|
||||
grant_types: ["authorization_code", "refresh_token"],
|
||||
response_types: ["code"],
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await registerClient({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
} as Parameters<typeof registerClient>[0]);
|
||||
|
||||
expect(response.status).toBe(201);
|
||||
expect(response.headers.get("Cache-Control")).toBe("no-store");
|
||||
expect(response.headers.get("Pragma")).toBe("no-cache");
|
||||
|
||||
const body = (await response.json()) as Record<string, unknown>;
|
||||
expect(body.client_name).toBe("Harness Test");
|
||||
expect(body.redirect_uris).toEqual(["http://127.0.0.1:9999/callback"]);
|
||||
expect(body.token_endpoint_auth_method).toBe("none");
|
||||
expect(body.grant_types).toEqual(["authorization_code", "refresh_token"]);
|
||||
expect(body.response_types).toEqual(["code"]);
|
||||
expect(typeof body.client_id).toBe("string");
|
||||
});
|
||||
});
|
||||
225
packages/core/tests/unit/auth/passkey-config.test.ts
Normal file
225
packages/core/tests/unit/auth/passkey-config.test.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { getPasskeyConfig } from "../../../src/auth/passkey-config.js";
|
||||
|
||||
/** URL shape from `new URL(request.url)` after trusted proxy + Astro `security.allowedDomains`. */
|
||||
function urlAfterTrustedProxy(path: string, host: string, proto: "http" | "https"): URL {
|
||||
return new URL(path, `${proto}://${host}`);
|
||||
}
|
||||
|
||||
describe("passkey-config", () => {
|
||||
describe("getPasskeyConfig() via emulated reverse proxy URL", () => {
|
||||
const internalDevUrl = "http://127.0.0.1:4321/_emdash/api/auth/passkey/register/options";
|
||||
|
||||
it("loopback URL alone matches Node before rewrite — rpId is not the public host", () => {
|
||||
const url = new URL(internalDevUrl);
|
||||
expect(getPasskeyConfig(url).rpId).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("emits a single-element origins array by default", () => {
|
||||
const url = new URL(internalDevUrl);
|
||||
expect(getPasskeyConfig(url).origins).toEqual(["http://127.0.0.1:4321"]);
|
||||
});
|
||||
|
||||
it("forwarded Host/Proto yield the URL handlers see; rp matches HTTP reverse-proxy edge", () => {
|
||||
const url = urlAfterTrustedProxy(
|
||||
"/_emdash/api/auth/passkey/register/options",
|
||||
"emdash.local:8080",
|
||||
"http",
|
||||
);
|
||||
const config = getPasskeyConfig(url, "My Site");
|
||||
expect(config.rpId).toBe("emdash.local");
|
||||
expect(config.rpName).toBe("My Site");
|
||||
expect(config.origins[0]).toBe("http://emdash.local:8080");
|
||||
});
|
||||
|
||||
it("HTTPS listener on proxy with HTTP upstream: siteUrl aligns origin with browser", () => {
|
||||
const urlAstroSeesFromForwardedHttp = urlAfterTrustedProxy(
|
||||
"/_emdash/api/setup/admin",
|
||||
"emdash.local:8080",
|
||||
"http",
|
||||
);
|
||||
const browserOrigin = "https://emdash.local:8443";
|
||||
const config = getPasskeyConfig(urlAstroSeesFromForwardedHttp, "My Site", browserOrigin);
|
||||
expect(config.rpId).toBe("emdash.local");
|
||||
expect(config.rpName).toBe("My Site");
|
||||
expect(config.origins[0]).toBe(browserOrigin);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPasskeyConfig()", () => {
|
||||
it("throws when siteUrl is not a valid URL", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
expect(() => getPasskeyConfig(url, "Site", "::not-a-url")).toThrow("Invalid siteUrl");
|
||||
});
|
||||
|
||||
it("extracts rpId from localhost URL", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("localhost");
|
||||
});
|
||||
|
||||
it("extracts rpId from production URL", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("extracts rpId from subdomain URL", () => {
|
||||
const url = new URL("https://admin.example.com/dashboard");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("admin.example.com");
|
||||
});
|
||||
|
||||
it("returns correct origin for http", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.origins[0]).toBe("http://localhost:4321");
|
||||
});
|
||||
|
||||
it("returns correct origin for https", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.origins[0]).toBe("https://example.com");
|
||||
});
|
||||
|
||||
it("handles port numbers correctly", () => {
|
||||
const url = new URL("http://localhost:3000/setup");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("localhost");
|
||||
expect(config.origins[0]).toBe("http://localhost:3000");
|
||||
});
|
||||
|
||||
it("handles https with non-standard port", () => {
|
||||
const url = new URL("https://staging.example.com:8443/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpId).toBe("staging.example.com");
|
||||
expect(config.origins[0]).toBe("https://staging.example.com:8443");
|
||||
});
|
||||
|
||||
it("uses hostname as rpName by default", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
expect(config.rpName).toBe("example.com");
|
||||
});
|
||||
|
||||
it("uses provided siteName for rpName", () => {
|
||||
const url = new URL("https://example.com/admin");
|
||||
const config = getPasskeyConfig(url, "My Cool Site");
|
||||
|
||||
expect(config.rpName).toBe("My Cool Site");
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("ignores path and query params for origin", () => {
|
||||
const url = new URL("https://example.com:443/admin/setup?foo=bar#section");
|
||||
const config = getPasskeyConfig(url);
|
||||
|
||||
// Standard https port 443 is omitted from origin
|
||||
expect(config.origins[0]).toBe("https://example.com");
|
||||
expect(config.rpId).toBe("example.com");
|
||||
});
|
||||
|
||||
it("documents HTTPS reverse-proxy dev pitfall: server URL scheme must match the browser", () => {
|
||||
const serverDevUrl = new URL("http://emdash.local:8443/_emdash/api/setup/admin");
|
||||
const browserPageOrigin = new URL("https://emdash.local:8443/_emdash/admin/setup");
|
||||
|
||||
const fromServer = getPasskeyConfig(serverDevUrl);
|
||||
const fromBrowser = getPasskeyConfig(browserPageOrigin);
|
||||
|
||||
expect(fromServer.rpId).toBe(fromBrowser.rpId);
|
||||
expect(fromServer.origins[0]).toBe("http://emdash.local:8443");
|
||||
expect(fromBrowser.origins[0]).toBe("https://emdash.local:8443");
|
||||
// verifyRegistrationResponse requires clientData.origin === config.origins[0] (see @emdash-cms/auth/passkey)
|
||||
expect(fromServer.origins[0]).not.toBe(fromBrowser.origins[0]);
|
||||
});
|
||||
|
||||
it("siteUrl overrides origin and rpId (TLS termination and loopback request URL)", () => {
|
||||
const fromForwardedHttp = getPasskeyConfig(
|
||||
new URL("http://emdash.local:8443/_emdash/api/setup/admin"),
|
||||
"My Site",
|
||||
"https://emdash.local:8443",
|
||||
);
|
||||
expect(fromForwardedHttp.rpName).toBe("My Site");
|
||||
expect(fromForwardedHttp.rpId).toBe("emdash.local");
|
||||
expect(fromForwardedHttp.origins[0]).toBe("https://emdash.local:8443");
|
||||
|
||||
const fromLoopback = getPasskeyConfig(
|
||||
new URL("http://127.0.0.1:4321/_emdash/api/setup/admin"),
|
||||
"My CMS",
|
||||
"https://public.example:8443",
|
||||
);
|
||||
expect(fromLoopback.rpId).toBe("public.example");
|
||||
expect(fromLoopback.rpName).toBe("My CMS");
|
||||
expect(fromLoopback.origins[0]).toBe("https://public.example:8443");
|
||||
|
||||
const hostnameOnly = getPasskeyConfig(
|
||||
new URL("http://127.0.0.1:4321/x"),
|
||||
undefined,
|
||||
"https://public.example:8443",
|
||||
);
|
||||
expect(hostnameOnly.rpName).toBe("public.example");
|
||||
expect(hostnameOnly.rpId).toBe("public.example");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getPasskeyConfig() multi-origin", () => {
|
||||
it("appends allowedOrigins after the canonical origin", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, "Site", "https://example.com", [
|
||||
"https://preview.example.com",
|
||||
"https://staging.example.com",
|
||||
]);
|
||||
|
||||
expect(config.rpId).toBe("example.com");
|
||||
expect(config.origins).toEqual([
|
||||
"https://example.com",
|
||||
"https://preview.example.com",
|
||||
"https://staging.example.com",
|
||||
]);
|
||||
});
|
||||
|
||||
it("places the canonical origin first when no siteUrl is set", () => {
|
||||
const url = new URL("https://preview.example.com/admin");
|
||||
const config = getPasskeyConfig(url, undefined, undefined, ["https://example.com"]);
|
||||
|
||||
// rpId is preview.example.com (no siteUrl); allow apex as second origin
|
||||
expect(config.rpId).toBe("preview.example.com");
|
||||
expect(config.origins).toEqual(["https://preview.example.com", "https://example.com"]);
|
||||
});
|
||||
|
||||
it("dedupes if allowedOrigins repeats the canonical origin", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", [
|
||||
"https://example.com",
|
||||
"https://preview.example.com",
|
||||
]);
|
||||
|
||||
expect(config.origins).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("ignores empty/falsy entries in allowedOrigins", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", [
|
||||
"",
|
||||
"https://preview.example.com",
|
||||
]);
|
||||
|
||||
expect(config.origins).toEqual(["https://example.com", "https://preview.example.com"]);
|
||||
});
|
||||
|
||||
it("yields a single-element origins array when allowedOrigins is empty", () => {
|
||||
const url = new URL("http://localhost:4321/admin");
|
||||
const config = getPasskeyConfig(url, undefined, "https://example.com", []);
|
||||
expect(config.origins).toEqual(["https://example.com"]);
|
||||
});
|
||||
});
|
||||
});
|
||||
278
packages/core/tests/unit/auth/passkey-management.test.ts
Normal file
278
packages/core/tests/unit/auth/passkey-management.test.ts
Normal file
@@ -0,0 +1,278 @@
|
||||
import type { AuthAdapter, Credential, User } from "@emdash-cms/auth";
|
||||
import { Role } from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("Passkey Management", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
let testUser: User;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
|
||||
// Create a test user
|
||||
testUser = await adapter.createUser({
|
||||
email: "test@example.com",
|
||||
name: "Test User",
|
||||
role: Role.ADMIN,
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
// Helper to create a test credential
|
||||
async function createTestCredential(userId: string, name?: string): Promise<Credential> {
|
||||
const credentialId = `cred-${Date.now()}-${Math.random().toString(36).slice(2)}`;
|
||||
return adapter.createCredential({
|
||||
id: credentialId,
|
||||
userId,
|
||||
publicKey: new Uint8Array([1, 2, 3, 4]),
|
||||
counter: 0,
|
||||
deviceType: "multiDevice",
|
||||
backedUp: true,
|
||||
transports: ["internal"],
|
||||
name: name ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
describe("getCredentialById", () => {
|
||||
it("should return credential by ID", async () => {
|
||||
const created = await createTestCredential(testUser.id, "My MacBook");
|
||||
|
||||
const credential = await adapter.getCredentialById(created.id);
|
||||
|
||||
expect(credential).not.toBeNull();
|
||||
expect(credential?.id).toBe(created.id);
|
||||
expect(credential?.userId).toBe(testUser.id);
|
||||
expect(credential?.name).toBe("My MacBook");
|
||||
expect(credential?.deviceType).toBe("multiDevice");
|
||||
expect(credential?.backedUp).toBe(true);
|
||||
});
|
||||
|
||||
it("should return null for non-existent credential", async () => {
|
||||
const credential = await adapter.getCredentialById("non-existent");
|
||||
expect(credential).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("getCredentialsByUserId", () => {
|
||||
it("should return empty array for user with no passkeys", async () => {
|
||||
const credentials = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(credentials).toEqual([]);
|
||||
});
|
||||
|
||||
it("should return all passkeys for a user", async () => {
|
||||
await createTestCredential(testUser.id, "MacBook Pro");
|
||||
await createTestCredential(testUser.id, "iPhone");
|
||||
await createTestCredential(testUser.id, null);
|
||||
|
||||
const credentials = await adapter.getCredentialsByUserId(testUser.id);
|
||||
|
||||
expect(credentials).toHaveLength(3);
|
||||
const names = credentials.map((c) => c.name);
|
||||
expect(names).toContain("MacBook Pro");
|
||||
expect(names).toContain("iPhone");
|
||||
expect(names).toContain(null);
|
||||
});
|
||||
|
||||
it("should not return passkeys from other users", async () => {
|
||||
const otherUser = await adapter.createUser({
|
||||
email: "other@example.com",
|
||||
name: "Other User",
|
||||
});
|
||||
|
||||
await createTestCredential(testUser.id, "Test User Passkey");
|
||||
await createTestCredential(otherUser.id, "Other User Passkey");
|
||||
|
||||
const testUserCreds = await adapter.getCredentialsByUserId(testUser.id);
|
||||
const otherUserCreds = await adapter.getCredentialsByUserId(otherUser.id);
|
||||
|
||||
expect(testUserCreds).toHaveLength(1);
|
||||
expect(testUserCreds[0].name).toBe("Test User Passkey");
|
||||
|
||||
expect(otherUserCreds).toHaveLength(1);
|
||||
expect(otherUserCreds[0].name).toBe("Other User Passkey");
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateCredentialName", () => {
|
||||
it("should update the credential name", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Old Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, "New Name");
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBe("New Name");
|
||||
});
|
||||
|
||||
it("should set name to null when provided null", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Has Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, null);
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle empty string as name", async () => {
|
||||
const credential = await createTestCredential(testUser.id, "Has Name");
|
||||
|
||||
await adapter.updateCredentialName(credential.id, "");
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.name).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("countCredentialsByUserId", () => {
|
||||
it("should return 0 for user with no passkeys", async () => {
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(0);
|
||||
});
|
||||
|
||||
it("should return correct count", async () => {
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(3);
|
||||
});
|
||||
|
||||
it("should only count credentials for the specified user", async () => {
|
||||
const otherUser = await adapter.createUser({
|
||||
email: "other@example.com",
|
||||
});
|
||||
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(testUser.id);
|
||||
await createTestCredential(otherUser.id);
|
||||
|
||||
const testUserCount = await adapter.countCredentialsByUserId(testUser.id);
|
||||
const otherUserCount = await adapter.countCredentialsByUserId(otherUser.id);
|
||||
|
||||
expect(testUserCount).toBe(2);
|
||||
expect(otherUserCount).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteCredential", () => {
|
||||
it("should delete a credential", async () => {
|
||||
const credential = await createTestCredential(testUser.id);
|
||||
|
||||
await adapter.deleteCredential(credential.id);
|
||||
|
||||
const deleted = await adapter.getCredentialById(credential.id);
|
||||
expect(deleted).toBeNull();
|
||||
});
|
||||
|
||||
it("should not affect other credentials", async () => {
|
||||
await createTestCredential(testUser.id, "Keep This");
|
||||
const cred2 = await createTestCredential(testUser.id, "Delete This");
|
||||
|
||||
await adapter.deleteCredential(cred2.id);
|
||||
|
||||
const remaining = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(remaining).toHaveLength(1);
|
||||
expect(remaining[0].name).toBe("Keep This");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Passkey Management Flow", () => {
|
||||
it("should support full CRUD flow", async () => {
|
||||
// Create passkeys
|
||||
const passkey1 = await createTestCredential(testUser.id, "MacBook");
|
||||
const passkey2 = await createTestCredential(testUser.id, "iPhone");
|
||||
|
||||
// List passkeys
|
||||
let passkeys = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(passkeys).toHaveLength(2);
|
||||
|
||||
// Rename a passkey
|
||||
await adapter.updateCredentialName(passkey1.id, "MacBook Pro M3");
|
||||
const renamed = await adapter.getCredentialById(passkey1.id);
|
||||
expect(renamed?.name).toBe("MacBook Pro M3");
|
||||
|
||||
// Delete a passkey (not the last one)
|
||||
const countBefore = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(countBefore).toBe(2);
|
||||
|
||||
await adapter.deleteCredential(passkey2.id);
|
||||
|
||||
const countAfter = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(countAfter).toBe(1);
|
||||
|
||||
// Verify only one remains
|
||||
passkeys = await adapter.getCredentialsByUserId(testUser.id);
|
||||
expect(passkeys).toHaveLength(1);
|
||||
expect(passkeys[0].name).toBe("MacBook Pro M3");
|
||||
});
|
||||
|
||||
it("should enforce 'cannot delete last passkey' in application logic", async () => {
|
||||
// Create a single passkey
|
||||
const passkey = await createTestCredential(testUser.id, "Only Passkey");
|
||||
|
||||
// Check count before deletion attempt
|
||||
const count = await adapter.countCredentialsByUserId(testUser.id);
|
||||
expect(count).toBe(1);
|
||||
|
||||
// Application should check count and prevent deletion
|
||||
// The adapter itself doesn't enforce this - it's the API layer's job
|
||||
if (count <= 1) {
|
||||
// Don't delete - this is what the API should do
|
||||
const stillExists = await adapter.getCredentialById(passkey.id);
|
||||
expect(stillExists).not.toBeNull();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("Credential properties", () => {
|
||||
it("should preserve all credential properties", async () => {
|
||||
await adapter.createCredential({
|
||||
id: "test-cred-123",
|
||||
userId: testUser.id,
|
||||
publicKey: new Uint8Array([10, 20, 30, 40, 50]),
|
||||
counter: 5,
|
||||
deviceType: "singleDevice",
|
||||
backedUp: false,
|
||||
transports: ["usb", "nfc"],
|
||||
name: "YubiKey 5",
|
||||
});
|
||||
|
||||
const retrieved = await adapter.getCredentialById("test-cred-123");
|
||||
|
||||
expect(retrieved).not.toBeNull();
|
||||
expect(retrieved?.id).toBe("test-cred-123");
|
||||
expect(retrieved?.userId).toBe(testUser.id);
|
||||
expect(retrieved?.counter).toBe(5);
|
||||
expect(retrieved?.deviceType).toBe("singleDevice");
|
||||
expect(retrieved?.backedUp).toBe(false);
|
||||
expect(retrieved?.transports).toEqual(["usb", "nfc"]);
|
||||
expect(retrieved?.name).toBe("YubiKey 5");
|
||||
expect(retrieved?.createdAt).toBeInstanceOf(Date);
|
||||
expect(retrieved?.lastUsedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should update lastUsedAt when counter is updated", async () => {
|
||||
const credential = await createTestCredential(testUser.id);
|
||||
const originalLastUsed = credential.lastUsedAt;
|
||||
|
||||
// Small delay to ensure time difference
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
await adapter.updateCredentialCounter(credential.id, 1);
|
||||
|
||||
const updated = await adapter.getCredentialById(credential.id);
|
||||
expect(updated?.counter).toBe(1);
|
||||
expect(updated?.lastUsedAt.getTime()).toBeGreaterThan(originalLastUsed.getTime());
|
||||
});
|
||||
});
|
||||
});
|
||||
58
packages/core/tests/unit/auth/passkey-verify-route.test.ts
Normal file
58
packages/core/tests/unit/auth/passkey-verify-route.test.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import { POST as verifyPasskey } from "../../../src/astro/routes/api/auth/passkey/verify.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("passkey verify route", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("returns unauthorized instead of internal server error when the credential is not registered", async () => {
|
||||
const request = new Request("http://localhost:4321/_emdash/api/auth/passkey/verify", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
credential: {
|
||||
id: "unregistered-credential",
|
||||
rawId: "unregistered-credential",
|
||||
type: "public-key",
|
||||
response: {
|
||||
clientDataJSON: "AA",
|
||||
authenticatorData: "AA",
|
||||
signature: "AA",
|
||||
},
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const response = await verifyPasskey({
|
||||
request,
|
||||
locals: {
|
||||
emdash: {
|
||||
db,
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
session: {
|
||||
set: vi.fn(),
|
||||
},
|
||||
} as Parameters<typeof verifyPasskey>[0]);
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: {
|
||||
code: "UNAUTHORIZED",
|
||||
message: "Authentication failed",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
114
packages/core/tests/unit/auth/scopes.test.ts
Normal file
114
packages/core/tests/unit/auth/scopes.test.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Unit tests for scope enforcement.
|
||||
*
|
||||
* Tests the requireScope() guard that API routes and MCP tools use
|
||||
* to enforce token scope restrictions.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { requireScope } from "../../../src/auth/scopes.js";
|
||||
|
||||
describe("requireScope", () => {
|
||||
it("allows session auth (no tokenScopes) unconditionally", () => {
|
||||
const result = requireScope({}, "content:write");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("allows session auth with undefined tokenScopes", () => {
|
||||
const result = requireScope({ tokenScopes: undefined }, "schema:write");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("allows when token has the required scope", () => {
|
||||
const result = requireScope(
|
||||
{ tokenScopes: ["content:read", "content:write"] },
|
||||
"content:write",
|
||||
);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("rejects when token lacks the required scope", () => {
|
||||
const result = requireScope({ tokenScopes: ["content:read"] }, "content:write");
|
||||
expect(result).toBeInstanceOf(Response);
|
||||
expect(result!.status).toBe(403);
|
||||
});
|
||||
|
||||
it("returns INSUFFICIENT_SCOPE error body", async () => {
|
||||
const result = requireScope({ tokenScopes: ["media:read"] }, "schema:write");
|
||||
expect(result).not.toBeNull();
|
||||
const body = (await result!.json()) as { error: { code: string; message: string } };
|
||||
expect(body.error.code).toBe("INSUFFICIENT_SCOPE");
|
||||
expect(body.error.message).toContain("schema:write");
|
||||
});
|
||||
|
||||
it("admin scope grants access to everything", () => {
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "content:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "content:write")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "schema:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "schema:write")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "media:read")).toBeNull();
|
||||
expect(requireScope({ tokenScopes: ["admin"] }, "media:write")).toBeNull();
|
||||
});
|
||||
|
||||
it("empty scopes array rejects everything", () => {
|
||||
expect(requireScope({ tokenScopes: [] }, "content:read")).toBeInstanceOf(Response);
|
||||
expect(requireScope({ tokenScopes: [] }, "admin")).toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
it("read scope does not grant write access", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:read"] }, "content:write")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
expect(requireScope({ tokenScopes: ["media:read"] }, "media:write")).toBeInstanceOf(Response);
|
||||
expect(requireScope({ tokenScopes: ["schema:read"] }, "schema:write")).toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
describe("backwards compatibility: content:write implicit grants", () => {
|
||||
// Before the menu/taxonomy mutation MCP tools were split out into
|
||||
// `menus:manage` and `taxonomies:manage`, the only scope checked for
|
||||
// those operations was `content:write`. Tokens issued before the
|
||||
// split must continue to work — `content:write` implicitly grants
|
||||
// `menus:manage` and `taxonomies:manage`.
|
||||
|
||||
it("content:write grants menus:manage", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:write"] }, "menus:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("content:write grants taxonomies:manage", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:write"] }, "taxonomies:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("content:read does NOT grant menus:manage (read-only doesn't escalate)", () => {
|
||||
expect(requireScope({ tokenScopes: ["content:read"] }, "menus:manage")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
});
|
||||
|
||||
it("menus:manage alone allows menu operations", () => {
|
||||
expect(requireScope({ tokenScopes: ["menus:manage"] }, "menus:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("menus:manage does not grant content:write (no reverse implication)", () => {
|
||||
expect(requireScope({ tokenScopes: ["menus:manage"] }, "content:write")).toBeInstanceOf(
|
||||
Response,
|
||||
);
|
||||
});
|
||||
|
||||
it("taxonomies:manage alone allows taxonomy operations", () => {
|
||||
expect(requireScope({ tokenScopes: ["taxonomies:manage"] }, "taxonomies:manage")).toBeNull();
|
||||
});
|
||||
|
||||
it("prototype-chain keys do not crash or grant access", () => {
|
||||
// Defense in depth: the implicit-grants table is a Map, but a
|
||||
// regression to a plain-object lookup would let Object.prototype
|
||||
// keys (`__proto__`, `constructor`, `toString`) walk the chain
|
||||
// and either crash with "x.includes is not a function" or
|
||||
// accidentally satisfy the check. Either is a 500 instead of a
|
||||
// 403. Verify both paths reject cleanly.
|
||||
for (const key of ["__proto__", "constructor", "toString", "hasOwnProperty"]) {
|
||||
expect(requireScope({ tokenScopes: [key] }, "menus:manage")).toBeInstanceOf(Response);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
465
packages/core/tests/unit/auth/signup.test.ts
Normal file
465
packages/core/tests/unit/auth/signup.test.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
import type { AuthAdapter, EmailSendFn } from "@emdash-cms/auth";
|
||||
import type { EmailMessage } from "@emdash-cms/auth";
|
||||
import {
|
||||
Role,
|
||||
canSignup,
|
||||
requestSignup,
|
||||
validateSignupToken,
|
||||
completeSignup,
|
||||
SignupError,
|
||||
} from "@emdash-cms/auth";
|
||||
import { createKyselyAdapter } from "@emdash-cms/auth/adapters/kysely";
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Regex patterns for token validation
|
||||
const TOKEN_PARAM_REGEX = /token=/;
|
||||
const TOKEN_EXTRACT_REGEX = /token=([a-zA-Z0-9_-]+)/;
|
||||
|
||||
describe("Self-Signup", () => {
|
||||
let db: Kysely<Database>;
|
||||
let adapter: AuthAdapter;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
adapter = createKyselyAdapter(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("canSignup", () => {
|
||||
it("should return null for email with no allowed domain", async () => {
|
||||
const result = await canSignup(adapter, "user@notallowed.com");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for email with disabled domain", async () => {
|
||||
// Create a disabled domain
|
||||
await adapter.createAllowedDomain("disabled.com", Role.AUTHOR);
|
||||
await adapter.updateAllowedDomain("disabled.com", false);
|
||||
|
||||
const result = await canSignup(adapter, "user@disabled.com");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return allowed:true and role for email with allowed domain", async () => {
|
||||
await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);
|
||||
|
||||
const result = await canSignup(adapter, "user@allowed.com");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.allowed).toBe(true);
|
||||
expect(result?.role).toBe(Role.AUTHOR);
|
||||
});
|
||||
|
||||
it("should return correct role for each domain", async () => {
|
||||
await adapter.createAllowedDomain("authors.com", Role.AUTHOR);
|
||||
await adapter.createAllowedDomain("editors.com", Role.EDITOR);
|
||||
await adapter.createAllowedDomain("contributors.com", Role.CONTRIBUTOR);
|
||||
|
||||
const author = await canSignup(adapter, "user@authors.com");
|
||||
const editor = await canSignup(adapter, "user@editors.com");
|
||||
const contributor = await canSignup(adapter, "user@contributors.com");
|
||||
|
||||
expect(author?.role).toBe(Role.AUTHOR);
|
||||
expect(editor?.role).toBe(Role.EDITOR);
|
||||
expect(contributor?.role).toBe(Role.CONTRIBUTOR);
|
||||
});
|
||||
|
||||
it("should be case-insensitive for email domains", async () => {
|
||||
await adapter.createAllowedDomain("example.com", Role.AUTHOR);
|
||||
|
||||
const result = await canSignup(adapter, "User@EXAMPLE.COM");
|
||||
expect(result).not.toBeNull();
|
||||
});
|
||||
|
||||
it("should return null for invalid email format", async () => {
|
||||
const result = await canSignup(adapter, "not-an-email");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// Covers requestSignup(): sends a verification email when the address's
// domain is on the allow-list, and silently no-ops otherwise — the silent
// failure prevents callers from enumerating which domains/users exist.
describe("requestSignup", () => {
  let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
  // Every message "sent" through the mock is captured here so tests can
  // inspect recipient, subject, and body.
  let sentEmails: Array<EmailMessage>;

  beforeEach(() => {
    sentEmails = [];
    mockEmailSend = vi.fn(async (email: EmailMessage) => {
      sentEmails.push(email);
    });
  });

  it("should send verification email for allowed domain", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "newuser@allowed.com",
    );

    expect(mockEmailSend).toHaveBeenCalledTimes(1);
    expect(sentEmails[0]!.to).toBe("newuser@allowed.com");
    expect(sentEmails[0]!.subject).toContain("Test Site");
    // The verification link is baseUrl + the fixed signup-verify API route.
    expect(sentEmails[0]!.text).toContain(
      "https://example.com/_emdash/api/auth/signup/verify?token=",
    );
    expect(sentEmails[0]!.text).toContain("verify");
  });

  it("should fail silently for disallowed domain (no email sent)", async () => {
    // No allowed domain created — the request should be dropped quietly.
    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "user@notallowed.com",
    );

    expect(mockEmailSend).not.toHaveBeenCalled();
  });

  it("should fail silently if user already exists (no email sent)", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    // Create existing user
    await adapter.createUser({
      email: "existing@allowed.com",
      name: "Existing User",
    });

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "existing@allowed.com",
    );

    expect(mockEmailSend).not.toHaveBeenCalled();
  });

  it("should create a token in the database", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.EDITOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "newuser@allowed.com",
    );

    // The email should contain a verification link with a token
    expect(sentEmails[0]!.text).toMatch(TOKEN_PARAM_REGEX);
  });
});
|
||||
|
||||
describe("validateSignupToken", () => {
|
||||
let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
|
||||
let capturedToken: string | null;
|
||||
|
||||
beforeEach(() => {
|
||||
capturedToken = null;
|
||||
mockEmailSend = vi.fn(async (email: EmailMessage) => {
|
||||
// Extract token from email text
|
||||
const match = email.text.match(TOKEN_EXTRACT_REGEX);
|
||||
capturedToken = match ? (match[1] ?? null) : null;
|
||||
});
|
||||
});
|
||||
|
||||
it("should validate a valid token and return email/role", async () => {
|
||||
await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);
|
||||
|
||||
await requestSignup(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
email: mockEmailSend,
|
||||
siteName: "Test Site",
|
||||
},
|
||||
adapter,
|
||||
"newuser@allowed.com",
|
||||
);
|
||||
|
||||
expect(capturedToken).not.toBeNull();
|
||||
|
||||
const result = await validateSignupToken(adapter, capturedToken!);
|
||||
|
||||
expect(result.email).toBe("newuser@allowed.com");
|
||||
expect(result.role).toBe(Role.AUTHOR);
|
||||
});
|
||||
|
||||
it("should throw invalid_token for non-existent token", async () => {
|
||||
// Use a properly formatted but non-existent token (base64url encoded)
|
||||
const fakeToken = "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo"; // base64url of "abcdefghijklmnopqrstuvwxyz"
|
||||
|
||||
try {
|
||||
await validateSignupToken(adapter, fakeToken);
|
||||
expect.fail("Should have thrown");
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(SignupError);
|
||||
expect((error as SignupError).code).toBe("invalid_token");
|
||||
}
|
||||
});
|
||||
|
||||
it("should throw token_expired for expired token", async () => {
|
||||
await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);
|
||||
|
||||
await requestSignup(
|
||||
{
|
||||
baseUrl: "https://example.com",
|
||||
email: mockEmailSend,
|
||||
siteName: "Test Site",
|
||||
},
|
||||
adapter,
|
||||
"newuser@allowed.com",
|
||||
);
|
||||
|
||||
expect(capturedToken).not.toBeNull();
|
||||
|
||||
// Manually expire the token by updating it in the database
|
||||
// We need to find the token hash and update its expiry
|
||||
// Since we can't easily do this, we'll test the error path differently
|
||||
// by creating a token directly with an expired date
|
||||
|
||||
// First, validate and get the hash
|
||||
const result = await validateSignupToken(adapter, capturedToken!);
|
||||
expect(result.email).toBe("newuser@allowed.com");
|
||||
|
||||
// For expiry testing, we'd need direct DB access to set expiry in the past
|
||||
// This is tested implicitly by the token creation with short expiry
|
||||
});
|
||||
});
|
||||
|
||||
// Covers completeSignup(): exchanges a valid signup token for a new user
// account, enforcing single-use tokens and guarding against a user being
// created concurrently between request and completion.
describe("completeSignup", () => {
  let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
  // Token extracted from the most recent verification email, if any.
  let capturedToken: string | null;

  beforeEach(() => {
    capturedToken = null;
    mockEmailSend = vi.fn(async (email: EmailMessage) => {
      const match = email.text.match(TOKEN_EXTRACT_REGEX);
      capturedToken = match ? (match[1] ?? null) : null;
    });
  });

  it("should create user with correct email and role", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "newuser@allowed.com",
    );

    const user = await completeSignup(adapter, capturedToken!, {
      name: "New User",
    });

    expect(user.email).toBe("newuser@allowed.com");
    expect(user.name).toBe("New User");
    // Role comes from the allowed-domain record, not from the caller.
    expect(user.role).toBe(Role.AUTHOR);
    expect(user.emailVerified).toBe(true);
  });

  it("should throw user_exists if user created during signup flow (race condition)", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "newuser@allowed.com",
    );

    // Simulate race condition - create user before completing signup
    await adapter.createUser({
      email: "newuser@allowed.com",
      name: "Created During Race",
    });

    // Try to complete signup - should fail with user_exists
    try {
      await completeSignup(adapter, capturedToken!, { name: "New User" });
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(SignupError);
      expect((error as SignupError).code).toBe("user_exists");
    }
  });

  it("should throw invalid_token for non-existent token", async () => {
    // Use a properly formatted but non-existent token (base64url encoded)
    const fakeToken = "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXo"; // base64url of "abcdefghijklmnopqrstuvwxyz"

    try {
      await completeSignup(adapter, fakeToken, { name: "User" });
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(SignupError);
      expect((error as SignupError).code).toBe("invalid_token");
    }
  });

  it("should delete token after successful signup (single-use)", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "newuser@allowed.com",
    );

    // First completion should succeed
    await completeSignup(adapter, capturedToken!, { name: "New User" });

    // Second attempt should fail - token is deleted
    await expect(
      completeSignup(adapter, capturedToken!, { name: "Another User" }),
    ).rejects.toThrow(SignupError);
  });

  it("should allow optional name and avatarUrl", async () => {
    // NOTE(review): despite the title, avatarUrl is never passed here — only
    // omission of name is exercised. Confirm avatarUrl has coverage elsewhere.
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "noname@allowed.com",
    );

    const user = await completeSignup(adapter, capturedToken!, {});

    expect(user.email).toBe("noname@allowed.com");
    expect(user.name).toBeNull();
  });

  it("should set emailVerified to true", async () => {
    await adapter.createAllowedDomain("allowed.com", Role.AUTHOR);

    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test Site",
      },
      adapter,
      "verified@allowed.com",
    );

    const user = await completeSignup(adapter, capturedToken!, {
      name: "Verified User",
    });

    // Completing signup via an emailed token proves address ownership.
    expect(user.emailVerified).toBe(true);
  });
});
|
||||
|
||||
// End-to-end exercise of the signup pipeline: allow-list a domain, check
// eligibility, request signup, validate the emailed token, complete signup,
// and confirm the user is persisted. Also verifies a disabled domain blocks
// the flow at every stage.
describe("Integration: Full Signup Flow", () => {
  let mockEmailSend: EmailSendFn & ReturnType<typeof vi.fn>;
  // Token extracted from the most recent verification email, if any.
  let capturedToken: string | null;

  beforeEach(() => {
    capturedToken = null;
    mockEmailSend = vi.fn(async (email: EmailMessage) => {
      const match = email.text.match(TOKEN_EXTRACT_REGEX);
      capturedToken = match ? (match[1] ?? null) : null;
    });
  });

  it("should complete full signup flow for allowed domain", async () => {
    // 1. Admin adds allowed domain
    await adapter.createAllowedDomain("company.com", Role.EDITOR);

    // 2. Check if signup is allowed
    const check = await canSignup(adapter, "employee@company.com");
    expect(check?.allowed).toBe(true);
    expect(check?.role).toBe(Role.EDITOR);

    // 3. Request signup
    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Company CMS",
      },
      adapter,
      "employee@company.com",
    );
    expect(capturedToken).not.toBeNull();

    // 4. Validate token (simulating email link click)
    const validation = await validateSignupToken(adapter, capturedToken!);
    expect(validation.email).toBe("employee@company.com");
    expect(validation.role).toBe(Role.EDITOR);

    // 5. Complete signup
    const user = await completeSignup(adapter, capturedToken!, {
      name: "New Employee",
    });

    expect(user.email).toBe("employee@company.com");
    expect(user.name).toBe("New Employee");
    expect(user.role).toBe(Role.EDITOR);
    expect(user.emailVerified).toBe(true);

    // 6. Verify user exists in database
    const fetchedUser = await adapter.getUserByEmail("employee@company.com");
    expect(fetchedUser).not.toBeNull();
    expect(fetchedUser?.id).toBe(user.id);
  });

  it("should prevent signup for disabled domain", async () => {
    // Add domain then disable it
    await adapter.createAllowedDomain("company.com", Role.AUTHOR);
    await adapter.updateAllowedDomain("company.com", false);

    // Check - should not be allowed
    const check = await canSignup(adapter, "user@company.com");
    expect(check).toBeNull();

    // Request signup - should fail silently (no email)
    await requestSignup(
      {
        baseUrl: "https://example.com",
        email: mockEmailSend,
        siteName: "Test",
      },
      adapter,
      "user@company.com",
    );
    expect(mockEmailSend).not.toHaveBeenCalled();
  });
});
|
||||
});
|
||||
97
packages/core/tests/unit/auth/trusted-proxy.test.ts
Normal file
97
packages/core/tests/unit/auth/trusted-proxy.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* Tests for getTrustedProxyHeaders — resolves the list of trusted client-IP
|
||||
* headers from config, falling back to the EMDASH_TRUSTED_PROXY_HEADERS env
|
||||
* var, then to an empty array.
|
||||
*
|
||||
* The helper lets operators declare which headers they trust when running
|
||||
* behind a reverse proxy. On Cloudflare the `cf` object is used instead and
|
||||
* this list is usually empty.
|
||||
*/
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
_resetTrustedProxyHeadersCache,
|
||||
getTrustedProxyHeaders,
|
||||
} from "../../../src/auth/trusted-proxy.js";
|
||||
|
||||
// Covers getTrustedProxyHeaders(): config takes precedence over the
// EMDASH_TRUSTED_PROXY_HEADERS env var; entries are trimmed, lowercased,
// and validated as header names; invalid entries are dropped silently.
describe("getTrustedProxyHeaders", () => {
  // Snapshot the env var at suite load so afterEach can restore it exactly.
  const ORIGINAL_ENV = process.env.EMDASH_TRUSTED_PROXY_HEADERS;

  beforeEach(() => {
    // The helper memoizes its result; reset so each test observes the env
    // mutations it makes.
    _resetTrustedProxyHeadersCache();
  });

  afterEach(() => {
    if (ORIGINAL_ENV === undefined) {
      delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
    } else {
      process.env.EMDASH_TRUSTED_PROXY_HEADERS = ORIGINAL_ENV;
    }
    _resetTrustedProxyHeadersCache();
  });

  it("returns config value when set", () => {
    expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["x-real-ip"] })).toEqual(["x-real-ip"]);
  });

  it("prefers config over env", () => {
    process.env.EMDASH_TRUSTED_PROXY_HEADERS = "fly-client-ip";
    expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["x-real-ip"] })).toEqual(["x-real-ip"]);
  });

  it("falls back to env when config is absent", () => {
    process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip,fly-client-ip";
    expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "fly-client-ip"]);
  });

  it("trims whitespace and drops empty entries from env", () => {
    process.env.EMDASH_TRUSTED_PROXY_HEADERS = " x-real-ip , , fly-client-ip ";
    expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "fly-client-ip"]);
  });

  it("lowercases header names for consistent matching", () => {
    // Header lookups go through Headers.get() which is case-insensitive,
    // so we normalise the list here to avoid double-normalising elsewhere.
    expect(getTrustedProxyHeaders({ trustedProxyHeaders: ["X-Real-IP", "Fly-Client-IP"] })).toEqual(
      ["x-real-ip", "fly-client-ip"],
    );
  });

  it("returns empty array when neither config nor env is set", () => {
    delete process.env.EMDASH_TRUSTED_PROXY_HEADERS;
    expect(getTrustedProxyHeaders(undefined)).toEqual([]);
  });

  it("returns empty array when config has empty list", () => {
    // Env is set on purpose: proves the empty config list wins over it.
    process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip";
    // An explicit empty array means "trust nothing" — do not fall through
    // to the env. Operators use this to override an inherited env value.
    expect(getTrustedProxyHeaders({ trustedProxyHeaders: [] })).toEqual([]);
  });

  // Header names must be valid RFC 7230 tokens; passing anything else into
  // `Headers.get()` throws. Drop invalid entries silently rather than
  // taking down every rate-limited endpoint with a 500.
  it("drops invalid header names from config", () => {
    expect(
      getTrustedProxyHeaders({
        trustedProxyHeaders: ["x-real-ip", "", "invalid name", "bad:colon", "ok-name"],
      }),
    ).toEqual(["x-real-ip", "ok-name"]);
  });

  it("drops invalid header names from env", () => {
    process.env.EMDASH_TRUSTED_PROXY_HEADERS = "x-real-ip, x y z , bad:one, ok-name";
    expect(getTrustedProxyHeaders(undefined)).toEqual(["x-real-ip", "ok-name"]);
  });

  it("trims whitespace from config entries before matching", () => {
    // Common typo: `"x-real-ip "` (trailing space). Previously the raw
    // value was lowercased but not trimmed, so validation silently
    // dropped it and per-IP bucketing was disabled.
    expect(
      getTrustedProxyHeaders({ trustedProxyHeaders: [" x-real-ip ", "fly-client-ip"] }),
    ).toEqual(["x-real-ip", "fly-client-ip"]);
  });
});
|
||||
311
packages/core/tests/unit/bylines/bylines-query.test.ts
Normal file
311
packages/core/tests/unit/bylines/bylines-query.test.ts
Normal file
@@ -0,0 +1,311 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import { BylineRepository } from "../../../src/database/repositories/byline.js";
|
||||
import { ContentRepository } from "../../../src/database/repositories/content.js";
|
||||
import { UserRepository } from "../../../src/database/repositories/user.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { SQL_BATCH_SIZE } from "../../../src/utils/chunks.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
// Mock the loader's getDb to return our test database
|
||||
vi.mock("../../../src/loader.js", () => ({
|
||||
getDb: vi.fn(),
|
||||
}));
|
||||
|
||||
import {
|
||||
getByline,
|
||||
getBylineBySlug,
|
||||
getEntryBylines,
|
||||
getBylinesForEntries,
|
||||
} from "../../../src/bylines/index.js";
|
||||
import { getDb } from "../../../src/loader.js";
|
||||
|
||||
// Covers the byline query helpers (getByline, getBylineBySlug,
// getEntryBylines, getBylinesForEntries) against a real test database.
// The module-level `getDb` is mocked (see vi.mock above) so the helpers
// resolve to the per-test database created in beforeEach.
describe("Byline query functions", () => {
  let db: Kysely<Database>;
  let bylineRepo: BylineRepository;
  let contentRepo: ContentRepository;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
    bylineRepo = new BylineRepository(db);
    contentRepo = new ContentRepository(db);
    // Point the mocked loader at this test's database.
    vi.mocked(getDb).mockResolvedValue(db);
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
    vi.restoreAllMocks();
  });

  describe("getByline", () => {
    it("returns a byline by ID", async () => {
      const created = await bylineRepo.create({
        slug: "jane-doe",
        displayName: "Jane Doe",
      });

      const result = await getByline(created.id);

      expect(result).not.toBeNull();
      expect(result?.id).toBe(created.id);
      expect(result?.displayName).toBe("Jane Doe");
      expect(result?.slug).toBe("jane-doe");
    });

    it("returns null for non-existent ID", async () => {
      const result = await getByline("non-existent");
      expect(result).toBeNull();
    });
  });

  describe("getBylineBySlug", () => {
    it("returns a byline by slug", async () => {
      await bylineRepo.create({
        slug: "john-smith",
        displayName: "John Smith",
      });

      const result = await getBylineBySlug("john-smith");

      expect(result).not.toBeNull();
      expect(result?.displayName).toBe("John Smith");
    });

    it("returns null for non-existent slug", async () => {
      const result = await getBylineBySlug("nobody");
      expect(result).toBeNull();
    });
  });

  describe("getEntryBylines", () => {
    it("returns explicit byline credits for an entry", async () => {
      const lead = await bylineRepo.create({
        slug: "lead-author",
        displayName: "Lead Author",
      });
      const editor = await bylineRepo.create({
        slug: "editor",
        displayName: "Editor",
      });

      const post = await contentRepo.create({
        type: "post",
        slug: "my-post",
        data: { title: "My Post" },
      });

      await bylineRepo.setContentBylines("post", post.id, [
        { bylineId: lead.id },
        { bylineId: editor.id, roleLabel: "Contributing Editor" },
      ]);

      const bylines = await getEntryBylines("post", post.id);

      // Credits come back in the order they were set, tagged "explicit".
      expect(bylines).toHaveLength(2);
      expect(bylines[0]?.byline.displayName).toBe("Lead Author");
      expect(bylines[0]?.sortOrder).toBe(0);
      expect(bylines[0]?.source).toBe("explicit");
      expect(bylines[1]?.byline.displayName).toBe("Editor");
      expect(bylines[1]?.roleLabel).toBe("Contributing Editor");
      expect(bylines[1]?.source).toBe("explicit");
    });

    it("falls back to user-linked byline when no explicit credits", async () => {
      // Create a user
      const userRepo = new UserRepository(db);
      const user = await userRepo.create({
        email: "author@example.com",
        displayName: "Author User",
        role: "editor",
      });

      // Create a byline linked to the user
      await bylineRepo.create({
        slug: "author-user",
        displayName: "Author User",
        userId: user.id,
      });

      // Create a post with this user as author, no explicit bylines
      const post = await contentRepo.create({
        type: "post",
        slug: "authored-post",
        data: { title: "Authored Post" },
        authorId: user.id,
      });

      const bylines = await getEntryBylines("post", post.id);

      // Fallback credits are tagged "inferred" and carry no role label.
      expect(bylines).toHaveLength(1);
      expect(bylines[0]?.byline.displayName).toBe("Author User");
      expect(bylines[0]?.source).toBe("inferred");
      expect(bylines[0]?.roleLabel).toBeNull();
    });

    it("returns empty array when no bylines and no author fallback", async () => {
      const post = await contentRepo.create({
        type: "post",
        slug: "no-author-post",
        data: { title: "No Author" },
      });

      const bylines = await getEntryBylines("post", post.id);
      expect(bylines).toHaveLength(0);
    });
  });

  describe("getBylinesForEntries", () => {
    it("batch-fetches byline credits for multiple entries", async () => {
      const author1 = await bylineRepo.create({
        slug: "author-one",
        displayName: "Author One",
      });
      const author2 = await bylineRepo.create({
        slug: "author-two",
        displayName: "Author Two",
      });

      const post1 = await contentRepo.create({
        type: "post",
        slug: "post-1",
        data: { title: "Post 1" },
      });
      const post2 = await contentRepo.create({
        type: "post",
        slug: "post-2",
        data: { title: "Post 2" },
      });
      const post3 = await contentRepo.create({
        type: "post",
        slug: "post-3",
        data: { title: "Post 3" },
      });

      await bylineRepo.setContentBylines("post", post1.id, [{ bylineId: author1.id }]);
      await bylineRepo.setContentBylines("post", post2.id, [
        { bylineId: author1.id },
        { bylineId: author2.id, roleLabel: "Contributor" },
      ]);
      // post3 has no bylines

      const result = await getBylinesForEntries(
        "post",
        [post1, post2, post3].map((p) => ({ id: p.id, authorId: p.authorId })),
      );

      expect(result.get(post1.id)).toHaveLength(1);
      expect(result.get(post1.id)?.[0]?.byline.displayName).toBe("Author One");
      expect(result.get(post1.id)?.[0]?.source).toBe("explicit");

      expect(result.get(post2.id)).toHaveLength(2);
      expect(result.get(post2.id)?.[0]?.byline.displayName).toBe("Author One");
      expect(result.get(post2.id)?.[1]?.byline.displayName).toBe("Author Two");
      expect(result.get(post2.id)?.[1]?.roleLabel).toBe("Contributor");

      // Entries with no credits still get a (empty) map entry.
      expect(result.get(post3.id)).toHaveLength(0);
    });

    it("returns inferred bylines for entries without explicit credits", async () => {
      const userRepo = new UserRepository(db);
      const user = await userRepo.create({
        email: "batch-author@example.com",
        displayName: "Batch Author",
        role: "editor",
      });

      await bylineRepo.create({
        slug: "batch-author",
        displayName: "Batch Author",
        userId: user.id,
      });

      const post = await contentRepo.create({
        type: "post",
        slug: "batch-post",
        data: { title: "Batch Post" },
        authorId: user.id,
      });

      const result = await getBylinesForEntries("post", [{ id: post.id, authorId: post.authorId }]);

      expect(result.get(post.id)).toHaveLength(1);
      expect(result.get(post.id)?.[0]?.source).toBe("inferred");
      expect(result.get(post.id)?.[0]?.byline.displayName).toBe("Batch Author");
    });

    it("handles batches larger than SQL_BATCH_SIZE across explicit and inferred bylines", async () => {
      // Builds SQL_BATCH_SIZE + 4 entries so the helper must chunk its
      // IN-clause queries; explicit credits bracket the inferred run to
      // catch chunk-boundary mixups between the two lookup paths.
      const userRepo = new UserRepository(db);
      const explicitByline = await bylineRepo.create({
        slug: "large-batch-explicit",
        displayName: "Large Batch Explicit",
      });

      const explicitPost1 = await contentRepo.create({
        type: "post",
        slug: "large-batch-explicit-1",
        data: { title: "Large Batch Explicit 1" },
      });
      await bylineRepo.setContentBylines("post", explicitPost1.id, [
        { bylineId: explicitByline.id },
      ]);

      const inferredPosts: { id: string; authorId: string | null }[] = [];
      for (let i = 0; i < SQL_BATCH_SIZE + 2; i++) {
        const user = await userRepo.create({
          email: `large-batch-${i}@example.com`,
          displayName: `Large Batch ${i}`,
          role: "editor",
        });

        await bylineRepo.create({
          slug: `large-batch-${i}`,
          displayName: `Large Batch ${i}`,
          userId: user.id,
        });

        const post = await contentRepo.create({
          type: "post",
          slug: `large-batch-post-${i}`,
          data: { title: `Large Batch Post ${i}` },
          authorId: user.id,
        });
        inferredPosts.push({ id: post.id, authorId: post.authorId });
      }

      const explicitPost2 = await contentRepo.create({
        type: "post",
        slug: "large-batch-explicit-2",
        data: { title: "Large Batch Explicit 2" },
      });
      await bylineRepo.setContentBylines("post", explicitPost2.id, [
        { bylineId: explicitByline.id },
      ]);

      const inferredPostIds = inferredPosts.map((p) => p.id);
      const entries = [
        { id: explicitPost1.id, authorId: explicitPost1.authorId },
        ...inferredPosts,
        { id: explicitPost2.id, authorId: explicitPost2.authorId },
      ];
      const result = await getBylinesForEntries("post", entries);

      expect(result.size).toBe(entries.length);
      expect(result.get(explicitPost1.id)?.[0]?.source).toBe("explicit");
      expect(result.get(explicitPost1.id)?.[0]?.byline.displayName).toBe("Large Batch Explicit");
      expect(result.get(explicitPost2.id)?.[0]?.source).toBe("explicit");
      expect(result.get(explicitPost2.id)?.[0]?.byline.displayName).toBe("Large Batch Explicit");
      expect(result.get(inferredPostIds[0]!)?.[0]?.source).toBe("inferred");
      expect(result.get(inferredPostIds[0]!)?.[0]?.byline.displayName).toBe("Large Batch 0");
      expect(result.get(inferredPostIds[SQL_BATCH_SIZE + 1]!)?.[0]?.source).toBe("inferred");
      expect(result.get(inferredPostIds[SQL_BATCH_SIZE + 1]!)?.[0]?.byline.displayName).toBe(
        `Large Batch ${SQL_BATCH_SIZE + 1}`,
      );
    });

    it("returns empty map for empty input", async () => {
      const result = await getBylinesForEntries("post", []);
      expect(result.size).toBe(0);
    });
  });
});
|
||||
128
packages/core/tests/unit/cache-hints.test.ts
Normal file
128
packages/core/tests/unit/cache-hints.test.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { handleContentCreate } from "../../src/api/index.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
import { emdashLoader } from "../../src/loader.js";
|
||||
import { runWithContext } from "../../src/request-context.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../utils/test-db.js";
|
||||
|
||||
// Covers the cacheHint metadata attached by the emdash live loader: the
// tags and lastModified values that downstream caches use for invalidation.
describe("Cache hints", () => {
  let db: Kysely<Database>;

  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
  });

  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  // Test helper: create and publish a "post" entry, returning the created
  // item. Throws if creation fails so tests fail loudly on bad fixtures.
  async function createPublishedPost(title: string) {
    const result = await handleContentCreate(db, "post", {
      data: { title },
      status: "published",
    });
    if (!result.success) throw new Error("Failed to create post");
    return result.data!.item;
  }

  describe("loadCollection cacheHint", () => {
    it("should tag collection with type name", async () => {
      await createPublishedPost("First Post");
      await createPublishedPost("Second Post");

      const loader = emdashLoader();
      const result = await runWithContext({ editMode: false, db }, () =>
        loader.loadCollection!({ filter: { type: "post" } }),
      );

      expect(result.cacheHint).toBeDefined();
      expect(result.cacheHint!.tags).toEqual(["post"]);
    });

    it("should include lastModified from most recent entry", async () => {
      await createPublishedPost("First Post");
      const second = await createPublishedPost("Second Post");

      const loader = emdashLoader();
      const result = await runWithContext({ editMode: false, db }, () =>
        loader.loadCollection!({ filter: { type: "post" } }),
      );

      expect(result.cacheHint!.lastModified).toBeInstanceOf(Date);
      // lastModified should be >= the second post's updated_at
      const secondUpdated = new Date(second.updatedAt);
      expect(result.cacheHint!.lastModified!.getTime()).toBeGreaterThanOrEqual(
        secondUpdated.getTime(),
      );
    });
  });

  describe("entry-level cacheHint", () => {
    it("should tag each entry with its database ID", async () => {
      const post = await createPublishedPost("Test Post");

      const loader = emdashLoader();
      const result = await runWithContext({ editMode: false, db }, () =>
        loader.loadCollection!({ filter: { type: "post" } }),
      );

      expect(result.entries).toHaveLength(1);
      const entry = result.entries![0];
      expect(entry.cacheHint).toBeDefined();
      expect(entry.cacheHint!.tags).toEqual([post.id]);
    });

    it("should include lastModified on each entry", async () => {
      await createPublishedPost("Test Post");

      const loader = emdashLoader();
      const result = await runWithContext({ editMode: false, db }, () =>
        loader.loadCollection!({ filter: { type: "post" } }),
      );

      const entry = result.entries![0];
      expect(entry.cacheHint!.lastModified).toBeInstanceOf(Date);
    });
  });

  describe("loadEntry cacheHint", () => {
    it("should tag entry with its database ID", async () => {
      const post = await createPublishedPost("Test Post");

      const loader = emdashLoader();
      const result = await runWithContext({ editMode: false, db }, () =>
        loader.loadEntry!({ filter: { type: "post", id: post.slug } }),
      );

      // loadEntry returns the entry directly (LiveDataEntry), not { entry, cacheHint }
      expect(result).toBeDefined();
      expect(result!.cacheHint).toBeDefined();
      expect(result!.cacheHint!.tags).toEqual([post.id]);
    });
  });

  describe("invalidation tag alignment", () => {
    it("should produce tags that match the invalidation pattern", async () => {
      const post = await createPublishedPost("Test Post");

      const loader = emdashLoader();
      const collectionResult = await runWithContext({ editMode: false, db }, () =>
        loader.loadCollection!({ filter: { type: "post" } }),
      );

      // The route invalidates with tags: [collection, id]
      // Collection pages are tagged with [type] -> matches "collection" tag
      // Entry pages are tagged with [entryId] -> matches "id" tag
      const invalidationTags = ["post", post.id];

      // Collection-level tag should be hit by invalidation
      expect(invalidationTags).toContain(collectionResult.cacheHint!.tags![0]);

      // Entry-level tag should be hit by invalidation
      const entry = collectionResult.entries![0];
      expect(invalidationTags).toContain(entry.cacheHint!.tags![0]);
    });
  });
});
|
||||
277
packages/core/tests/unit/cleanup.test.ts
Normal file
277
packages/core/tests/unit/cleanup.test.ts
Normal file
@@ -0,0 +1,277 @@
|
||||
/**
|
||||
* Tests for the cleanup subsystems.
|
||||
*
|
||||
* Note: runSystemCleanup() is not tested directly here because it imports
|
||||
* from @emdash-cms/auth/adapters/kysely, which requires the auth package to
|
||||
* be built. Instead, we test each subsystem independently:
|
||||
* - cleanupExpiredChallenges: tested in auth/challenge-store.test.ts
|
||||
* - deleteExpiredTokens: tested below using direct DB operations
|
||||
* - cleanupPendingUploads: tested below via MediaRepository
|
||||
* - pruneOldRevisions: tested below via RevisionRepository
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import { MediaRepository } from "../../src/database/repositories/media.js";
|
||||
import { RevisionRepository } from "../../src/database/repositories/revision.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
import { setupTestDatabase, setupTestDatabaseWithCollections } from "../utils/test-db.js";
|
||||
|
||||
describe("Revision Pruning", () => {
|
||||
let db: Kysely<Database>;
|
||||
let revisionRepo: RevisionRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
revisionRepo = new RevisionRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
it("prunes old revisions keeping the most recent N", async () => {
|
||||
const entryId = ulid();
|
||||
|
||||
// Create a content entry
|
||||
const { sql } = await import("kysely");
|
||||
await sql`
|
||||
INSERT INTO ec_post (id, slug, status, created_at, updated_at, version)
|
||||
VALUES (${entryId}, ${"test-post"}, ${"draft"}, ${new Date().toISOString()}, ${new Date().toISOString()}, ${1})
|
||||
`.execute(db);
|
||||
|
||||
// Create 200 revisions
|
||||
for (let i = 0; i < 200; i++) {
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId,
|
||||
data: { title: `Version ${i + 1}` },
|
||||
});
|
||||
}
|
||||
|
||||
const countBefore = await revisionRepo.countByEntry("post", entryId);
|
||||
expect(countBefore).toBe(200);
|
||||
|
||||
// Prune to keep 50
|
||||
const pruned = await revisionRepo.pruneOldRevisions("post", entryId, 50);
|
||||
|
||||
expect(pruned).toBe(150);
|
||||
|
||||
const countAfter = await revisionRepo.countByEntry("post", entryId);
|
||||
expect(countAfter).toBe(50);
|
||||
|
||||
// Verify the remaining 50 are the newest
|
||||
const remaining = await revisionRepo.findByEntry("post", entryId);
|
||||
expect(remaining[0]?.data.title).toBe("Version 200");
|
||||
expect(remaining[49]?.data.title).toBe("Version 151");
|
||||
});
|
||||
|
||||
it("is a no-op when revision count is at or below keepCount", async () => {
|
||||
const entryId = ulid();
|
||||
|
||||
const { sql } = await import("kysely");
|
||||
await sql`
|
||||
INSERT INTO ec_post (id, slug, status, created_at, updated_at, version)
|
||||
VALUES (${entryId}, ${"test-post-2"}, ${"draft"}, ${new Date().toISOString()}, ${new Date().toISOString()}, ${1})
|
||||
`.execute(db);
|
||||
|
||||
// Create 10 revisions
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId,
|
||||
data: { title: `Version ${i + 1}` },
|
||||
});
|
||||
}
|
||||
|
||||
const pruned = await revisionRepo.pruneOldRevisions("post", entryId, 50);
|
||||
expect(pruned).toBe(0);
|
||||
|
||||
const countAfter = await revisionRepo.countByEntry("post", entryId);
|
||||
expect(countAfter).toBe(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe("MediaRepository.cleanupPendingUploads", () => {
|
||||
let db: Kysely<Database>;
|
||||
let mediaRepo: MediaRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
mediaRepo = new MediaRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
it("deletes pending uploads older than the default 1 hour", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
// Create pending uploads
|
||||
for (let i = 0; i < 10; i++) {
|
||||
await mediaRepo.createPending({
|
||||
filename: `pending-${i}.jpg`,
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: `uploads/pending-${i}.jpg`,
|
||||
});
|
||||
}
|
||||
|
||||
// Advance past 1 hour
|
||||
vi.advanceTimersByTime(61 * 60 * 1000);
|
||||
|
||||
const deletedKeys = await mediaRepo.cleanupPendingUploads();
|
||||
expect(deletedKeys).toHaveLength(10);
|
||||
// Verify actual storage keys are returned
|
||||
for (let i = 0; i < 10; i++) {
|
||||
expect(deletedKeys).toContain(`uploads/pending-${i}.jpg`);
|
||||
}
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it("does not delete recent pending uploads", async () => {
|
||||
// Create pending uploads (current time -- not yet expired)
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await mediaRepo.createPending({
|
||||
filename: `recent-${i}.jpg`,
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: `uploads/recent-${i}.jpg`,
|
||||
});
|
||||
}
|
||||
|
||||
const deletedKeys = await mediaRepo.cleanupPendingUploads();
|
||||
expect(deletedKeys).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("does not delete ready or failed items", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
// Create items with different statuses
|
||||
await mediaRepo.create({
|
||||
filename: "ready.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: "uploads/ready.jpg",
|
||||
status: "ready",
|
||||
});
|
||||
|
||||
const pending = await mediaRepo.createPending({
|
||||
filename: "pending.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: "uploads/pending.jpg",
|
||||
});
|
||||
await mediaRepo.markFailed(pending.id);
|
||||
|
||||
// Advance past 1 hour
|
||||
vi.advanceTimersByTime(61 * 60 * 1000);
|
||||
|
||||
const deletedKeys = await mediaRepo.cleanupPendingUploads();
|
||||
expect(deletedKeys).toHaveLength(0); // failed + ready should not be deleted
|
||||
|
||||
vi.useRealTimers();
|
||||
|
||||
const remaining = await db.selectFrom("media").select("id").execute();
|
||||
expect(remaining).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("respects custom maxAgeMs parameter", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
await mediaRepo.createPending({
|
||||
filename: "short-lived.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
storageKey: "uploads/short-lived.jpg",
|
||||
});
|
||||
|
||||
// Advance 10 minutes
|
||||
vi.advanceTimersByTime(10 * 60 * 1000);
|
||||
|
||||
// Cleanup with 5 min max age
|
||||
const deletedKeys = await mediaRepo.cleanupPendingUploads(5 * 60 * 1000);
|
||||
expect(deletedKeys).toHaveLength(1);
|
||||
expect(deletedKeys[0]).toBe("uploads/short-lived.jpg");
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Expired token cleanup", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
it("deletes expired tokens while keeping valid ones", async () => {
|
||||
const now = new Date();
|
||||
const expired = new Date(now.getTime() - 60 * 1000).toISOString(); // 1 min ago
|
||||
|
||||
// Create a test user first (tokens reference users)
|
||||
const userId = ulid();
|
||||
await db
|
||||
.insertInto("users")
|
||||
.values({
|
||||
id: userId,
|
||||
email: "test@example.com",
|
||||
name: "Test",
|
||||
avatar_url: null,
|
||||
role: 50,
|
||||
email_verified: 1,
|
||||
disabled: 0,
|
||||
data: null,
|
||||
created_at: now.toISOString(),
|
||||
updated_at: now.toISOString(),
|
||||
})
|
||||
.execute();
|
||||
|
||||
// Create 100 expired tokens
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await db
|
||||
.insertInto("auth_tokens")
|
||||
.values({
|
||||
hash: `expired-hash-${i}`,
|
||||
user_id: userId,
|
||||
email: "test@example.com",
|
||||
type: "magic_link",
|
||||
role: null,
|
||||
invited_by: null,
|
||||
expires_at: expired,
|
||||
created_at: now.toISOString(),
|
||||
})
|
||||
.execute();
|
||||
}
|
||||
|
||||
// Create 5 valid tokens
|
||||
const validExpiry = new Date(now.getTime() + 15 * 60 * 1000).toISOString();
|
||||
for (let i = 0; i < 5; i++) {
|
||||
await db
|
||||
.insertInto("auth_tokens")
|
||||
.values({
|
||||
hash: `valid-hash-${i}`,
|
||||
user_id: userId,
|
||||
email: "test@example.com",
|
||||
type: "magic_link",
|
||||
role: null,
|
||||
invited_by: null,
|
||||
expires_at: validExpiry,
|
||||
created_at: now.toISOString(),
|
||||
})
|
||||
.execute();
|
||||
}
|
||||
|
||||
// Use the DB directly to simulate what deleteExpiredTokens does
|
||||
await db.deleteFrom("auth_tokens").where("expires_at", "<", new Date().toISOString()).execute();
|
||||
|
||||
// Verify only valid ones remain
|
||||
const remaining = await db.selectFrom("auth_tokens").select("hash").execute();
|
||||
|
||||
expect(remaining).toHaveLength(5);
|
||||
expect(remaining.every((r) => r.hash.startsWith("valid-"))).toBe(true);
|
||||
});
|
||||
});
|
||||
360
packages/core/tests/unit/cli/bundle-utils.test.ts
Normal file
360
packages/core/tests/unit/cli/bundle-utils.test.ts
Normal file
@@ -0,0 +1,360 @@
|
||||
/**
|
||||
* Tests for bundle utility functions.
|
||||
*
|
||||
* Focuses on the functions where bugs would be non-obvious:
|
||||
* - Tarball round-trip (custom tar implementation)
|
||||
* - Manifest extraction (shape transformation, function stripping)
|
||||
* - Source entry resolution (path mapping logic)
|
||||
* - Node.js built-in detection (regex against bundled output)
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
|
||||
import { mkdtemp, rm, writeFile, mkdir, readFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
|
||||
|
||||
import {
|
||||
extractManifest,
|
||||
createTarball,
|
||||
resolveSourceEntry,
|
||||
findNodeBuiltinImports,
|
||||
findBuildOutput,
|
||||
findSourceExports,
|
||||
} from "../../../src/cli/commands/bundle-utils.js";
|
||||
import type { ResolvedPlugin } from "../../../src/plugins/types.js";
|
||||
|
||||
function mockPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
hooks: {},
|
||||
routes: {},
|
||||
admin: { pages: [], widgets: [] },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("extractManifest", () => {
|
||||
it("converts hooks from handler objects to name array", () => {
|
||||
const plugin = mockPlugin({
|
||||
hooks: {
|
||||
"content:beforeSave": {
|
||||
handler: vi.fn(),
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "abort",
|
||||
pluginId: "test",
|
||||
exclusive: false,
|
||||
},
|
||||
"media:afterUpload": {
|
||||
handler: vi.fn(),
|
||||
priority: 50,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "abort",
|
||||
pluginId: "test",
|
||||
exclusive: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const manifest = extractManifest(plugin);
|
||||
// content:beforeSave has all defaults → plain string
|
||||
// media:afterUpload has non-default priority → structured entry
|
||||
expect(manifest.hooks).toEqual([
|
||||
"content:beforeSave",
|
||||
{ name: "media:afterUpload", priority: 50 },
|
||||
]);
|
||||
});
|
||||
|
||||
it("converts routes from handler objects to name array", () => {
|
||||
const plugin = mockPlugin({
|
||||
routes: {
|
||||
sync: { handler: vi.fn() },
|
||||
webhook: { handler: vi.fn() },
|
||||
},
|
||||
});
|
||||
|
||||
const manifest = extractManifest(plugin);
|
||||
expect(manifest.routes).toEqual(["sync", "webhook"]);
|
||||
});
|
||||
|
||||
it("strips admin.entry (host-only concern, not in bundles)", () => {
|
||||
const plugin = mockPlugin({
|
||||
admin: {
|
||||
entry: "@test/plugin/admin",
|
||||
settingsSchema: { apiKey: { type: "string", label: "Key" } as any },
|
||||
pages: [{ id: "settings", title: "Settings" }],
|
||||
widgets: [],
|
||||
},
|
||||
});
|
||||
|
||||
const manifest = extractManifest(plugin);
|
||||
expect((manifest.admin as any).entry).toBeUndefined();
|
||||
expect(manifest.admin.settingsSchema).toBeDefined();
|
||||
expect(manifest.admin.pages).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("result is JSON-serializable (no functions survive)", () => {
|
||||
const plugin = mockPlugin({
|
||||
hooks: {
|
||||
"content:beforeSave": {
|
||||
handler: vi.fn(),
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "abort",
|
||||
pluginId: "test",
|
||||
exclusive: false,
|
||||
},
|
||||
},
|
||||
routes: { sync: { handler: vi.fn() } },
|
||||
});
|
||||
|
||||
const manifest = extractManifest(plugin);
|
||||
const json = JSON.stringify(manifest);
|
||||
const parsed = JSON.parse(json);
|
||||
|
||||
expect(parsed.hooks).toEqual(["content:beforeSave"]);
|
||||
expect(parsed.routes).toEqual(["sync"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createTarball", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-tar-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("produces a tarball that system tar can list", async () => {
|
||||
const srcDir = join(tempDir, "src");
|
||||
await mkdir(srcDir);
|
||||
await writeFile(join(srcDir, "manifest.json"), '{"id":"test"}');
|
||||
await writeFile(join(srcDir, "backend.js"), "export default {}");
|
||||
|
||||
const out = join(tempDir, "out.tar.gz");
|
||||
await createTarball(srcDir, out);
|
||||
|
||||
const listing = execSync(`tar tzf "${out}"`, { encoding: "utf-8" });
|
||||
const files = listing.trim().split("\n").toSorted();
|
||||
expect(files).toContain("manifest.json");
|
||||
expect(files).toContain("backend.js");
|
||||
});
|
||||
|
||||
it("preserves file content through pack/unpack", async () => {
|
||||
const srcDir = join(tempDir, "src");
|
||||
await mkdir(srcDir);
|
||||
const content = JSON.stringify({ id: "round-trip", version: "2.0.0" });
|
||||
await writeFile(join(srcDir, "manifest.json"), content);
|
||||
|
||||
const out = join(tempDir, "out.tar.gz");
|
||||
await createTarball(srcDir, out);
|
||||
|
||||
const extractDir = join(tempDir, "extract");
|
||||
await mkdir(extractDir);
|
||||
execSync(`tar xzf "${out}" -C "${extractDir}"`);
|
||||
|
||||
expect(await readFile(join(extractDir, "manifest.json"), "utf-8")).toBe(content);
|
||||
});
|
||||
|
||||
it("handles nested directories (screenshots/)", async () => {
|
||||
const srcDir = join(tempDir, "src");
|
||||
await mkdir(join(srcDir, "screenshots"), { recursive: true });
|
||||
await writeFile(join(srcDir, "manifest.json"), "{}");
|
||||
await writeFile(join(srcDir, "screenshots", "shot1.png"), "fake");
|
||||
|
||||
const out = join(tempDir, "out.tar.gz");
|
||||
await createTarball(srcDir, out);
|
||||
|
||||
const listing = execSync(`tar tzf "${out}"`, { encoding: "utf-8" });
|
||||
expect(listing).toContain("screenshots/shot1.png");
|
||||
});
|
||||
|
||||
it("handles binary content without corruption", async () => {
|
||||
const srcDir = join(tempDir, "src");
|
||||
await mkdir(srcDir);
|
||||
// Write bytes that would break text-mode handling
|
||||
const binary = Buffer.from([0x00, 0xff, 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]);
|
||||
await writeFile(join(srcDir, "icon.png"), binary);
|
||||
|
||||
const out = join(tempDir, "out.tar.gz");
|
||||
await createTarball(srcDir, out);
|
||||
|
||||
const extractDir = join(tempDir, "extract");
|
||||
await mkdir(extractDir);
|
||||
execSync(`tar xzf "${out}" -C "${extractDir}"`);
|
||||
|
||||
const extracted = await readFile(join(extractDir, "icon.png"));
|
||||
expect(extracted.equals(binary)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveSourceEntry", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-resolve-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("maps ./dist/index.mjs → src/index.ts", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.mjs");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("maps ./dist/index.js → src/index.ts", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.js");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("falls back to .tsx when .ts doesn't exist", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.tsx"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/index.mjs");
|
||||
expect(result).toBe(join(tempDir, "src", "index.tsx"));
|
||||
});
|
||||
|
||||
it("returns the direct path if it already exists", async () => {
|
||||
await mkdir(join(tempDir, "src"), { recursive: true });
|
||||
await writeFile(join(tempDir, "src", "index.ts"), "");
|
||||
|
||||
const result = await resolveSourceEntry(tempDir, "src/index.ts");
|
||||
expect(result).toBe(join(tempDir, "src", "index.ts"));
|
||||
});
|
||||
|
||||
it("returns undefined when nothing matches", async () => {
|
||||
const result = await resolveSourceEntry(tempDir, "./dist/missing.mjs");
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findBuildOutput", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-build-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("prefers .mjs over .js", async () => {
|
||||
await writeFile(join(tempDir, "index.mjs"), "");
|
||||
await writeFile(join(tempDir, "index.js"), "");
|
||||
|
||||
expect(await findBuildOutput(tempDir, "index")).toBe(join(tempDir, "index.mjs"));
|
||||
});
|
||||
|
||||
it("falls back through .js then .cjs", async () => {
|
||||
await writeFile(join(tempDir, "index.cjs"), "");
|
||||
expect(await findBuildOutput(tempDir, "index")).toBe(join(tempDir, "index.cjs"));
|
||||
});
|
||||
|
||||
it("returns undefined when no match", async () => {
|
||||
expect(await findBuildOutput(tempDir, "index")).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findNodeBuiltinImports", () => {
|
||||
it("detects require('node:fs') in bundled output", () => {
|
||||
expect(findNodeBuiltinImports(`const fs = require("node:fs");`)).toEqual(["fs"]);
|
||||
});
|
||||
|
||||
it("detects require('fs') without node: prefix", () => {
|
||||
expect(findNodeBuiltinImports(`const fs = require("fs");`)).toEqual(["fs"]);
|
||||
});
|
||||
|
||||
it("detects dynamic import('node:child_process')", () => {
|
||||
expect(findNodeBuiltinImports(`await import("node:child_process")`)).toEqual(["child_process"]);
|
||||
});
|
||||
|
||||
it("returns empty for code with no builtins", () => {
|
||||
expect(findNodeBuiltinImports(`import("emdash"); require("lodash");`)).toEqual([]);
|
||||
});
|
||||
|
||||
it("deduplicates repeated requires", () => {
|
||||
const code = `require("node:fs"); require("node:fs");`;
|
||||
expect(findNodeBuiltinImports(code)).toEqual(["fs"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("findSourceExports", () => {
|
||||
it("flags .ts exports", () => {
|
||||
const issues = findSourceExports({ ".": "./src/index.ts" });
|
||||
expect(issues).toEqual([{ exportPath: ".", resolvedPath: "./src/index.ts" }]);
|
||||
});
|
||||
|
||||
it("flags .tsx exports", () => {
|
||||
const issues = findSourceExports({ "./admin": "./src/admin.tsx" });
|
||||
expect(issues).toEqual([{ exportPath: "./admin", resolvedPath: "./src/admin.tsx" }]);
|
||||
});
|
||||
|
||||
it("flags .mts exports", () => {
|
||||
const issues = findSourceExports({ ".": "./src/index.mts" });
|
||||
expect(issues).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("flags .cts exports", () => {
|
||||
const issues = findSourceExports({ ".": "./src/index.cts" });
|
||||
expect(issues).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("flags .jsx exports", () => {
|
||||
const issues = findSourceExports({ ".": "./src/index.jsx" });
|
||||
expect(issues).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("accepts .mjs exports", () => {
|
||||
const issues = findSourceExports({ ".": "./dist/index.mjs" });
|
||||
expect(issues).toEqual([]);
|
||||
});
|
||||
|
||||
it("accepts .js exports", () => {
|
||||
const issues = findSourceExports({ ".": "./dist/index.js" });
|
||||
expect(issues).toEqual([]);
|
||||
});
|
||||
|
||||
it("handles conditional exports with import field", () => {
|
||||
const issues = findSourceExports({
|
||||
".": { import: "./src/index.ts", types: "./dist/index.d.mts" },
|
||||
});
|
||||
expect(issues).toEqual([{ exportPath: ".", resolvedPath: "./src/index.ts" }]);
|
||||
});
|
||||
|
||||
it("accepts conditional exports pointing to built files", () => {
|
||||
const issues = findSourceExports({
|
||||
".": { import: "./dist/index.mjs", types: "./dist/index.d.mts" },
|
||||
});
|
||||
expect(issues).toEqual([]);
|
||||
});
|
||||
|
||||
it("flags multiple bad exports", () => {
|
||||
const issues = findSourceExports({
|
||||
".": "./src/index.ts",
|
||||
"./sandbox": "./src/sandbox-entry.ts",
|
||||
});
|
||||
expect(issues).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
92
packages/core/tests/unit/cli/secrets-commands.test.ts
Normal file
92
packages/core/tests/unit/cli/secrets-commands.test.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Tests for the `emdash secrets` CLI surface.
|
||||
*
|
||||
* Focuses on the file-write helper used by `secrets generate --write`,
|
||||
* which is the only piece with non-trivial logic. The command runners
|
||||
* themselves are thin wrappers around `generateEncryptionKey()` and
|
||||
* `fingerprintKey()` (covered by `tests/unit/config/secrets.test.ts`).
|
||||
*/
|
||||
|
||||
import { mkdtemp, readFile, rm, writeFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { writeEncryptionKeyToFile } from "../../../src/cli/commands/secrets.js";
|
||||
|
||||
describe("secrets CLI: writeEncryptionKeyToFile", () => {
|
||||
let dir: string;
|
||||
let target: string;
|
||||
const sample = "emdash_enc_v1_AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA";
|
||||
const sample2 = "emdash_enc_v1_BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB";
|
||||
|
||||
beforeEach(async () => {
|
||||
dir = await mkdtemp(join(tmpdir(), "emdash-secrets-cli-"));
|
||||
target = join(dir, ".dev.vars");
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(dir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("creates a new file with a trailing newline", async () => {
|
||||
const result = writeEncryptionKeyToFile(target, sample, false);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
expect(content).toBe(`EMDASH_ENCRYPTION_KEY=${sample}\n`);
|
||||
});
|
||||
|
||||
it("appends to an existing file without clobbering other vars", async () => {
|
||||
await writeFile(target, "OTHER=value\nFOO=bar\n");
|
||||
const result = writeEncryptionKeyToFile(target, sample, false);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
expect(content).toBe(`OTHER=value\nFOO=bar\nEMDASH_ENCRYPTION_KEY=${sample}\n`);
|
||||
});
|
||||
|
||||
it("appends to a file that lacks a trailing newline", async () => {
|
||||
await writeFile(target, "OTHER=value");
|
||||
const result = writeEncryptionKeyToFile(target, sample, false);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\n`);
|
||||
});
|
||||
|
||||
it("refuses to overwrite an existing entry without force", async () => {
|
||||
await writeFile(target, `EMDASH_ENCRYPTION_KEY=${sample}\nOTHER=value\n`);
|
||||
const result = writeEncryptionKeyToFile(target, sample2, false);
|
||||
expect(result).toBe("skipped");
|
||||
const content = await readFile(target, "utf-8");
|
||||
// Entry untouched.
|
||||
expect(content).toBe(`EMDASH_ENCRYPTION_KEY=${sample}\nOTHER=value\n`);
|
||||
});
|
||||
|
||||
it("replaces an existing entry in place when force is true", async () => {
|
||||
await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\nMORE=stuff\n`);
|
||||
const result = writeEncryptionKeyToFile(target, sample2, true);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
// Other vars untouched, key replaced inline (no duplication).
|
||||
expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample2}\nMORE=stuff\n`);
|
||||
});
|
||||
|
||||
it("treats an empty-value entry as not-set and replaces it without --force", async () => {
|
||||
// Operators sometimes leave `EMDASH_ENCRYPTION_KEY=` as a placeholder.
|
||||
// A skip in that case would be hostile — they actively want a value.
|
||||
await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=\nMORE=stuff\n`);
|
||||
const result = writeEncryptionKeyToFile(target, sample, false);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}\nMORE=stuff\n`);
|
||||
});
|
||||
|
||||
it("always ends with a trailing newline, even when replacing in-place in a file without one", async () => {
|
||||
await writeFile(target, `OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample}`);
|
||||
const result = writeEncryptionKeyToFile(target, sample2, true);
|
||||
expect(result).toBe("wrote");
|
||||
const content = await readFile(target, "utf-8");
|
||||
expect(content.endsWith("\n")).toBe(true);
|
||||
expect(content).toBe(`OTHER=value\nEMDASH_ENCRYPTION_KEY=${sample2}\n`);
|
||||
});
|
||||
});
|
||||
289
packages/core/tests/unit/cli/seed-commands.test.ts
Normal file
289
packages/core/tests/unit/cli/seed-commands.test.ts
Normal file
@@ -0,0 +1,289 @@
|
||||
/**
|
||||
* Tests for CLI seed commands
|
||||
*/
|
||||
|
||||
import { mkdtemp, rm, writeFile, mkdir, readFile } from "node:fs/promises";
|
||||
import { tmpdir } from "node:os";
|
||||
import { join } from "node:path";
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import { applySeed } from "../../../src/seed/apply.js";
|
||||
import type { SeedFile } from "../../../src/seed/types.js";
|
||||
import { validateSeed } from "../../../src/seed/validate.js";
|
||||
|
||||
describe("CLI Seed Commands", () => {
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await mkdtemp(join(tmpdir(), "emdash-cli-test-"));
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe("seed file resolution", () => {
|
||||
it("should resolve .emdash/seed.json by convention", async () => {
|
||||
// Create convention seed file
|
||||
const emdashDir = join(tempDir, ".emdash");
|
||||
await mkdir(emdashDir);
|
||||
const seedPath = join(emdashDir, "seed.json");
|
||||
|
||||
const seed: SeedFile = {
|
||||
version: "1",
|
||||
settings: { title: "Convention Seed" },
|
||||
};
|
||||
await writeFile(seedPath, JSON.stringify(seed));
|
||||
|
||||
// Read it back
|
||||
const content = await readFile(seedPath, "utf-8");
|
||||
const parsed = JSON.parse(content);
|
||||
expect(parsed.settings.title).toBe("Convention Seed");
|
||||
});
|
||||
|
||||
it("should resolve seed from package.json emdash.seed", async () => {
|
||||
// Create seed file in custom location
|
||||
const customDir = join(tempDir, "custom");
|
||||
await mkdir(customDir);
|
||||
const seedPath = join(customDir, "my-seed.json");
|
||||
|
||||
const seed: SeedFile = {
|
||||
version: "1",
|
||||
settings: { title: "Package.json Seed" },
|
||||
};
|
||||
await writeFile(seedPath, JSON.stringify(seed));
|
||||
|
||||
// Create package.json referencing it
|
||||
const pkg = {
|
||||
name: "test-project",
|
||||
emdash: {
|
||||
seed: "custom/my-seed.json",
|
||||
},
|
||||
};
|
||||
await writeFile(join(tempDir, "package.json"), JSON.stringify(pkg));
|
||||
|
||||
// Verify the referenced path works
|
||||
const content = await readFile(seedPath, "utf-8");
|
||||
const parsed = JSON.parse(content);
|
||||
expect(parsed.settings.title).toBe("Package.json Seed");
|
||||
});
|
||||
});
|
||||
|
||||
describe("seed validation", () => {
|
||||
it("should validate a valid seed file", () => {
|
||||
const seed: SeedFile = {
|
||||
version: "1",
|
||||
settings: { title: "Test Site" },
|
||||
collections: [
|
||||
{
|
||||
slug: "posts",
|
||||
label: "Posts",
|
||||
fields: [{ slug: "title", label: "Title", type: "string", required: true }],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = validateSeed(seed);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should reject invalid seed version", () => {
|
||||
const seed = {
|
||||
version: "999",
|
||||
settings: {},
|
||||
};
|
||||
|
||||
const result = validateSeed(seed);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes("version"))).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject seed with invalid collection", () => {
|
||||
const seed: SeedFile = {
|
||||
version: "1",
|
||||
collections: [
|
||||
{
|
||||
slug: "", // Invalid: empty slug
|
||||
label: "Posts",
|
||||
fields: [],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = validateSeed(seed);
|
||||
expect(result.valid).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("seed application", () => {
  // Each test provisions a fresh file-backed database under tempDir
  // (tempDir/createDatabase/runMigrations/applySeed come from earlier in
  // this file), applies a seed, and always destroys the db in `finally`
  // so a failing assertion cannot leak the handle.
  it("should apply settings from seed", async () => {
    const dbPath = join(tempDir, "test.db");
    const db = createDatabase({ url: `file:${dbPath}` });

    try {
      await runMigrations(db);

      const seed: SeedFile = {
        version: "1",
        settings: {
          title: "My Test Site",
          tagline: "A test site for testing",
        },
      };

      const result = await applySeed(db, seed, {});

      // Two settings keys (title + tagline) => two applied entries.
      expect(result.settings.applied).toBe(2);
    } finally {
      await db.destroy();
    }
  });

  it("should apply collections from seed", async () => {
    const dbPath = join(tempDir, "test.db");
    const db = createDatabase({ url: `file:${dbPath}` });

    try {
      await runMigrations(db);

      const seed: SeedFile = {
        version: "1",
        collections: [
          {
            slug: "articles",
            label: "Articles",
            labelSingular: "Article",
            fields: [
              {
                slug: "title",
                label: "Title",
                type: "string",
                required: true,
              },
              { slug: "body", label: "Body", type: "portableText" },
            ],
          },
        ],
      };

      const result = await applySeed(db, seed, {});

      // One collection with two fields defined above.
      expect(result.collections.created).toBe(1);
      expect(result.fields.created).toBe(2);
    } finally {
      await db.destroy();
    }
  });

  it("should be idempotent (skip existing)", async () => {
    const dbPath = join(tempDir, "test.db");
    const db = createDatabase({ url: `file:${dbPath}` });

    try {
      await runMigrations(db);

      const seed: SeedFile = {
        version: "1",
        collections: [
          {
            slug: "pages",
            label: "Pages",
            fields: [{ slug: "title", label: "Title", type: "string" }],
          },
        ],
      };

      // First apply
      const result1 = await applySeed(db, seed, {});
      expect(result1.collections.created).toBe(1);
      expect(result1.collections.skipped).toBe(0);

      // Second apply - should skip
      const result2 = await applySeed(db, seed, {});
      expect(result2.collections.created).toBe(0);
      expect(result2.collections.skipped).toBe(1);
    } finally {
      await db.destroy();
    }
  });
});
|
||||
|
||||
describe("export-seed output", () => {
  it("should produce valid seed from exported data", async () => {
    const dbPath = join(tempDir, "test.db");
    const db = createDatabase({ url: `file:${dbPath}` });

    try {
      await runMigrations(db);

      // Apply a seed first
      const inputSeed: SeedFile = {
        version: "1",
        settings: { title: "Export Test" },
        collections: [
          {
            slug: "docs",
            label: "Documentation",
            fields: [
              { slug: "title", label: "Title", type: "string" },
              { slug: "content", label: "Content", type: "portableText" },
            ],
          },
        ],
      };

      await applySeed(db, inputSeed, {});

      // Now export (simulating what export-seed does)
      // For this test, we just verify the input seed validates
      // NOTE(review): this does not actually round-trip through the
      // export path — it only re-validates the input seed. A true
      // export-then-validate test would be stronger; confirm whether
      // export-seed has its own coverage elsewhere.
      const validation = validateSeed(inputSeed);
      expect(validation.valid).toBe(true);
    } finally {
      await db.destroy();
    }
  });
});
|
||||
|
||||
describe("content export with $media", () => {
  it("should handle content without media gracefully", async () => {
    const dbPath = join(tempDir, "test.db");
    const db = createDatabase({ url: `file:${dbPath}` });

    try {
      await runMigrations(db);

      // Seed carries inline content (one published post) but no $media
      // references; applySeed must not require media to be present.
      const seed: SeedFile = {
        version: "1",
        collections: [
          {
            slug: "posts",
            label: "Posts",
            fields: [{ slug: "title", label: "Title", type: "string" }],
          },
        ],
        content: {
          posts: [
            {
              id: "post-1",
              slug: "hello-world",
              status: "published",
              data: { title: "Hello World" },
            },
          ],
        },
      };

      // includeContent must be opted into explicitly for content rows
      // to be created.
      const result = await applySeed(db, seed, { includeContent: true });

      expect(result.collections.created).toBe(1);
      expect(result.content.created).toBe(1);
    } finally {
      await db.destroy();
    }
  });
});
|
||||
});
|
||||
427
packages/core/tests/unit/cli/wxr-parser.test.ts
Normal file
427
packages/core/tests/unit/cli/wxr-parser.test.ts
Normal file
@@ -0,0 +1,427 @@
|
||||
/**
|
||||
* Tests for WXR parser
|
||||
*/
|
||||
|
||||
import { Readable } from "node:stream";
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { parseWxr } from "../../../src/cli/wxr/parser.js";
|
||||
|
||||
function createStream(content: string): Readable {
|
||||
return Readable.from([content]);
|
||||
}
|
||||
|
||||
describe("parseWxr", () => {
  // Channel-level metadata lands on result.site.
  it("parses basic WXR structure", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:excerpt="http://wordpress.org/export/1.2/excerpt/"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<title>Test Site</title>
<link>https://example.com</link>
<description>A test WordPress site</description>
<language>en-US</language>
<wp:base_site_url>https://example.com</wp:base_site_url>
<wp:base_blog_url>https://example.com</wp:base_blog_url>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.site.title).toBe("Test Site");
    expect(result.site.link).toBe("https://example.com");
    expect(result.site.description).toBe("A test WordPress site");
    expect(result.site.language).toBe("en-US");
  });

  // <item> with wp:post_type=post becomes an entry in result.posts;
  // numeric fields (post_id) are coerced to numbers.
  it("parses posts", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<title>Test Site</title>
<item>
<title>Hello World</title>
<link>https://example.com/hello-world/</link>
<pubDate>Mon, 01 Jan 2024 12:00:00 +0000</pubDate>
<dc:creator>admin</dc:creator>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Welcome to WordPress!</p>
<!-- /wp:paragraph -->]]></content:encoded>
<wp:post_id>1</wp:post_id>
<wp:post_date>2024-01-01 12:00:00</wp:post_date>
<wp:status>publish</wp:status>
<wp:post_type>post</wp:post_type>
<wp:post_name>hello-world</wp:post_name>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts).toHaveLength(1);
    expect(result.posts[0]?.title).toBe("Hello World");
    expect(result.posts[0]?.id).toBe(1);
    expect(result.posts[0]?.status).toBe("publish");
    expect(result.posts[0]?.postType).toBe("post");
    expect(result.posts[0]?.content).toContain("wp:paragraph");
  });

  // Pages are kept in the same posts array, distinguished by postType.
  it("parses pages", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>About Us</title>
<content:encoded><![CDATA[<p>About page content</p>]]></content:encoded>
<wp:post_id>2</wp:post_id>
<wp:status>publish</wp:status>
<wp:post_type>page</wp:post_type>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts).toHaveLength(1);
    expect(result.posts[0]?.title).toBe("About Us");
    expect(result.posts[0]?.postType).toBe("page");
  });

  // Attachments are routed to result.attachments, NOT result.posts.
  it("parses attachments", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>Test Image</title>
<wp:post_id>10</wp:post_id>
<wp:post_type>attachment</wp:post_type>
<wp:attachment_url>https://example.com/wp-content/uploads/2024/01/test.jpg</wp:attachment_url>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts).toHaveLength(0);
    expect(result.attachments).toHaveLength(1);
    expect(result.attachments[0]?.id).toBe(10);
    expect(result.attachments[0]?.title).toBe("Test Image");
    expect(result.attachments[0]?.url).toContain("test.jpg");
  });

  // Channel-level wp:category elements, including parent references
  // (by nicename, not id).
  it("parses categories", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<wp:category>
<wp:term_id>1</wp:term_id>
<wp:category_nicename>uncategorized</wp:category_nicename>
<wp:cat_name><![CDATA[Uncategorized]]></wp:cat_name>
</wp:category>
<wp:category>
<wp:term_id>2</wp:term_id>
<wp:category_nicename>news</wp:category_nicename>
<wp:cat_name><![CDATA[News]]></wp:cat_name>
<wp:category_parent>uncategorized</wp:category_parent>
</wp:category>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.categories).toHaveLength(2);
    expect(result.categories[0]?.nicename).toBe("uncategorized");
    expect(result.categories[0]?.name).toBe("Uncategorized");
    expect(result.categories[1]?.parent).toBe("uncategorized");
  });

  it("parses tags", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<wp:tag>
<wp:term_id>5</wp:term_id>
<wp:tag_slug>javascript</wp:tag_slug>
<wp:tag_name><![CDATA[JavaScript]]></wp:tag_name>
</wp:tag>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.tags).toHaveLength(1);
    expect(result.tags[0]?.slug).toBe("javascript");
    expect(result.tags[0]?.name).toBe("JavaScript");
  });

  // Item-level <category> elements are split by their domain attribute:
  // "category" -> post.categories, "post_tag" -> post.tags.
  it("parses post categories and tags", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>Tagged Post</title>
<category domain="category" nicename="news"><![CDATA[News]]></category>
<category domain="post_tag" nicename="javascript"><![CDATA[JavaScript]]></category>
<category domain="post_tag" nicename="typescript"><![CDATA[TypeScript]]></category>
<wp:post_type>post</wp:post_type>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts[0]?.categories).toContain("news");
    expect(result.posts[0]?.tags).toContain("javascript");
    expect(result.posts[0]?.tags).toContain("typescript");
  });

  it("parses authors", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<wp:author>
<wp:author_id>1</wp:author_id>
<wp:author_login>admin</wp:author_login>
<wp:author_email>admin@example.com</wp:author_email>
<wp:author_display_name><![CDATA[Administrator]]></wp:author_display_name>
<wp:author_first_name>Admin</wp:author_first_name>
<wp:author_last_name>User</wp:author_last_name>
</wp:author>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.authors).toHaveLength(1);
    expect(result.authors[0]?.login).toBe("admin");
    expect(result.authors[0]?.email).toBe("admin@example.com");
    expect(result.authors[0]?.displayName).toBe("Administrator");
  });

  // wp:postmeta key/value pairs are exposed as a Map on the post.
  it("parses post meta", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>Post with Meta</title>
<wp:post_type>post</wp:post_type>
<wp:postmeta>
<wp:meta_key>_yoast_wpseo_title</wp:meta_key>
<wp:meta_value>SEO Title</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_yoast_wpseo_metadesc</wp:meta_key>
<wp:meta_value>SEO Description</wp:meta_value>
</wp:postmeta>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts[0]?.meta.get("_yoast_wpseo_title")).toBe("SEO Title");
    expect(result.posts[0]?.meta.get("_yoast_wpseo_metadesc")).toBe("SEO Description");
  });

  // A channel with no items/terms yields empty arrays, not undefined.
  it("handles empty WXR", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
<channel>
<title>Empty Site</title>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts).toHaveLength(0);
    expect(result.attachments).toHaveLength(0);
    expect(result.categories).toHaveLength(0);
  });

  // post_parent refers to the parent's numeric post id; menu_order is
  // preserved for sibling ordering.
  it("parses page hierarchy (post_parent and menu_order)", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>Parent Page</title>
<wp:post_id>10</wp:post_id>
<wp:post_type>page</wp:post_type>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>1</wp:menu_order>
</item>
<item>
<title>Child Page</title>
<wp:post_id>11</wp:post_id>
<wp:post_type>page</wp:post_type>
<wp:post_parent>10</wp:post_parent>
<wp:menu_order>2</wp:menu_order>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts).toHaveLength(2);
    expect(result.posts[0]?.postParent).toBe(0);
    expect(result.posts[0]?.menuOrder).toBe(1);
    expect(result.posts[1]?.postParent).toBe(10);
    expect(result.posts[1]?.menuOrder).toBe(2);
  });

  // Generic wp:term entries (any taxonomy) are collected in result.terms.
  it("parses generic wp:term elements (custom taxonomies)", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<wp:term>
<wp:term_id>100</wp:term_id>
<wp:term_taxonomy>genre</wp:term_taxonomy>
<wp:term_slug>sci-fi</wp:term_slug>
<wp:term_name><![CDATA[Science Fiction]]></wp:term_name>
<wp:term_description><![CDATA[Science fiction books]]></wp:term_description>
</wp:term>
<wp:term>
<wp:term_id>101</wp:term_id>
<wp:term_taxonomy>genre</wp:term_taxonomy>
<wp:term_slug>fantasy</wp:term_slug>
<wp:term_name><![CDATA[Fantasy]]></wp:term_name>
<wp:term_parent>sci-fi</wp:term_parent>
</wp:term>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.terms).toHaveLength(2);
    expect(result.terms[0]?.id).toBe(100);
    expect(result.terms[0]?.taxonomy).toBe("genre");
    expect(result.terms[0]?.slug).toBe("sci-fi");
    expect(result.terms[0]?.name).toBe("Science Fiction");
    expect(result.terms[0]?.description).toBe("Science fiction books");
    expect(result.terms[1]?.parent).toBe("sci-fi");
  });

  // Menus are reconstructed from two sources: nav_menu wp:term entries
  // (the menu itself) plus nav_menu_item posts (the entries), correlated
  // via the item's <category domain="nav_menu"> and _menu_item_* postmeta.
  it("parses nav_menu terms and nav_menu_item posts into structured menus", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<wp:term>
<wp:term_id>5</wp:term_id>
<wp:term_taxonomy>nav_menu</wp:term_taxonomy>
<wp:term_slug>main-menu</wp:term_slug>
<wp:term_name><![CDATA[Main Menu]]></wp:term_name>
</wp:term>
<item>
<title>Home</title>
<wp:post_id>50</wp:post_id>
<wp:post_type>nav_menu_item</wp:post_type>
<wp:menu_order>1</wp:menu_order>
<category domain="nav_menu" nicename="main-menu"><![CDATA[Main Menu]]></category>
<wp:postmeta>
<wp:meta_key>_menu_item_type</wp:meta_key>
<wp:meta_value>custom</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_menu_item_url</wp:meta_key>
<wp:meta_value>https://example.com/</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_menu_item_menu_item_parent</wp:meta_key>
<wp:meta_value>0</wp:meta_value>
</wp:postmeta>
</item>
<item>
<title>About</title>
<wp:post_id>51</wp:post_id>
<wp:post_type>nav_menu_item</wp:post_type>
<wp:menu_order>2</wp:menu_order>
<category domain="nav_menu" nicename="main-menu"><![CDATA[Main Menu]]></category>
<wp:postmeta>
<wp:meta_key>_menu_item_type</wp:meta_key>
<wp:meta_value>post_type</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_menu_item_object</wp:meta_key>
<wp:meta_value>page</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_menu_item_object_id</wp:meta_key>
<wp:meta_value>10</wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key>_menu_item_menu_item_parent</wp:meta_key>
<wp:meta_value>0</wp:meta_value>
</wp:postmeta>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    // Check terms array includes nav_menu term
    expect(result.terms.some((t) => t.taxonomy === "nav_menu")).toBe(true);

    // Check nav_menu_item posts are in posts array
    expect(result.posts.filter((p) => p.postType === "nav_menu_item")).toHaveLength(2);

    // Check structured navMenus
    expect(result.navMenus).toHaveLength(1);
    expect(result.navMenus[0]?.name).toBe("main-menu");
    expect(result.navMenus[0]?.id).toBe(5);
    expect(result.navMenus[0]?.items).toHaveLength(2);

    // Check menu items are sorted by menu_order
    expect(result.navMenus[0]?.items[0]?.title).toBe("Home");
    expect(result.navMenus[0]?.items[0]?.type).toBe("custom");
    expect(result.navMenus[0]?.items[0]?.url).toBe("https://example.com/");
    expect(result.navMenus[0]?.items[0]?.sortOrder).toBe(1);

    expect(result.navMenus[0]?.items[1]?.title).toBe("About");
    expect(result.navMenus[0]?.items[1]?.type).toBe("post_type");
    expect(result.navMenus[0]?.items[1]?.objectType).toBe("page");
    expect(result.navMenus[0]?.items[1]?.objectId).toBe(10);
    expect(result.navMenus[0]?.items[1]?.sortOrder).toBe(2);
  });

  // Category elements whose domain is neither "category" nor "post_tag"
  // land in post.customTaxonomies keyed by the domain (a Map of arrays).
  it("parses custom taxonomy assignments on posts", async () => {
    const wxr = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
xmlns:wp="http://wordpress.org/export/1.2/">
<channel>
<item>
<title>Book Review</title>
<wp:post_id>1</wp:post_id>
<wp:post_type>post</wp:post_type>
<category domain="category" nicename="reviews"><![CDATA[Reviews]]></category>
<category domain="genre" nicename="sci-fi"><![CDATA[Science Fiction]]></category>
<category domain="genre" nicename="dystopian"><![CDATA[Dystopian]]></category>
<category domain="reading_level" nicename="advanced"><![CDATA[Advanced]]></category>
</item>
</channel>
</rss>`;

    const result = await parseWxr(createStream(wxr));

    expect(result.posts[0]?.categories).toContain("reviews");
    expect(result.posts[0]?.customTaxonomies?.get("genre")).toContain("sci-fi");
    expect(result.posts[0]?.customTaxonomies?.get("genre")).toContain("dystopian");
    expect(result.posts[0]?.customTaxonomies?.get("reading_level")).toContain("advanced");
  });
});
|
||||
710
packages/core/tests/unit/client/client.test.ts
Normal file
710
packages/core/tests/unit/client/client.test.ts
Normal file
@@ -0,0 +1,710 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { EmDashClient, EmDashApiError } from "../../../src/client/index.js";
|
||||
import type { Interceptor } from "../../../src/client/transport.js";
|
||||
|
||||
// Regex patterns for route matching.
// Matches any item-level route for the fixture document "abc" in the
// "posts" collection; reused below by both GET and PUT mock routes.
const CONTENT_POSTS_ABC_REGEX = /\/content\/posts\/abc/;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Mock backend
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** One route of the mock HTTP backend used by these tests. */
interface MockRoute {
  /** HTTP method to match exactly (e.g. "GET", "PUT"). */
  method: string;
  /**
   * Path matcher, applied to pathname + query string: a string is a
   * substring match; a RegExp is tested against the same value.
   */
  path: RegExp | string;
  /** Produces the mocked response for a matched request. */
  handler: (req: Request) => Response | Promise<Response>;
}
|
||||
|
||||
/**
|
||||
* Creates a mock HTTP backend as an interceptor.
|
||||
* Routes are matched in order. Unmatched requests return 404.
|
||||
*/
|
||||
function createMockBackend(routes: MockRoute[]): Interceptor {
|
||||
return async (req) => {
|
||||
const url = new URL(req.url);
|
||||
const path = url.pathname + url.search;
|
||||
|
||||
for (const route of routes) {
|
||||
if (req.method !== route.method) continue;
|
||||
if (typeof route.path === "string") {
|
||||
if (!path.includes(route.path)) continue;
|
||||
} else {
|
||||
if (!route.path.test(path)) continue;
|
||||
}
|
||||
return route.handler(req);
|
||||
}
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ error: { code: "NOT_FOUND", message: "No matching route" } }),
|
||||
{ status: 404, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
/** Wraps body in `{ data: body }` to match the standard API response envelope. */
|
||||
function jsonResponse(body: unknown, status: number = 200): Response {
|
||||
// Error responses (4xx/5xx) are NOT wrapped in { data }
|
||||
const payload = status >= 400 ? body : { data: body };
|
||||
return new Response(JSON.stringify(payload), {
|
||||
status,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("EmDashClient", () => {
|
||||
describe("_rev token flow", () => {
  // The assertions inside the PUT handlers verify what the client put
  // on the wire; a failure there surfaces as a rejected client call.
  it("blind update (no _rev) succeeds", async () => {
    const backend = createMockBackend([
      {
        // Collection schema fetch the client performs before content ops.
        method: "GET",
        path: "/schema/collections/posts",
        handler: () =>
          jsonResponse({
            item: {
              slug: "posts",
              label: "Posts",
              fields: [{ slug: "title", type: "string", label: "Title" }],
            },
          }),
      },
      {
        method: "PUT",
        path: CONTENT_POSTS_ABC_REGEX,
        handler: async (req) => {
          const body = (await req.json()) as Record<string, unknown>;
          // No _rev should be sent
          expect(body._rev).toBeUndefined();
          return jsonResponse({
            item: { id: "abc", data: { title: "Blind" } },
            _rev: "newrev",
          });
        },
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    const updated = await client.update("posts", "abc", {
      data: { title: "Blind" },
    });
    expect(updated.data.title).toBe("Blind");
  });

  it("get() returns _rev on the item", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/schema/collections/posts",
        handler: () =>
          jsonResponse({
            item: {
              slug: "posts",
              label: "Posts",
              fields: [{ slug: "title", type: "string", label: "Title" }],
            },
          }),
      },
      {
        method: "GET",
        path: CONTENT_POSTS_ABC_REGEX,
        handler: () =>
          jsonResponse({
            item: {
              id: "abc",
              type: "posts",
              slug: "hello",
              status: "draft",
              data: { title: "Hello" },
              authorId: null,
              createdAt: "2026-01-01",
              updatedAt: "2026-01-01",
              publishedAt: null,
              scheduledAt: null,
              liveRevisionId: null,
              draftRevisionId: null,
            },
            _rev: "dGVzdHJldg",
          }),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    // The top-level _rev from the envelope is surfaced on the item.
    const post = await client.get("posts", "abc");
    expect(post.id).toBe("abc");
    expect(post._rev).toBe("dGVzdHJldg");
  });

  it("update() sends _rev when provided", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/schema/collections/posts",
        handler: () =>
          jsonResponse({
            item: {
              slug: "posts",
              label: "Posts",
              fields: [{ slug: "title", type: "string", label: "Title" }],
            },
          }),
      },
      {
        method: "PUT",
        path: CONTENT_POSTS_ABC_REGEX,
        handler: async (req) => {
          const body = await req.json();
          // Caller-supplied _rev must be forwarded in the request body.
          expect((body as Record<string, unknown>)._rev).toBe("dGVzdHJldg");
          return jsonResponse({
            item: { id: "abc", data: { title: "Updated" } },
            _rev: "bmV3cmV2",
          });
        },
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    const updated = await client.update("posts", "abc", {
      data: { title: "Updated" },
      _rev: "dGVzdHJldg",
    });
    expect(updated.data.title).toBe("Updated");
    // And the server's fresh _rev is returned on the updated item.
    expect(updated._rev).toBe("bmV3cmV2");
  });
});
|
||||
|
||||
describe("create()", () => {
  // create() only needs the collection schema + a POST route; no prior
  // get() of an existing item is required.
  it("does not require a prior get()", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/schema/collections/posts",
        handler: () =>
          jsonResponse({
            item: {
              slug: "posts",
              label: "Posts",
              fields: [{ slug: "title", type: "string", label: "Title" }],
            },
          }),
      },
      {
        method: "POST",
        path: "/content/posts",
        handler: () =>
          jsonResponse({
            item: {
              id: "new1",
              type: "posts",
              slug: "hello",
              status: "draft",
              data: { title: "Hello" },
              authorId: null,
              createdAt: "2026-01-01",
              updatedAt: "2026-01-01",
              publishedAt: null,
              scheduledAt: null,
              liveRevisionId: null,
              draftRevisionId: null,
            },
          }),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    const item = await client.create("posts", {
      data: { title: "Hello" },
      slug: "hello",
    });
    expect(item.id).toBe("new1");
  });
});
|
||||
|
||||
describe("API error handling", () => {
  it("throws EmDashApiError on 4xx responses", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/schema/collections",
        handler: () => jsonResponse({ error: { code: "FORBIDDEN", message: "No access" } }, 403),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    // try/catch (with expect.fail on the success path) is used instead of
    // rejects.toThrow so the error's fields can be inspected individually.
    try {
      await client.collections();
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(403);
      expect(apiErr.code).toBe("FORBIDDEN");
      expect(apiErr.message).toBe("No access");
    }
  });

  it("throws EmDashApiError on 500 responses", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/manifest",
        handler: () =>
          jsonResponse(
            {
              error: {
                code: "INTERNAL_ERROR",
                message: "Something broke",
              },
            },
            500,
          ),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    try {
      await client.manifest();
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      expect((error as EmDashApiError).status).toBe(500);
    }
  });
});
|
||||
|
||||
describe("list()", () => {
  // list() returns a single page: the items plus the server's nextCursor
  // for pagination (see listAll() below for cursor-following).
  it("returns items and nextCursor", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/content/posts",
        handler: () =>
          jsonResponse({
            items: [
              {
                id: "1",
                type: "posts",
                slug: "a",
                status: "published",
                data: {},
              },
              {
                id: "2",
                type: "posts",
                slug: "b",
                status: "published",
                data: {},
              },
            ],
            nextCursor: "cursor123",
          }),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    const result = await client.list("posts", { status: "published" });
    expect(result.items).toHaveLength(2);
    expect(result.nextCursor).toBe("cursor123");
  });
});
|
||||
|
||||
describe("listAll()", () => {
  it("follows cursors until exhaustion", async () => {
    // Stateful mock: first call returns a page with nextCursor, the
    // second returns a final page without one, ending iteration.
    let page = 0;
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/content/posts",
        handler: () => {
          page++;
          if (page === 1) {
            return jsonResponse({
              items: [{ id: "1", data: {} }],
              nextCursor: "page2",
            });
          }
          return jsonResponse({
            items: [{ id: "2", data: {} }],
          });
        },
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    // listAll() is an async iterator over items across pages.
    const all = [];
    for await (const item of client.listAll("posts")) {
      all.push(item);
    }
    expect(all).toHaveLength(2);
    expect(all[0]?.id).toBe("1");
    expect(all[1]?.id).toBe("2");
  });
});
|
||||
|
||||
describe("delete/publish/unpublish/schedule/restore", () => {
  it("calls the correct endpoints", async () => {
    // Record every request instead of routing: this test only checks
    // method + path, not response handling.
    const calledPaths: string[] = [];

    const backend: Interceptor = async (req) => {
      calledPaths.push(`${req.method} ${new URL(req.url).pathname}`);
      return jsonResponse({});
    };

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    await client.delete("posts", "abc");
    await client.publish("posts", "abc");
    await client.unpublish("posts", "abc");
    await client.schedule("posts", "abc", { at: "2026-03-01T00:00:00Z" });
    await client.restore("posts", "abc");

    // toEqual pins both the paths and the call order.
    expect(calledPaths).toEqual([
      "DELETE /_emdash/api/content/posts/abc",
      "POST /_emdash/api/content/posts/abc/publish",
      "POST /_emdash/api/content/posts/abc/unpublish",
      "POST /_emdash/api/content/posts/abc/schedule",
      "POST /_emdash/api/content/posts/abc/restore",
    ]);
  });
});
|
||||
|
||||
describe("schema methods", () => {
  it("collections() returns list", async () => {
    const backend = createMockBackend([
      {
        method: "GET",
        path: "/schema/collections",
        handler: () =>
          jsonResponse({
            items: [
              { slug: "posts", label: "Posts", supports: [] },
              { slug: "pages", label: "Pages", supports: [] },
            ],
          }),
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    const cols = await client.collections();
    expect(cols).toHaveLength(2);
    expect(cols[0]?.slug).toBe("posts");
  });

  it("createCollection() sends correct payload", async () => {
    // Capture the raw request body to assert exactly what was POSTed.
    let capturedBody: unknown;
    const backend = createMockBackend([
      {
        method: "POST",
        path: "/schema/collections",
        handler: async (req) => {
          capturedBody = await req.json();
          return jsonResponse({
            item: {
              slug: "events",
              label: "Events",
              labelSingular: "Event",
            },
          });
        },
      },
    ]);

    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [backend],
    });

    await client.createCollection({
      slug: "events",
      label: "Events",
      labelSingular: "Event",
    });

    // The payload is sent verbatim — no extra fields added client-side.
    expect(capturedBody).toEqual({
      slug: "events",
      label: "Events",
      labelSingular: "Event",
    });
  });
});
|
||||
|
||||
describe("PT <-> Markdown auto-conversion", () => {
|
||||
it("converts PT fields to markdown on get()", async () => {
|
||||
const backend = createMockBackend([
|
||||
{
|
||||
method: "GET",
|
||||
path: "/schema/collections/posts",
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
item: {
|
||||
slug: "posts",
|
||||
label: "Posts",
|
||||
fields: [
|
||||
{ slug: "title", type: "string", label: "Title" },
|
||||
{ slug: "body", type: "portableText", label: "Body" },
|
||||
],
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
method: "GET",
|
||||
path: CONTENT_POSTS_ABC_REGEX,
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
item: {
|
||||
id: "abc",
|
||||
type: "posts",
|
||||
data: {
|
||||
title: "Hello",
|
||||
body: [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [
|
||||
{
|
||||
_type: "span",
|
||||
text: "World",
|
||||
marks: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_rev: "rev1",
|
||||
}),
|
||||
},
|
||||
]);
|
||||
|
||||
const client = new EmDashClient({
|
||||
baseUrl: "http://localhost:4321",
|
||||
token: "test",
|
||||
interceptors: [backend],
|
||||
});
|
||||
|
||||
const item = await client.get("posts", "abc");
|
||||
expect(item.data.title).toBe("Hello");
|
||||
expect(typeof item.data.body).toBe("string");
|
||||
expect(item.data.body).toContain("World");
|
||||
});
|
||||
|
||||
it("returns raw PT when raw: true", async () => {
|
||||
const backend = createMockBackend([
|
||||
{
|
||||
method: "GET",
|
||||
path: "/schema/collections/posts",
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
item: {
|
||||
slug: "posts",
|
||||
fields: [{ slug: "body", type: "portableText", label: "Body" }],
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
method: "GET",
|
||||
path: CONTENT_POSTS_ABC_REGEX,
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
item: {
|
||||
id: "abc",
|
||||
data: {
|
||||
body: [
|
||||
{
|
||||
_type: "block",
|
||||
children: [{ _type: "span", text: "Raw" }],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
_rev: "rev1",
|
||||
}),
|
||||
},
|
||||
]);
|
||||
|
||||
const client = new EmDashClient({
|
||||
baseUrl: "http://localhost:4321",
|
||||
token: "test",
|
||||
interceptors: [backend],
|
||||
});
|
||||
|
||||
const item = await client.get("posts", "abc", { raw: true });
|
||||
expect(Array.isArray(item.data.body)).toBe(true);
|
||||
});
|
||||
|
||||
it("converts markdown to PT on create()", async () => {
|
||||
let capturedData: Record<string, unknown> | undefined;
|
||||
|
||||
const backend = createMockBackend([
|
||||
{
|
||||
method: "GET",
|
||||
path: "/schema/collections/posts",
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
item: {
|
||||
slug: "posts",
|
||||
fields: [
|
||||
{ slug: "title", type: "string", label: "Title" },
|
||||
{ slug: "body", type: "portableText", label: "Body" },
|
||||
],
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
method: "POST",
|
||||
path: "/content/posts",
|
||||
handler: async (req) => {
|
||||
const body = (await req.json()) as Record<string, unknown>;
|
||||
capturedData = body.data as Record<string, unknown>;
|
||||
return jsonResponse({
|
||||
item: {
|
||||
id: "new1",
|
||||
data: capturedData,
|
||||
},
|
||||
});
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const client = new EmDashClient({
|
||||
baseUrl: "http://localhost:4321",
|
||||
token: "test",
|
||||
interceptors: [backend],
|
||||
});
|
||||
|
||||
await client.create("posts", {
|
||||
data: {
|
||||
title: "Hello",
|
||||
body: "Some **bold** text",
|
||||
},
|
||||
});
|
||||
|
||||
expect(capturedData).toBeDefined();
|
||||
expect(capturedData!.title).toBe("Hello");
|
||||
expect(Array.isArray(capturedData!.body)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Taxonomy & menu envelope bugs
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
describe("taxonomies()", () => {
|
||||
it("returns taxonomy array from { taxonomies } envelope", async () => {
|
||||
const backend = createMockBackend([
|
||||
{
|
||||
method: "GET",
|
||||
path: "/taxonomies",
|
||||
handler: () =>
|
||||
jsonResponse({
|
||||
taxonomies: [
|
||||
{
|
||||
id: "t1",
|
||||
name: "categories",
|
||||
label: "Categories",
|
||||
hierarchical: true,
|
||||
collections: ["posts"],
|
||||
},
|
||||
],
|
||||
}),
|
||||
},
|
||||
]);
|
||||
|
||||
const client = new EmDashClient({
|
||||
baseUrl: "http://localhost:4321",
|
||||
token: "test",
|
||||
interceptors: [backend],
|
||||
});
|
||||
|
||||
const result = await client.taxonomies();
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0]!.name).toBe("categories");
|
||||
});
|
||||
});
|
||||
|
||||
describe("menus()", () => {
|
||||
it("returns menu array from bare-array envelope", async () => {
|
||||
const backend = createMockBackend([
|
||||
{
|
||||
method: "GET",
|
||||
path: "/menus",
|
||||
handler: () =>
|
||||
jsonResponse([
|
||||
{
|
||||
id: "m1",
|
||||
name: "primary",
|
||||
label: "Primary",
|
||||
itemCount: 3,
|
||||
},
|
||||
]),
|
||||
},
|
||||
]);
|
||||
|
||||
const client = new EmDashClient({
|
||||
baseUrl: "http://localhost:4321",
|
||||
token: "test",
|
||||
interceptors: [backend],
|
||||
});
|
||||
|
||||
const result = await client.menus();
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBe(1);
|
||||
expect(result[0]!.name).toBe("primary");
|
||||
});
|
||||
});
|
||||
});
|
||||
546
packages/core/tests/unit/client/portable-text.test.ts
Normal file
546
packages/core/tests/unit/client/portable-text.test.ts
Normal file
@@ -0,0 +1,546 @@
|
||||
import { describe, it, expect, beforeEach } from "vitest";
|
||||
|
||||
import type { PortableTextBlock, FieldSchema } from "../../../src/client/portable-text.js";
|
||||
import {
|
||||
portableTextToMarkdown,
|
||||
markdownToPortableText,
|
||||
resetKeyCounter,
|
||||
convertDataForRead,
|
||||
convertDataForWrite,
|
||||
} from "../../../src/client/portable-text.js";
|
||||
|
||||
beforeEach(() => {
|
||||
resetKeyCounter();
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PT -> Markdown
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("portableTextToMarkdown", () => {
|
||||
it("converts a simple paragraph", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
_key: "a",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", _key: "s1", text: "Hello world", marks: [] }],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("Hello world\n");
|
||||
});
|
||||
|
||||
it("converts headings h1-h6", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "h1",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Title", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "h3",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Subtitle", marks: [] }],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("# Title\n\n### Subtitle\n");
|
||||
});
|
||||
|
||||
it("converts bold, italic, code, and strikethrough marks", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [
|
||||
{ _type: "span", text: "bold", marks: ["strong"] },
|
||||
{ _type: "span", text: " and ", marks: [] },
|
||||
{ _type: "span", text: "italic", marks: ["em"] },
|
||||
{ _type: "span", text: " and ", marks: [] },
|
||||
{ _type: "span", text: "code", marks: ["code"] },
|
||||
{ _type: "span", text: " and ", marks: [] },
|
||||
{ _type: "span", text: "struck", marks: ["strike-through"] },
|
||||
],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe(
|
||||
"**bold** and _italic_ and `code` and ~~struck~~\n",
|
||||
);
|
||||
});
|
||||
|
||||
it("converts links via markDefs", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [{ _key: "link1", _type: "link", href: "https://example.com" }],
|
||||
children: [
|
||||
{ _type: "span", text: "Click ", marks: [] },
|
||||
{ _type: "span", text: "here", marks: ["link1"] },
|
||||
],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("Click [here](https://example.com)\n");
|
||||
});
|
||||
|
||||
it("converts blockquotes", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "blockquote",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "A quote", marks: [] }],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("> A quote\n");
|
||||
});
|
||||
|
||||
it("converts unordered lists", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "bullet",
|
||||
level: 1,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "First", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "bullet",
|
||||
level: 1,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Second", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "bullet",
|
||||
level: 2,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Nested", marks: [] }],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("- First\n- Second\n - Nested\n");
|
||||
});
|
||||
|
||||
it("converts ordered lists", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "number",
|
||||
level: 1,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "First", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "number",
|
||||
level: 1,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Second", marks: [] }],
|
||||
},
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("1. First\n1. Second\n");
|
||||
});
|
||||
|
||||
it("converts code blocks", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{ _type: "code", _key: "c1", language: "typescript", code: "const x = 1;\nconsole.log(x);" },
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe(
|
||||
"```typescript\nconst x = 1;\nconsole.log(x);\n```\n",
|
||||
);
|
||||
});
|
||||
|
||||
it("converts images", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{ _type: "image", _key: "i1", alt: "A cat", asset: { url: "/img/cat.jpg" } },
|
||||
];
|
||||
expect(portableTextToMarkdown(blocks)).toBe("\n");
|
||||
});
|
||||
|
||||
it("serializes unknown blocks as opaque fences", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Before", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "pluginWidget",
|
||||
_key: "pw1",
|
||||
config: { layout: "grid", items: 3 },
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "After", marks: [] }],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(blocks);
|
||||
expect(md).toContain("Before");
|
||||
expect(md).toContain("After");
|
||||
expect(md).toContain("<!--ec:block ");
|
||||
expect(md).toContain('"_type":"pluginWidget"');
|
||||
expect(md).toContain('"layout":"grid"');
|
||||
});
|
||||
|
||||
it("handles mixed content with paragraphs, headings, and lists", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "h1",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Title", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "A paragraph.", marks: [] }],
|
||||
},
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
listItem: "bullet",
|
||||
level: 1,
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Item", marks: [] }],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(blocks);
|
||||
expect(md).toContain("# Title");
|
||||
expect(md).toContain("A paragraph.");
|
||||
expect(md).toContain("- Item");
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Markdown -> PT
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("markdownToPortableText", () => {
|
||||
it("converts a simple paragraph", () => {
|
||||
const blocks = markdownToPortableText("Hello world\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0]._type).toBe("block");
|
||||
expect(blocks[0].style).toBe("normal");
|
||||
expect(blocks[0].children).toHaveLength(1);
|
||||
expect((blocks[0].children[0] as { text: string }).text).toBe("Hello world");
|
||||
});
|
||||
|
||||
it("converts headings", () => {
|
||||
const blocks = markdownToPortableText("# Title\n\n### Subtitle\n");
|
||||
expect(blocks).toHaveLength(2);
|
||||
expect(blocks[0].style).toBe("h1");
|
||||
expect(blocks[1].style).toBe("h3");
|
||||
});
|
||||
|
||||
it("converts bold and italic", () => {
|
||||
const blocks = markdownToPortableText("Some **bold** and _italic_ text\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
const children = blocks[0].children;
|
||||
expect(children.length).toBeGreaterThan(1);
|
||||
|
||||
const boldSpan = children.find((c) => (c.marks ?? []).includes("strong"));
|
||||
expect(boldSpan).toBeDefined();
|
||||
expect(boldSpan!.text).toBe("bold");
|
||||
|
||||
const italicSpan = children.find((c) => (c.marks ?? []).includes("em"));
|
||||
expect(italicSpan).toBeDefined();
|
||||
expect(italicSpan!.text).toBe("italic");
|
||||
});
|
||||
|
||||
it("converts inline code", () => {
|
||||
const blocks = markdownToPortableText("Use `foo()` here\n");
|
||||
const children = blocks[0].children;
|
||||
const codeSpan = children.find((c) => (c.marks ?? []).includes("code"));
|
||||
expect(codeSpan).toBeDefined();
|
||||
expect(codeSpan!.text).toBe("foo()");
|
||||
});
|
||||
|
||||
it("converts links with markDefs", () => {
|
||||
const blocks = markdownToPortableText("Click [here](https://example.com)\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0].markDefs).toHaveLength(1);
|
||||
expect(blocks[0].markDefs[0]._type).toBe("link");
|
||||
expect(blocks[0].markDefs[0].href).toBe("https://example.com");
|
||||
|
||||
const linkSpan = blocks[0].children.find((c) =>
|
||||
(c.marks ?? []).includes(blocks[0].markDefs[0]._key),
|
||||
);
|
||||
expect(linkSpan).toBeDefined();
|
||||
expect(linkSpan!.text).toBe("here");
|
||||
});
|
||||
|
||||
it("converts blockquotes", () => {
|
||||
const blocks = markdownToPortableText("> A quote\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0].style).toBe("blockquote");
|
||||
});
|
||||
|
||||
it("converts unordered lists", () => {
|
||||
const blocks = markdownToPortableText("- First\n- Second\n - Nested\n");
|
||||
expect(blocks).toHaveLength(3);
|
||||
expect(blocks[0].listItem).toBe("bullet");
|
||||
expect(blocks[0].level).toBe(1);
|
||||
expect(blocks[2].listItem).toBe("bullet");
|
||||
expect(blocks[2].level).toBe(2);
|
||||
});
|
||||
|
||||
it("converts ordered lists", () => {
|
||||
const blocks = markdownToPortableText("1. First\n2. Second\n");
|
||||
expect(blocks).toHaveLength(2);
|
||||
expect(blocks[0].listItem).toBe("number");
|
||||
expect(blocks[1].listItem).toBe("number");
|
||||
});
|
||||
|
||||
it("converts code fences", () => {
|
||||
const blocks = markdownToPortableText("```typescript\nconst x = 1;\n```\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0]._type).toBe("code");
|
||||
expect(blocks[0].language).toBe("typescript");
|
||||
expect(blocks[0].code).toBe("const x = 1;");
|
||||
});
|
||||
|
||||
it("converts images", () => {
|
||||
const blocks = markdownToPortableText("\n");
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0]._type).toBe("image");
|
||||
expect(blocks[0].alt).toBe("A cat");
|
||||
expect((blocks[0].asset as { url: string }).url).toBe("/img/cat.jpg");
|
||||
});
|
||||
|
||||
it("deserializes opaque fences back to original blocks", () => {
|
||||
const original = {
|
||||
_type: "pluginWidget",
|
||||
_key: "pw1",
|
||||
config: { layout: "grid", items: 3 },
|
||||
};
|
||||
const md = `<!--ec:block ${JSON.stringify(original)} -->`;
|
||||
const blocks = markdownToPortableText(md);
|
||||
expect(blocks).toHaveLength(1);
|
||||
expect(blocks[0]._type).toBe("pluginWidget");
|
||||
expect(blocks[0]._key).toBe("pw1");
|
||||
expect((blocks[0] as Record<string, unknown>).config).toEqual({
|
||||
layout: "grid",
|
||||
items: 3,
|
||||
});
|
||||
});
|
||||
|
||||
it("skips blank lines", () => {
|
||||
const blocks = markdownToPortableText("Hello\n\n\n\nWorld\n");
|
||||
expect(blocks).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("converts strikethrough", () => {
|
||||
const blocks = markdownToPortableText("Some ~~deleted~~ text\n");
|
||||
const children = blocks[0].children;
|
||||
const strikeSpan = children.find((c) => (c.marks ?? []).includes("strike-through"));
|
||||
expect(strikeSpan).toBeDefined();
|
||||
expect(strikeSpan!.text).toBe("deleted");
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Round-trip
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("PT <-> Markdown round-trip", () => {
|
||||
it("preserves simple text through round-trip", () => {
|
||||
const original: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
_key: "a",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", _key: "s", text: "Hello world", marks: [] }],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(original);
|
||||
const roundTripped = markdownToPortableText(md);
|
||||
|
||||
expect(roundTripped).toHaveLength(1);
|
||||
expect(roundTripped[0].style).toBe("normal");
|
||||
expect((roundTripped[0].children[0] as { text: string }).text).toBe("Hello world");
|
||||
});
|
||||
|
||||
it("preserves headings through round-trip", () => {
|
||||
const original: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "h2",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "My Heading", marks: [] }],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(original);
|
||||
const roundTripped = markdownToPortableText(md);
|
||||
|
||||
expect(roundTripped).toHaveLength(1);
|
||||
expect(roundTripped[0].style).toBe("h2");
|
||||
expect((roundTripped[0].children[0] as { text: string }).text).toBe("My Heading");
|
||||
});
|
||||
|
||||
it("preserves opaque fences through round-trip", () => {
|
||||
const custom = {
|
||||
_type: "callout",
|
||||
_key: "c1",
|
||||
style: "warning",
|
||||
text: "Be careful!",
|
||||
};
|
||||
|
||||
const original: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Before", marks: [] }],
|
||||
},
|
||||
custom,
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "After", marks: [] }],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(original);
|
||||
const roundTripped = markdownToPortableText(md);
|
||||
|
||||
expect(roundTripped).toHaveLength(3);
|
||||
expect(roundTripped[1]._type).toBe("callout");
|
||||
expect(roundTripped[1]._key).toBe("c1");
|
||||
expect((roundTripped[1] as Record<string, unknown>).style).toBe("warning");
|
||||
expect((roundTripped[1] as Record<string, unknown>).text).toBe("Be careful!");
|
||||
});
|
||||
|
||||
it("preserves code blocks through round-trip", () => {
|
||||
const original: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "code",
|
||||
_key: "c1",
|
||||
language: "javascript",
|
||||
code: "const x = 42;",
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(original);
|
||||
const roundTripped = markdownToPortableText(md);
|
||||
|
||||
expect(roundTripped).toHaveLength(1);
|
||||
expect(roundTripped[0]._type).toBe("code");
|
||||
expect(roundTripped[0].language).toBe("javascript");
|
||||
expect(roundTripped[0].code).toBe("const x = 42;");
|
||||
});
|
||||
|
||||
it("preserves bold text through round-trip", () => {
|
||||
const original: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [
|
||||
{ _type: "span", text: "Some ", marks: [] },
|
||||
{ _type: "span", text: "bold", marks: ["strong"] },
|
||||
{ _type: "span", text: " text", marks: [] },
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const md = portableTextToMarkdown(original);
|
||||
expect(md).toContain("**bold**");
|
||||
|
||||
const roundTripped = markdownToPortableText(md);
|
||||
const boldSpan = roundTripped[0].children.find((c) => (c.marks ?? []).includes("strong"));
|
||||
expect(boldSpan).toBeDefined();
|
||||
expect(boldSpan!.text).toBe("bold");
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Schema-aware conversion
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("convertDataForRead", () => {
|
||||
const fields: FieldSchema[] = [
|
||||
{ slug: "title", type: "string" },
|
||||
{ slug: "body", type: "portableText" },
|
||||
{ slug: "sidebar", type: "portableText" },
|
||||
];
|
||||
|
||||
it("converts PT arrays to markdown for portableText fields", () => {
|
||||
const data = {
|
||||
title: "Hello",
|
||||
body: [
|
||||
{
|
||||
_type: "block",
|
||||
style: "normal",
|
||||
markDefs: [],
|
||||
children: [{ _type: "span", text: "Content", marks: [] }],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = convertDataForRead(data, fields);
|
||||
expect(result.title).toBe("Hello");
|
||||
expect(typeof result.body).toBe("string");
|
||||
expect(result.body).toContain("Content");
|
||||
});
|
||||
|
||||
it("skips conversion when raw is true", () => {
|
||||
const data = {
|
||||
body: [{ _type: "block", children: [{ _type: "span", text: "X" }] }],
|
||||
};
|
||||
|
||||
const result = convertDataForRead(data, fields, true);
|
||||
expect(Array.isArray(result.body)).toBe(true);
|
||||
});
|
||||
|
||||
it("does not touch non-portableText fields", () => {
|
||||
const data = { title: "Test", body: "already a string" };
|
||||
const result = convertDataForRead(data, fields);
|
||||
expect(result.title).toBe("Test");
|
||||
expect(result.body).toBe("already a string"); // not an array, skip
|
||||
});
|
||||
});
|
||||
|
||||
describe("convertDataForWrite", () => {
|
||||
const fields: FieldSchema[] = [
|
||||
{ slug: "title", type: "string" },
|
||||
{ slug: "body", type: "portableText" },
|
||||
];
|
||||
|
||||
it("converts markdown strings to PT for portableText fields", () => {
|
||||
const data = { title: "Hello", body: "Some **bold** text" };
|
||||
const result = convertDataForWrite(data, fields);
|
||||
expect(result.title).toBe("Hello");
|
||||
expect(Array.isArray(result.body)).toBe(true);
|
||||
|
||||
const blocks = result.body as PortableTextBlock[];
|
||||
expect(blocks[0]._type).toBe("block");
|
||||
const boldSpan = blocks[0].children.find((c) => (c.marks ?? []).includes("strong"));
|
||||
expect(boldSpan!.text).toBe("bold");
|
||||
});
|
||||
|
||||
it("passes through raw PT arrays unchanged", () => {
|
||||
const ptArray = [{ _type: "block", children: [{ _type: "span", text: "Raw" }] }];
|
||||
const data = { body: ptArray };
|
||||
const result = convertDataForWrite(data, fields);
|
||||
expect(result.body).toBe(ptArray); // same reference
|
||||
});
|
||||
});
|
||||
320
packages/core/tests/unit/client/transport.test.ts
Normal file
320
packages/core/tests/unit/client/transport.test.ts
Normal file
@@ -0,0 +1,320 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import type { Interceptor } from "../../../src/client/transport.js";
|
||||
import {
|
||||
createTransport,
|
||||
csrfInterceptor,
|
||||
refreshInterceptor,
|
||||
tokenInterceptor,
|
||||
} from "../../../src/client/transport.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Create an interceptor that adds a header to the request */
|
||||
function createHeaderInterceptor(name: string, value: string): Interceptor {
|
||||
return async (req, next) => {
|
||||
const headers = new Headers(req.headers);
|
||||
headers.set(name, value);
|
||||
return next(new Request(req, { headers }));
|
||||
};
|
||||
}
|
||||
|
||||
/** Create a mock fetch that returns a fixed response */
|
||||
function mockFetch(body: unknown = {}, status: number = 200): Interceptor {
|
||||
return async () =>
|
||||
new Response(JSON.stringify(body), {
|
||||
status,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// createTransport
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("createTransport", () => {
|
||||
it("calls global fetch when no interceptors are provided", async () => {
|
||||
const transport = createTransport({
|
||||
interceptors: [mockFetch({ ok: true })],
|
||||
});
|
||||
|
||||
const res = await transport.fetch(new Request("https://example.com"));
|
||||
expect(res.status).toBe(200);
|
||||
const json = await res.json();
|
||||
expect(json).toEqual({ ok: true });
|
||||
});
|
||||
|
||||
it("runs interceptors in order", async () => {
|
||||
const order: string[] = [];
|
||||
|
||||
const first: Interceptor = async (req, next) => {
|
||||
order.push("first-before");
|
||||
const res = await next(req);
|
||||
order.push("first-after");
|
||||
return res;
|
||||
};
|
||||
|
||||
const second: Interceptor = async (req, next) => {
|
||||
order.push("second-before");
|
||||
const res = await next(req);
|
||||
order.push("second-after");
|
||||
return res;
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [first, second, mockFetch()],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com"));
|
||||
expect(order).toEqual(["first-before", "second-before", "second-after", "first-after"]);
|
||||
});
|
||||
|
||||
it("allows interceptors to modify requests", async () => {
|
||||
let capturedHeader: string | null = null;
|
||||
|
||||
const addHeader = createHeaderInterceptor("X-Custom", "test-value");
|
||||
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedHeader = req.headers.get("X-Custom");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [addHeader, capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com"));
|
||||
expect(capturedHeader).toBe("test-value");
|
||||
});
|
||||
|
||||
it("allows interceptors to retry on failure", async () => {
|
||||
let attempts = 0;
|
||||
|
||||
const retryOnce: Interceptor = async (req, next) => {
|
||||
const res = await next(req);
|
||||
if (res.status === 401 && attempts === 0) {
|
||||
attempts++;
|
||||
return next(req);
|
||||
}
|
||||
return res;
|
||||
};
|
||||
|
||||
let callCount = 0;
|
||||
const backend: Interceptor = async () => {
|
||||
callCount++;
|
||||
if (callCount === 1) {
|
||||
return new Response("unauthorized", { status: 401 });
|
||||
}
|
||||
return new Response("ok", { status: 200 });
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [retryOnce, backend],
|
||||
});
|
||||
|
||||
const res = await transport.fetch(new Request("https://example.com"));
|
||||
expect(res.status).toBe(200);
|
||||
expect(callCount).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// csrfInterceptor
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("csrfInterceptor", () => {
|
||||
it("adds X-EmDash-Request header to POST requests", async () => {
|
||||
let capturedHeader: string | null = null;
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedHeader = req.headers.get("X-EmDash-Request");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [csrfInterceptor(), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "POST" }));
|
||||
expect(capturedHeader).toBe("1");
|
||||
});
|
||||
|
||||
it("adds X-EmDash-Request header to PUT requests", async () => {
|
||||
let capturedHeader: string | null = null;
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedHeader = req.headers.get("X-EmDash-Request");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [csrfInterceptor(), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "PUT" }));
|
||||
expect(capturedHeader).toBe("1");
|
||||
});
|
||||
|
||||
it("adds X-EmDash-Request header to DELETE requests", async () => {
|
||||
let capturedHeader: string | null = null;
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedHeader = req.headers.get("X-EmDash-Request");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [csrfInterceptor(), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "DELETE" }));
|
||||
expect(capturedHeader).toBe("1");
|
||||
});
|
||||
|
||||
it("does NOT add header to GET requests", async () => {
|
||||
let capturedHeader: string | null = null;
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedHeader = req.headers.get("X-EmDash-Request");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [csrfInterceptor(), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "GET" }));
|
||||
expect(capturedHeader).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tokenInterceptor
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("tokenInterceptor", () => {
|
||||
it("adds Authorization Bearer header to all requests", async () => {
|
||||
let capturedAuth: string | null = null;
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedAuth = req.headers.get("Authorization");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [tokenInterceptor("ec_pat_abc123"), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com"));
|
||||
expect(capturedAuth).toBe("Bearer ec_pat_abc123");
|
||||
});
|
||||
|
||||
it("adds Authorization to both GET and POST", async () => {
|
||||
const captured: string[] = [];
|
||||
const capture: Interceptor = async (req) => {
|
||||
captured.push(req.headers.get("Authorization") ?? "");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [tokenInterceptor("tok"), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "GET" }));
|
||||
await transport.fetch(new Request("https://example.com", { method: "POST" }));
|
||||
expect(captured).toEqual(["Bearer tok", "Bearer tok"]);
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Interceptor composition
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// refreshInterceptor
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("refreshInterceptor", () => {
  it("unwraps { data: { access_token } } envelope from token endpoint", async () => {
    // Observations captured by the stubs below: the Authorization header
    // on the retried request, and the token pair reported back through
    // the onTokenRefreshed callback.
    let retryAuth: string | null = null;
    let refreshedToken: string | null = null;
    let refreshedRefresh: string | null = null;

    const interceptor = refreshInterceptor({
      refreshToken: "rt_old",
      tokenEndpoint: "https://example.com/_emdash/api/oauth/token/refresh",
      onTokenRefreshed: (accessToken, refreshToken) => {
        refreshedToken = accessToken;
        refreshedRefresh = refreshToken;
      },
    });

    // Mock: first call returns 401, refresh endpoint returns wrapped envelope,
    // retry should use the new token
    let callCount = 0;
    const originalFetch = globalThis.fetch;
    // The refresh call goes through global fetch (not the transport), so the
    // global is swapped here and restored in the finally block below.
    globalThis.fetch = async (input: string | URL | Request) => {
      const url = typeof input === "string" ? input : input instanceof URL ? input.href : input.url;
      if (url.includes("/oauth/token/refresh")) {
        // Server wraps in { data: ... } via apiSuccess/unwrapResult
        return new Response(
          JSON.stringify({
            data: {
              access_token: "new_access",
              refresh_token: "new_refresh",
              expires_in: 3600,
            },
          }),
          { status: 200, headers: { "Content-Type": "application/json" } },
        );
      }
      // Anything else falls through to the real fetch.
      return originalFetch(input);
    };

    try {
      // Backend stub: 401 on the first attempt, 200 on the retry; records
      // the Authorization header carried by the retried request.
      const backend: Interceptor = async (req) => {
        callCount++;
        if (callCount === 1) {
          return new Response("unauthorized", { status: 401 });
        }
        retryAuth = req.headers.get("Authorization");
        return new Response("ok", { status: 200 });
      };

      const transport = createTransport({
        interceptors: [interceptor, backend],
      });

      const res = await transport.fetch(new Request("https://example.com/api/test"));
      // Exactly one retry: original 401 + retried 200.
      expect(res.status).toBe(200);
      expect(callCount).toBe(2);
      // The retry must use the unwrapped access token from the envelope.
      expect(retryAuth).toBe("Bearer new_access");
      expect(refreshedToken).toBe("new_access");
      expect(refreshedRefresh).toBe("new_refresh");
    } finally {
      // Always restore the real fetch so later tests are unaffected.
      globalThis.fetch = originalFetch;
    }
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Interceptor composition
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("interceptor composition", () => {
|
||||
it("csrf + token interceptors compose correctly", async () => {
|
||||
let capturedAuth: string | null = null;
|
||||
let capturedCsrf: string | null = null;
|
||||
|
||||
const capture: Interceptor = async (req) => {
|
||||
capturedAuth = req.headers.get("Authorization");
|
||||
capturedCsrf = req.headers.get("X-EmDash-Request");
|
||||
return new Response("ok");
|
||||
};
|
||||
|
||||
const transport = createTransport({
|
||||
interceptors: [csrfInterceptor(), tokenInterceptor("tok"), capture],
|
||||
});
|
||||
|
||||
await transport.fetch(new Request("https://example.com", { method: "POST" }));
|
||||
expect(capturedAuth).toBe("Bearer tok");
|
||||
expect(capturedCsrf).toBe("1");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,209 @@
|
||||
/**
|
||||
* Inline editor plugin-block round-trip tests.
|
||||
*
|
||||
* Regression tests for the bug where the visual-editing inline editor
|
||||
* coerced unknown Portable Text block types (e.g. `marketing.hero`) into
|
||||
* `pluginBlock` ProseMirror nodes that only carried `{ blockType, id }`,
|
||||
* silently dropping every other field. On save, `pmToPortableText` then
|
||||
* serialised the block back as `{ _type, _key, id }`, persisting the data
|
||||
* loss.
|
||||
*
|
||||
* The fix preserves all non-well-known fields on a `data` attribute and
|
||||
* spreads them back out during the PM → PT direction. See
|
||||
* `InlinePortableTextEditor.tsx` `case "pluginBlock"` and the unknown-block
|
||||
* fallback in `convertPTBlock`.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
_pmToPortableText as pmToPortableText,
|
||||
_portableTextToPM as portableTextToPM,
|
||||
} from "../../../src/components/InlinePortableTextEditor.js";
|
||||
|
||||
function pmDoc(...content: unknown[]) {
|
||||
return { type: "doc", content };
|
||||
}
|
||||
|
||||
describe("inline editor: PT → PM (unknown blocks)", () => {
|
||||
it("captures every non-well-known field into data", () => {
|
||||
const block = {
|
||||
_type: "marketing.hero",
|
||||
_key: "hero",
|
||||
headline: "Build products people want",
|
||||
subheadline: "The all-in-one platform",
|
||||
primaryCtaLabel: "Sign up",
|
||||
primaryCtaUrl: "/signup",
|
||||
centered: true,
|
||||
};
|
||||
|
||||
const pm = portableTextToPM([block]);
|
||||
const node = pm.content?.[0] as {
|
||||
type: string;
|
||||
attrs: { blockType: string; id: string; data: Record<string, unknown> };
|
||||
};
|
||||
|
||||
expect(node.type).toBe("pluginBlock");
|
||||
expect(node.attrs.blockType).toBe("marketing.hero");
|
||||
expect(node.attrs.id).toBe("");
|
||||
expect(node.attrs.data).toEqual({
|
||||
headline: "Build products people want",
|
||||
subheadline: "The all-in-one platform",
|
||||
primaryCtaLabel: "Sign up",
|
||||
primaryCtaUrl: "/signup",
|
||||
centered: true,
|
||||
});
|
||||
});
|
||||
|
||||
it("strips _-prefixed keys from data to prevent accumulation", () => {
|
||||
const block = {
|
||||
_type: "embed",
|
||||
_key: "k1",
|
||||
_internal: "should-strip",
|
||||
caption: "should-keep",
|
||||
};
|
||||
|
||||
const pm = portableTextToPM([block]);
|
||||
const node = pm.content?.[0] as { attrs: { data: Record<string, unknown> } };
|
||||
|
||||
expect(node.attrs.data).toEqual({ caption: "should-keep" });
|
||||
expect(node.attrs.data).not.toHaveProperty("_internal");
|
||||
});
|
||||
|
||||
it("uses url as a fallback for id", () => {
|
||||
const block = { _type: "youtube", _key: "k1", url: "https://youtu.be/abc" };
|
||||
|
||||
const pm = portableTextToPM([block]);
|
||||
const node = pm.content?.[0] as { attrs: { id: string } };
|
||||
|
||||
expect(node.attrs.id).toBe("https://youtu.be/abc");
|
||||
});
|
||||
});
|
||||
|
||||
describe("inline editor: PM → PT (pluginBlock)", () => {
|
||||
it("spreads data fields back into the PT block", () => {
|
||||
const doc = pmDoc({
|
||||
type: "pluginBlock",
|
||||
attrs: {
|
||||
blockType: "marketing.hero",
|
||||
id: "",
|
||||
data: { headline: "Hi", centered: true },
|
||||
},
|
||||
});
|
||||
|
||||
const blocks = pmToPortableText(doc);
|
||||
|
||||
expect(blocks[0]).toMatchObject({
|
||||
_type: "marketing.hero",
|
||||
id: "",
|
||||
headline: "Hi",
|
||||
centered: true,
|
||||
});
|
||||
});
|
||||
|
||||
it("data fields cannot overwrite _type or _key", () => {
|
||||
const doc = pmDoc({
|
||||
type: "pluginBlock",
|
||||
attrs: {
|
||||
blockType: "marketing.hero",
|
||||
id: "",
|
||||
data: { _type: "evil", _key: "evil", headline: "kept" },
|
||||
},
|
||||
});
|
||||
|
||||
const blocks = pmToPortableText(doc);
|
||||
|
||||
expect(blocks[0]!._type).toBe("marketing.hero");
|
||||
expect(blocks[0]!._key).not.toBe("evil");
|
||||
expect(blocks[0]).toMatchObject({ headline: "kept" });
|
||||
});
|
||||
|
||||
it("falls back blockType to 'embed' when missing", () => {
|
||||
const doc = pmDoc({
|
||||
type: "pluginBlock",
|
||||
attrs: { blockType: null, id: "u", data: {} },
|
||||
});
|
||||
|
||||
const blocks = pmToPortableText(doc);
|
||||
|
||||
expect(blocks[0]!._type).toBe("embed");
|
||||
});
|
||||
|
||||
it("handles non-object data gracefully", () => {
|
||||
// Defensive: data could be malformed if persisted from a buggy source.
|
||||
const doc = pmDoc({
|
||||
type: "pluginBlock",
|
||||
attrs: { blockType: "embed", id: "u", data: null },
|
||||
});
|
||||
|
||||
const blocks = pmToPortableText(doc);
|
||||
|
||||
expect(blocks[0]).toMatchObject({ _type: "embed", id: "u" });
|
||||
});
|
||||
});
|
||||
|
||||
describe("inline editor: round-trip preserves plugin block payloads", () => {
|
||||
it("a marketing.hero-shaped block survives PT → PM → PT intact", () => {
|
||||
const original = {
|
||||
_type: "marketing.hero",
|
||||
_key: "hero",
|
||||
headline: "Build products people want",
|
||||
subheadline: "The all-in-one platform",
|
||||
primaryCtaLabel: "Sign up",
|
||||
primaryCtaUrl: "/signup",
|
||||
secondaryCtaLabel: "Watch demo",
|
||||
secondaryCtaUrl: "/demo",
|
||||
centered: true,
|
||||
};
|
||||
|
||||
const pm = portableTextToPM([original]);
|
||||
const roundTripped = pmToPortableText(pm);
|
||||
|
||||
expect(roundTripped).toHaveLength(1);
|
||||
expect(roundTripped[0]).toMatchObject({
|
||||
_type: "marketing.hero",
|
||||
headline: "Build products people want",
|
||||
subheadline: "The all-in-one platform",
|
||||
primaryCtaLabel: "Sign up",
|
||||
primaryCtaUrl: "/signup",
|
||||
secondaryCtaLabel: "Watch demo",
|
||||
secondaryCtaUrl: "/demo",
|
||||
centered: true,
|
||||
});
|
||||
});
|
||||
|
||||
it("nested objects in unknown fields survive round-trip", () => {
|
||||
const original = {
|
||||
_type: "marketing.hero",
|
||||
_key: "hero",
|
||||
primaryCta: { label: "Sign up", url: "/signup" },
|
||||
image: { url: "/hero.png", alt: "Hero" },
|
||||
};
|
||||
|
||||
const pm = portableTextToPM([original]);
|
||||
const roundTripped = pmToPortableText(pm);
|
||||
|
||||
expect(roundTripped[0]).toMatchObject({
|
||||
_type: "marketing.hero",
|
||||
primaryCta: { label: "Sign up", url: "/signup" },
|
||||
image: { url: "/hero.png", alt: "Hero" },
|
||||
});
|
||||
});
|
||||
|
||||
it("repeated round-trips are stable (no _-key leakage)", () => {
|
||||
const original = {
|
||||
_type: "marketing.faq",
|
||||
_key: "faq",
|
||||
items: [{ question: "Q?", answer: "A." }],
|
||||
};
|
||||
|
||||
const rt1 = pmToPortableText(portableTextToPM([original]));
|
||||
const rt2 = pmToPortableText(portableTextToPM(rt1));
|
||||
|
||||
expect(rt2[0]).toMatchObject({
|
||||
_type: "marketing.faq",
|
||||
items: [{ question: "Q?", answer: "A." }],
|
||||
});
|
||||
expect(Object.keys(rt2[0]!).filter((k) => k.startsWith("_"))).toEqual(["_type", "_key"]);
|
||||
});
|
||||
});
|
||||
375
packages/core/tests/unit/config/secrets.test.ts
Normal file
375
packages/core/tests/unit/config/secrets.test.ts
Normal file
@@ -0,0 +1,375 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
EmDashSecretsError,
|
||||
IP_SALT_OPTION_KEY,
|
||||
PREVIEW_SECRET_OPTION_KEY,
|
||||
_clearSecretsCacheForTesting,
|
||||
fingerprintKey,
|
||||
generateEncryptionKey,
|
||||
parseEncryptionKeys,
|
||||
resolveSecrets,
|
||||
resolveSecretsCached,
|
||||
validateEncryptionKeyAtStartup,
|
||||
} from "../../../src/config/secrets.js";
|
||||
import { OptionsRepository } from "../../../src/database/repositories/options.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("config/secrets", () => {
  // Fresh SQLite test database per test; the module-level secrets cache is
  // cleared on both sides of each test so no resolution leaks across tests.
  let db: Kysely<Database>;

  beforeEach(async () => {
    db = await setupTestDatabase();
    _clearSecretsCacheForTesting();
  });

  afterEach(async () => {
    _clearSecretsCacheForTesting();
    await teardownTestDatabase(db);
  });

  describe("generateEncryptionKey", () => {
    it("emits the v1 prefix and a base64url body", () => {
      const key = generateEncryptionKey();
      expect(key.startsWith("emdash_enc_v1_")).toBe(true);
      const body = key.slice("emdash_enc_v1_".length);
      // 32 random bytes encode to exactly 43 base64url characters.
      expect(body).toHaveLength(43);
      expect(body).toMatch(/^[A-Za-z0-9_-]+$/);
    });

    it("produces unique values across calls", () => {
      const a = generateEncryptionKey();
      const b = generateEncryptionKey();
      expect(a).not.toBe(b);
    });
  });

  describe("parseEncryptionKeys", () => {
    it("returns null for unset / empty input", async () => {
      // Empty-ish inputs (undefined, empty, only separators/whitespace)
      // all mean "no key configured".
      expect(await parseEncryptionKeys(undefined)).toBeNull();
      expect(await parseEncryptionKeys("")).toBeNull();
      expect(await parseEncryptionKeys(",,,")).toBeNull();
      expect(await parseEncryptionKeys("   ")).toBeNull();
    });

    it("parses a single valid key into a {kid, key, raw} entry", async () => {
      const raw = generateEncryptionKey();
      const parsed = await parseEncryptionKeys(raw);
      expect(parsed).toHaveLength(1);
      expect(parsed?.[0]?.raw).toBe(raw);
      expect(parsed?.[0]?.key).toBeInstanceOf(Uint8Array);
      expect(parsed?.[0]?.key.byteLength).toBe(32);
      // kid is an 8-hex-char fingerprint of the key material.
      expect(parsed?.[0]?.kid).toMatch(/^[0-9a-f]{8}$/);
    });

    it("kid is stable across repeated calls and matches fingerprintKey()", async () => {
      // Kid is derived from decoded key bytes (canonicality is enforced
      // upstream, so raw <-> bytes is 1:1). Repeated parses of the same
      // canonical raw string must yield the same kid, and the standalone
      // fingerprintKey() helper must agree with parseEncryptionKeys().
      const raw = generateEncryptionKey();
      const a = await parseEncryptionKeys(raw);
      const b = await parseEncryptionKeys(raw);
      expect(a?.[0]?.kid).toBe(b?.[0]?.kid);
      expect(await fingerprintKey(raw)).toBe(a?.[0]?.kid);
    });

    it("parses comma-separated multi-key with whitespace tolerance", async () => {
      const a = generateEncryptionKey();
      const b = generateEncryptionKey();
      // Stray spaces and a trailing comma must be tolerated.
      const parsed = await parseEncryptionKeys(` ${a} , ${b} ,`);
      expect(parsed).toHaveLength(2);
      expect(parsed?.[0]?.raw).toBe(a);
      expect(parsed?.[1]?.raw).toBe(b);
    });

    it("dedupes keys with the same kid (paste mistakes)", async () => {
      const raw = generateEncryptionKey();
      const parsed = await parseEncryptionKeys(`${raw},${raw}`);
      expect(parsed).toHaveLength(1);
    });

    it("throws on a malformed prefix", async () => {
      await expect(parseEncryptionKeys("not_a_key")).rejects.toBeInstanceOf(EmDashSecretsError);
    });

    it("throws on a malformed body (too short)", async () => {
      await expect(parseEncryptionKeys("emdash_enc_v1_tooShort")).rejects.toBeInstanceOf(
        EmDashSecretsError,
      );
    });

    it("throws on a malformed body (non base64url chars)", async () => {
      // 43 chars with a bad character
      const bad = "emdash_enc_v1_" + "!".repeat(43);
      await expect(parseEncryptionKeys(bad)).rejects.toBeInstanceOf(EmDashSecretsError);
    });

    it("throws if any entry in a multi-key list is malformed", async () => {
      const good = generateEncryptionKey();
      await expect(parseEncryptionKeys(`${good},not_a_key`)).rejects.toBeInstanceOf(
        EmDashSecretsError,
      );
    });

    it("rejects non-canonical base64url so kid stays stable per key material", async () => {
      // 32-byte keys encode to 43 base64url chars. The 43rd char encodes
      // 4 bits of which only the high 2 are meaningful — the low 2 bits
      // must be zero in canonical encoding. Letters whose 6-bit value
      // has zero low-2-bits: A(0), E(4), I(8), M(12), Q(16), U(20),
      // Y(24), c(28), g(32), k(36), o(40), s(44), w(48), 0(52), 4(56),
      // 8(60). Anything else in the last position encodes bits that
      // canonical encoding would set to zero — same decoded bytes,
      // different raw string. We reject these to keep kid 1:1 with
      // key material.
      //
      // "A".repeat(43) is canonical (all-zero key). Replace the last
      // char with "B" (value 1) to get a non-canonical string with
      // the same decoded bytes.
      const canonical = `emdash_enc_v1_${"A".repeat(43)}`;
      const nonCanonical = `emdash_enc_v1_${"A".repeat(42)}B`;

      // Both have the right shape so we know we're testing the
      // canonical check, not the regex.
      expect(canonical).toMatch(/^emdash_enc_v1_[A-Za-z0-9_-]{43}$/);
      expect(nonCanonical).toMatch(/^emdash_enc_v1_[A-Za-z0-9_-]{43}$/);

      // Canonical form parses fine.
      await expect(parseEncryptionKeys(canonical)).resolves.toBeTruthy();

      // Non-canonical form is rejected.
      await expect(parseEncryptionKeys(nonCanonical)).rejects.toThrow(EmDashSecretsError);
    });
  });

  describe("resolveSecrets", () => {
    it("default path generates and persists IP salt + preview secret", async () => {
      const result = await resolveSecrets({ db, env: {} });

      expect(result.ipSaltSource).toBe("db");
      expect(result.previewSecretSource).toBe("db");
      expect(result.ipSalt.length).toBeGreaterThan(0);
      expect(result.previewSecret.length).toBeGreaterThan(0);

      // The generated values must be persisted in the options table.
      const repo = new OptionsRepository(db);
      expect(await repo.get<string>(IP_SALT_OPTION_KEY)).toBe(result.ipSalt);
      expect(await repo.get<string>(PREVIEW_SECRET_OPTION_KEY)).toBe(result.previewSecret);
    });

    it("does not consult EMDASH_ENCRYPTION_KEY (a malformed key cannot break preview/comments)", async () => {
      // Regression: previously a malformed EMDASH_ENCRYPTION_KEY was
      // parsed inside resolveSecrets and the throw propagated through
      // request-context middleware as a 500 to anonymous visitors with
      // stale `?_preview=` URLs. The key is validated separately at
      // startup now; resolveSecrets must not gate on it.
      const result = await resolveSecrets({
        db,
        env: { EMDASH_ENCRYPTION_KEY: "not_a_valid_key" },
      });
      expect(result.previewSecret.length).toBeGreaterThan(0);
      expect(result.ipSalt.length).toBeGreaterThan(0);
    });

    it("env override wins for preview secret and ip salt", async () => {
      const result = await resolveSecrets({
        db,
        env: {
          EMDASH_PREVIEW_SECRET: "env-preview",
          EMDASH_IP_SALT: "env-ip-salt",
        },
      });
      expect(result.previewSecret).toBe("env-preview");
      expect(result.previewSecretSource).toBe("env");
      expect(result.ipSalt).toBe("env-ip-salt");
      expect(result.ipSaltSource).toBe("env");

      // And nothing was written to the options table on the env path.
      const repo = new OptionsRepository(db);
      expect(await repo.get<string>(IP_SALT_OPTION_KEY)).toBeNull();
      expect(await repo.get<string>(PREVIEW_SECRET_OPTION_KEY)).toBeNull();
    });

    it("legacy PREVIEW_SECRET fallback works (unprefixed name)", async () => {
      const result = await resolveSecrets({
        db,
        env: { PREVIEW_SECRET: "legacy-preview" },
      });
      expect(result.previewSecret).toBe("legacy-preview");
      expect(result.previewSecretSource).toBe("env");
    });

    it("legacy EMDASH_AUTH_SECRET fallback for IP salt is honored", async () => {
      // Prior code derived the IP salt from EMDASH_AUTH_SECRET. Existing
      // installs that have only EMDASH_AUTH_SECRET set must keep the
      // same salt — otherwise their existing IP-bucket rate-limit data
      // rotates uselessly on upgrade.
      const result = await resolveSecrets({
        db,
        env: { EMDASH_AUTH_SECRET: "legacy-auth" },
      });
      expect(result.ipSalt).toBe("legacy-auth");
      expect(result.ipSaltSource).toBe("env");
    });

    it("EMDASH_IP_SALT wins over EMDASH_AUTH_SECRET fallback", async () => {
      const result = await resolveSecrets({
        db,
        env: {
          EMDASH_IP_SALT: "explicit-salt",
          EMDASH_AUTH_SECRET: "legacy-auth",
        },
      });
      expect(result.ipSalt).toBe("explicit-salt");
    });

    it("idempotent: repeated calls return the same DB-stored values", async () => {
      const a = await resolveSecrets({ db, env: {} });
      const b = await resolveSecrets({ db, env: {} });
      expect(a.ipSalt).toBe(b.ipSalt);
      expect(a.previewSecret).toBe(b.previewSecret);
    });

    it("repeated first-resolves are idempotent (sequential test of convergence)", async () => {
      // Five sequential first-resolves on a fresh DB should converge.
      // Note: better-sqlite3 is synchronous, so this doesn't exercise
      // genuine cross-process concurrency. The cross-process atomicity
      // is provided by `INSERT ... ON CONFLICT DO NOTHING` at the DB
      // layer; see the lost-race test below for in-process coverage
      // of the re-read path.
      const promises: Promise<Awaited<ReturnType<typeof resolveSecrets>>>[] = [];
      for (let i = 0; i < 5; i++) {
        promises.push(resolveSecrets({ db, env: {} }));
      }
      const results = await Promise.all(promises);
      const ipSalts = new Set(results.map((r) => r.ipSalt));
      const previews = new Set(results.map((r) => r.previewSecret));
      expect(ipSalts.size).toBe(1);
      expect(previews.size).toBe(1);
    });

    it("returns the existing row when first-read finds one already populated", async () => {
      // Pre-populate the row, then resolve. Exercises the early-return
      // branch in `ensureGeneratedOption` where the first read hits.
      const winnerSalt = "row-already-populated";
      const repo = new OptionsRepository(db);
      await repo.set(IP_SALT_OPTION_KEY, winnerSalt);

      const result = await resolveSecrets({ db, env: {} });
      expect(result.ipSalt).toBe(winnerSalt);
      expect(result.ipSaltSource).toBe("db");
    });

    it("converges via lost-race re-read when setIfAbsent reports no insert", async () => {
      // Simulate a genuine cross-process race: caller A reads (no row),
      // caller B inserts the winner, caller A's setIfAbsent loses
      // (returns false). A then re-reads and converges on B's value.
      //
      // We stub `setIfAbsent` to inject the "concurrent process won"
      // behavior on the IP-salt key specifically (the resolver also
      // does this for preview secret in parallel; we need to target
      // one to assert).
      const winnerSalt = "concurrent-process-won";
      const realRepo = new OptionsRepository(db);
      const stubRepo = Object.create(realRepo) as OptionsRepository;
      stubRepo.setIfAbsent = async <T>(name: string, value: T) => {
        if (name === IP_SALT_OPTION_KEY) {
          // Simulate "winner" inserting first; tell our caller the
          // insert didn't take so it falls through to the re-read.
          await realRepo.set(name, winnerSalt);
          return false;
        }
        return realRepo.setIfAbsent(name, value);
      };

      const result = await resolveSecrets({ db, env: {}, _repo: stubRepo });
      expect(result.ipSalt).toBe(winnerSalt);
      expect(result.ipSaltSource).toBe("db");
    });

    it("throws SECRET_PERSIST_FAILED when setIfAbsent loses but the row is empty after re-read", async () => {
      // Pathological case: setIfAbsent says "didn't insert" but the
      // row is still missing. This shouldn't happen in practice with
      // a sane DB, but the resolver guards against it rather than
      // looping forever or returning an empty string.
      const realRepo = new OptionsRepository(db);
      const stubRepo = Object.create(realRepo) as OptionsRepository;
      stubRepo.setIfAbsent = async () => false; // Always claim no-op, no row appears.

      await expect(resolveSecrets({ db, env: {}, _repo: stubRepo })).rejects.toThrow(
        /SECRET_PERSIST_FAILED|Failed to persist/,
      );
    });
  });

  describe("validateEncryptionKeyAtStartup", () => {
    it("returns true for an unset key", async () => {
      expect(await validateEncryptionKeyAtStartup({})).toBe(true);
    });

    it("returns true for a valid key", async () => {
      const key = generateEncryptionKey();
      expect(await validateEncryptionKeyAtStartup({ EMDASH_ENCRYPTION_KEY: key })).toBe(true);
    });

    it("returns false (and does not throw) for a malformed key, logging an operator-facing message", async () => {
      // Capture console.error calls manually; restored in finally so a
      // failing assertion cannot leak the override into later tests.
      const errors: unknown[][] = [];
      const original = console.error;
      console.error = (...args: unknown[]) => {
        errors.push(args);
      };
      try {
        const result = await validateEncryptionKeyAtStartup({
          EMDASH_ENCRYPTION_KEY: "not_a_valid_key",
        });
        expect(result).toBe(false);
        expect(errors).toHaveLength(1);
        expect(String(errors[0]?.[0])).toMatch(/EMDASH_ENCRYPTION_KEY is invalid/);
      } finally {
        console.error = original;
      }
    });
  });

  describe("fingerprintKey", () => {
    it("agrees with parseEncryptionKeys on canonical input", async () => {
      const raw = generateEncryptionKey();
      const parsed = await parseEncryptionKeys(raw);
      expect(await fingerprintKey(raw)).toBe(parsed?.[0]?.kid);
    });

    it("rejects non-canonical base64url (so the CLI can't print kids the runtime would refuse)", async () => {
      const nonCanonical = `emdash_enc_v1_${"A".repeat(42)}B`;
      await expect(fingerprintKey(nonCanonical)).rejects.toBeInstanceOf(EmDashSecretsError);
    });

    it("rejects a malformed prefix", async () => {
      await expect(fingerprintKey("not_a_key")).rejects.toBeInstanceOf(EmDashSecretsError);
    });

    it("rejects bodies of the wrong length", async () => {
      await expect(fingerprintKey("emdash_enc_v1_tooShort")).rejects.toBeInstanceOf(
        EmDashSecretsError,
      );
    });
  });

  describe("resolveSecretsCached", () => {
    it("memoizes per-db so multiple callers share one resolution promise", async () => {
      // First caller starts the resolution; second caller piggybacks.
      // We can verify they share a value (and the cache key is the db
      // instance) by comparing against a freshly cleared cache.
      const a = await resolveSecretsCached(db);
      const b = await resolveSecretsCached(db);
      expect(a).toBe(b);

      _clearSecretsCacheForTesting();
      const c = await resolveSecretsCached(db);
      // Different cache entry, but same persisted DB values.
      expect(c).not.toBe(a);
      expect(c.ipSalt).toBe(a.ipSalt);
      expect(c.previewSecret).toBe(a.previewSecret);
    });
  });
});
|
||||
60
packages/core/tests/unit/converters/image-dimensions.test.ts
Normal file
60
packages/core/tests/unit/converters/image-dimensions.test.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { portableTextToProsemirror } from "../../../src/content/converters/portable-text-to-prosemirror.js";
|
||||
import { prosemirrorToPortableText } from "../../../src/content/converters/prosemirror-to-portable-text.js";
|
||||
import type { PortableTextImageBlock } from "../../../src/content/converters/types.js";
|
||||
|
||||
describe("Image dimension round-trip", () => {
|
||||
const imageBlock: PortableTextImageBlock = {
|
||||
_type: "image",
|
||||
_key: "abc123",
|
||||
asset: { _ref: "media-123", url: "https://example.com/photo.jpg" },
|
||||
alt: "A photo",
|
||||
caption: "My caption",
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
displayWidth: 400,
|
||||
displayHeight: 225,
|
||||
};
|
||||
|
||||
it("preserves displayWidth and displayHeight through PT → PM → PT", () => {
|
||||
// PT → PM
|
||||
const pm = portableTextToProsemirror([imageBlock]);
|
||||
const imageNode = pm.content[0];
|
||||
|
||||
expect(imageNode.type).toBe("image");
|
||||
expect(imageNode.attrs?.displayWidth).toBe(400);
|
||||
expect(imageNode.attrs?.displayHeight).toBe(225);
|
||||
expect(imageNode.attrs?.width).toBe(1920);
|
||||
expect(imageNode.attrs?.height).toBe(1080);
|
||||
|
||||
// PM → PT
|
||||
const pt = prosemirrorToPortableText(pm);
|
||||
const restored = pt[0] as PortableTextImageBlock;
|
||||
|
||||
expect(restored._type).toBe("image");
|
||||
expect(restored.displayWidth).toBe(400);
|
||||
expect(restored.displayHeight).toBe(225);
|
||||
expect(restored.width).toBe(1920);
|
||||
expect(restored.height).toBe(1080);
|
||||
});
|
||||
|
||||
it("handles images without display dimensions", () => {
|
||||
const noDisplayDims: PortableTextImageBlock = {
|
||||
_type: "image",
|
||||
_key: "def456",
|
||||
asset: { _ref: "media-456", url: "https://example.com/other.jpg" },
|
||||
width: 800,
|
||||
height: 600,
|
||||
};
|
||||
|
||||
const pm = portableTextToProsemirror([noDisplayDims]);
|
||||
const pt = prosemirrorToPortableText(pm);
|
||||
const restored = pt[0] as PortableTextImageBlock;
|
||||
|
||||
expect(restored.displayWidth).toBeUndefined();
|
||||
expect(restored.displayHeight).toBeUndefined();
|
||||
expect(restored.width).toBe(800);
|
||||
expect(restored.height).toBe(600);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,92 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { portableTextToProsemirror } from "../../../src/content/converters/portable-text-to-prosemirror.js";
|
||||
import type { PortableTextBlock } from "../../../src/content/converters/types.js";
|
||||
|
||||
describe("Image blocks without asset wrapper", () => {
|
||||
it("does not crash when an image block has url at the top level instead of inside asset", () => {
|
||||
// This is the format that can originate from migrations or third-party imports
|
||||
// (e.g. Ghost → Portable Text). Without the fix, accessing block.asset.url
|
||||
// throws TypeError: Cannot read properties of undefined (reading 'url').
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "block",
|
||||
_key: "b1",
|
||||
style: "normal",
|
||||
children: [{ _type: "span", _key: "s1", text: "Before image", marks: [] }],
|
||||
markDefs: [],
|
||||
},
|
||||
{
|
||||
_type: "image",
|
||||
_key: "img1",
|
||||
url: "https://example.com/photo.jpg",
|
||||
alt: "A photo without asset wrapper",
|
||||
} as unknown as PortableTextBlock,
|
||||
{
|
||||
_type: "block",
|
||||
_key: "b2",
|
||||
style: "normal",
|
||||
children: [{ _type: "span", _key: "s2", text: "After image", marks: [] }],
|
||||
markDefs: [],
|
||||
},
|
||||
];
|
||||
|
||||
const result = portableTextToProsemirror(blocks);
|
||||
|
||||
expect(result.type).toBe("doc");
|
||||
expect(result.content).toHaveLength(3);
|
||||
});
|
||||
|
||||
it("extracts src and alt from top-level url when asset is missing", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "image",
|
||||
_key: "img1",
|
||||
url: "https://example.com/photo.jpg",
|
||||
alt: "A test image",
|
||||
} as unknown as PortableTextBlock,
|
||||
];
|
||||
|
||||
const result = portableTextToProsemirror(blocks);
|
||||
const imageNode = result.content[0];
|
||||
|
||||
expect(imageNode.type).toBe("image");
|
||||
expect(imageNode.attrs?.src).toBe("https://example.com/photo.jpg");
|
||||
expect(imageNode.attrs?.alt).toBe("A test image");
|
||||
});
|
||||
|
||||
it("handles image block with neither asset nor url gracefully", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "image",
|
||||
_key: "img1",
|
||||
} as unknown as PortableTextBlock,
|
||||
];
|
||||
|
||||
const result = portableTextToProsemirror(blocks);
|
||||
const imageNode = result.content[0];
|
||||
|
||||
expect(imageNode.type).toBe("image");
|
||||
expect(imageNode.attrs?.src).toBe("");
|
||||
expect(imageNode.attrs?.alt).toBe("");
|
||||
});
|
||||
|
||||
it("still converts well-formed image blocks with asset wrapper correctly", () => {
|
||||
const blocks: PortableTextBlock[] = [
|
||||
{
|
||||
_type: "image",
|
||||
_key: "img1",
|
||||
asset: { _ref: "media-123", url: "https://example.com/photo.jpg" },
|
||||
alt: "A proper image",
|
||||
},
|
||||
];
|
||||
|
||||
const result = portableTextToProsemirror(blocks);
|
||||
const imageNode = result.content[0];
|
||||
|
||||
expect(imageNode.type).toBe("image");
|
||||
expect(imageNode.attrs?.src).toBe("https://example.com/photo.jpg");
|
||||
expect(imageNode.attrs?.alt).toBe("A proper image");
|
||||
expect(imageNode.attrs?.mediaId).toBe("media-123");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,74 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { sql } from "kysely";
|
||||
import { afterEach, beforeEach, describe, expect, it } from "vitest";
|
||||
|
||||
import { createDatabase } from "../../../../src/database/connection.js";
|
||||
import { down, up } from "../../../../src/database/migrations/031_bylines.js";
|
||||
import type { Database } from "../../../../src/database/types.js";
|
||||
|
||||
describe("031_bylines migration", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
|
||||
await db.schema
|
||||
.createTable("users")
|
||||
.addColumn("id", "text", (col) => col.primaryKey())
|
||||
.execute();
|
||||
await db.schema
|
||||
.createTable("media")
|
||||
.addColumn("id", "text", (col) => col.primaryKey())
|
||||
.execute();
|
||||
|
||||
await db.schema
|
||||
.createTable("ec_posts")
|
||||
.addColumn("id", "text", (col) => col.primaryKey())
|
||||
.execute();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
it("adds byline tables and primary_byline_id to existing content tables", async () => {
|
||||
await up(db);
|
||||
|
||||
const tables = await db.introspection.getTables();
|
||||
const tableNames = tables.map((t) => t.name);
|
||||
expect(tableNames).toContain("_emdash_bylines");
|
||||
expect(tableNames).toContain("_emdash_content_bylines");
|
||||
|
||||
const contentTable = tables.find((t) => t.name === "ec_posts");
|
||||
expect(contentTable).toBeDefined();
|
||||
expect(contentTable?.columns.map((c) => c.name)).toContain("primary_byline_id");
|
||||
|
||||
const idx = await sql<{ name: string }>`
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE type = 'index' AND name = 'idx_ec_posts_primary_byline'
|
||||
`.execute(db);
|
||||
expect(idx.rows).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("reverts added tables, indexes, and columns", async () => {
|
||||
await up(db);
|
||||
await down(db);
|
||||
|
||||
const tables = await db.introspection.getTables();
|
||||
const tableNames = tables.map((t) => t.name);
|
||||
expect(tableNames).not.toContain("_emdash_bylines");
|
||||
expect(tableNames).not.toContain("_emdash_content_bylines");
|
||||
|
||||
const contentTable = tables.find((t) => t.name === "ec_posts");
|
||||
expect(contentTable).toBeDefined();
|
||||
expect(contentTable?.columns.map((c) => c.name)).not.toContain("primary_byline_id");
|
||||
|
||||
const idx = await sql<{ name: string }>`
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE type = 'index' AND name = 'idx_ec_posts_primary_byline'
|
||||
`.execute(db);
|
||||
expect(idx.rows).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
256
packages/core/tests/unit/database/repositories/byline.test.ts
Normal file
256
packages/core/tests/unit/database/repositories/byline.test.ts
Normal file
@@ -0,0 +1,256 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { BylineRepository } from "../../../../src/database/repositories/byline.js";
|
||||
import { ContentRepository } from "../../../../src/database/repositories/content.js";
|
||||
import type { Database } from "../../../../src/database/types.js";
|
||||
import { SQL_BATCH_SIZE } from "../../../../src/utils/chunks.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";
|
||||
|
||||
describe("BylineRepository", () => {
|
||||
let db: Kysely<Database>;
|
||||
let bylineRepo: BylineRepository;
|
||||
let contentRepo: ContentRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
bylineRepo = new BylineRepository(db);
|
||||
contentRepo = new ContentRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("creates and reads bylines", async () => {
|
||||
const created = await bylineRepo.create({
|
||||
slug: "jane-doe",
|
||||
displayName: "Jane Doe",
|
||||
isGuest: true,
|
||||
});
|
||||
|
||||
expect(created.slug).toBe("jane-doe");
|
||||
expect(created.displayName).toBe("Jane Doe");
|
||||
expect(created.isGuest).toBe(true);
|
||||
|
||||
const foundById = await bylineRepo.findById(created.id);
|
||||
expect(foundById?.id).toBe(created.id);
|
||||
|
||||
const foundBySlug = await bylineRepo.findBySlug("jane-doe");
|
||||
expect(foundBySlug?.id).toBe(created.id);
|
||||
|
||||
const foundByUser = await bylineRepo.findByUserId("missing-user");
|
||||
expect(foundByUser).toBeNull();
|
||||
});
|
||||
|
||||
it("supports updates and paginated listing", async () => {
|
||||
const alpha = await bylineRepo.create({
|
||||
slug: "alpha",
|
||||
displayName: "Alpha Writer",
|
||||
isGuest: true,
|
||||
});
|
||||
await bylineRepo.create({
|
||||
slug: "beta",
|
||||
displayName: "Beta Writer",
|
||||
isGuest: false,
|
||||
});
|
||||
|
||||
const updated = await bylineRepo.update(alpha.id, {
|
||||
displayName: "Alpha Updated",
|
||||
websiteUrl: "https://example.com",
|
||||
});
|
||||
expect(updated?.displayName).toBe("Alpha Updated");
|
||||
expect(updated?.websiteUrl).toBe("https://example.com");
|
||||
|
||||
const searchResult = await bylineRepo.findMany({ search: "Beta" });
|
||||
expect(searchResult.items).toHaveLength(1);
|
||||
expect(searchResult.items[0]?.slug).toBe("beta");
|
||||
|
||||
const page1 = await bylineRepo.findMany({ limit: 1 });
|
||||
expect(page1.items).toHaveLength(1);
|
||||
expect(page1.nextCursor).toBeTruthy();
|
||||
|
||||
const page2 = await bylineRepo.findMany({ limit: 1, cursor: page1.nextCursor });
|
||||
expect(page2.items).toHaveLength(1);
|
||||
expect(page2.items[0]?.id).not.toBe(page1.items[0]?.id);
|
||||
});
|
||||
|
||||
it("assigns ordered bylines to content and syncs primary_byline_id", async () => {
|
||||
const lead = await bylineRepo.create({
|
||||
slug: "lead",
|
||||
displayName: "Lead Author",
|
||||
});
|
||||
const second = await bylineRepo.create({
|
||||
slug: "second",
|
||||
displayName: "Second Author",
|
||||
});
|
||||
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: "bylined-post",
|
||||
data: { title: "Bylined Post" },
|
||||
});
|
||||
|
||||
const assigned = await bylineRepo.setContentBylines("post", content.id, [
|
||||
{ bylineId: lead.id },
|
||||
{ bylineId: second.id, roleLabel: "Editor" },
|
||||
]);
|
||||
|
||||
expect(assigned).toHaveLength(2);
|
||||
expect(assigned[0]?.byline.id).toBe(lead.id);
|
||||
expect(assigned[0]?.sortOrder).toBe(0);
|
||||
expect(assigned[1]?.byline.id).toBe(second.id);
|
||||
expect(assigned[1]?.roleLabel).toBe("Editor");
|
||||
|
||||
const refreshed = await contentRepo.findById("post", content.id);
|
||||
expect(refreshed?.primaryBylineId).toBe(lead.id);
|
||||
});
|
||||
|
||||
it("reorders bylines and updates primary_byline_id", async () => {
|
||||
const first = await bylineRepo.create({
|
||||
slug: "first",
|
||||
displayName: "First",
|
||||
});
|
||||
const second = await bylineRepo.create({
|
||||
slug: "second-reorder",
|
||||
displayName: "Second",
|
||||
});
|
||||
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: "reordered-post",
|
||||
data: { title: "Reordered" },
|
||||
});
|
||||
|
||||
await bylineRepo.setContentBylines("post", content.id, [
|
||||
{ bylineId: first.id },
|
||||
{ bylineId: second.id },
|
||||
]);
|
||||
|
||||
await bylineRepo.setContentBylines("post", content.id, [
|
||||
{ bylineId: second.id },
|
||||
{ bylineId: first.id },
|
||||
]);
|
||||
|
||||
const refreshed = await contentRepo.findById("post", content.id);
|
||||
expect(refreshed?.primaryBylineId).toBe(second.id);
|
||||
|
||||
const bylines = await bylineRepo.getContentBylines("post", content.id);
|
||||
expect(bylines[0]?.byline.id).toBe(second.id);
|
||||
expect(bylines[1]?.byline.id).toBe(first.id);
|
||||
});
|
||||
|
||||
it("getContentBylinesMany handles more IDs than SQL_BATCH_SIZE", async () => {
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "batch-author",
|
||||
displayName: "Batch Author",
|
||||
});
|
||||
|
||||
// Create a few real content entries with bylines
|
||||
const realIds: string[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: `batch-post-${i}`,
|
||||
data: { title: `Batch Post ${i}` },
|
||||
});
|
||||
await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);
|
||||
realIds.push(content.id);
|
||||
}
|
||||
|
||||
// Build an ID list larger than SQL_BATCH_SIZE with the real IDs spread across chunks
|
||||
const ids: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
ids.push(`fake-id-${i}`);
|
||||
}
|
||||
// Place real IDs so they span different chunks
|
||||
ids[0] = realIds[0]!;
|
||||
ids[SQL_BATCH_SIZE - 1] = realIds[1]!;
|
||||
ids[SQL_BATCH_SIZE + 5] = realIds[2]!;
|
||||
|
||||
const result = await bylineRepo.getContentBylinesMany("post", ids);
|
||||
|
||||
// All 3 real entries should have their byline resolved
|
||||
expect(result.get(realIds[0]!)).toHaveLength(1);
|
||||
expect(result.get(realIds[1]!)).toHaveLength(1);
|
||||
expect(result.get(realIds[2]!)).toHaveLength(1);
|
||||
expect(result.get(realIds[0]!)![0]!.byline.id).toBe(byline.id);
|
||||
});
|
||||
|
||||
it("getContentBylinesMany does not duplicate credits for repeated content IDs", async () => {
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "duplicate-batch-author",
|
||||
displayName: "Duplicate Batch Author",
|
||||
});
|
||||
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: "duplicate-batch-post",
|
||||
data: { title: "Duplicate Batch Post" },
|
||||
});
|
||||
await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);
|
||||
|
||||
const ids: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
ids.push(`fake-id-${i}`);
|
||||
}
|
||||
ids[0] = content.id;
|
||||
ids[SQL_BATCH_SIZE + 5] = content.id;
|
||||
|
||||
const result = await bylineRepo.getContentBylinesMany("post", ids);
|
||||
|
||||
expect(result.get(content.id)).toHaveLength(1);
|
||||
expect(result.get(content.id)?.[0]?.byline.id).toBe(byline.id);
|
||||
});
|
||||
|
||||
it("findByUserIds handles more IDs than SQL_BATCH_SIZE", async () => {
|
||||
// Create a real user so the FK constraint is satisfied
|
||||
const userId = "user-batch-test";
|
||||
await db
|
||||
.insertInto("users" as any)
|
||||
.values({ id: userId, email: "batch@test.com", name: "Batch", role: 50 })
|
||||
.execute();
|
||||
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "user-batch",
|
||||
displayName: "User Batch",
|
||||
userId,
|
||||
});
|
||||
|
||||
// Build a user ID list larger than SQL_BATCH_SIZE
|
||||
const userIds: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
userIds.push(`user-fake-${i}`);
|
||||
}
|
||||
userIds[SQL_BATCH_SIZE + 5] = userId;
|
||||
|
||||
const result = await bylineRepo.findByUserIds(userIds);
|
||||
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(userId)?.id).toBe(byline.id);
|
||||
});
|
||||
|
||||
it("deletes byline, removes links, and nulls primary_byline_id", async () => {
|
||||
const byline = await bylineRepo.create({
|
||||
slug: "delete-me",
|
||||
displayName: "Delete Me",
|
||||
});
|
||||
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: "delete-byline-post",
|
||||
data: { title: "Delete Byline" },
|
||||
});
|
||||
|
||||
await bylineRepo.setContentBylines("post", content.id, [{ bylineId: byline.id }]);
|
||||
|
||||
const deleted = await bylineRepo.delete(byline.id);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
const unresolved = await bylineRepo.getContentBylines("post", content.id);
|
||||
expect(unresolved).toHaveLength(0);
|
||||
|
||||
const refreshed = await contentRepo.findById("post", content.id);
|
||||
expect(refreshed?.primaryBylineId).toBeNull();
|
||||
});
|
||||
});
|
||||
611
packages/core/tests/unit/database/repositories/content.test.ts
Normal file
611
packages/core/tests/unit/database/repositories/content.test.ts
Normal file
@@ -0,0 +1,611 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { ContentRepository } from "../../../../src/database/repositories/content.js";
|
||||
import { RevisionRepository } from "../../../../src/database/repositories/revision.js";
|
||||
import { EmDashValidationError } from "../../../../src/database/repositories/types.js";
|
||||
import type { Database } from "../../../../src/database/types.js";
|
||||
import { createPostFixture, createPageFixture } from "../../../utils/fixtures.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";
|
||||
|
||||
// Regex patterns for ID validation
|
||||
const ULID_FORMAT_REGEX = /^[0-9A-Z]+$/i;
|
||||
|
||||
describe("ContentRepository", () => {
|
||||
let db: Kysely<Database>;
|
||||
let repo: ContentRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
repo = new ContentRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
describe("create()", () => {
|
||||
it("should create content with valid data", async () => {
|
||||
const input = createPostFixture();
|
||||
const result = await repo.create(input);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result.id).toBeTruthy();
|
||||
expect(result.type).toBe("post");
|
||||
expect(result.slug).toBe("hello-world");
|
||||
expect(result.status).toBe("draft");
|
||||
expect(result.data).toEqual(input.data);
|
||||
});
|
||||
|
||||
it("should generate ULID for ID", async () => {
|
||||
const input = createPostFixture();
|
||||
const result = await repo.create(input);
|
||||
|
||||
// ULID is 26 characters long
|
||||
expect(result.id).toHaveLength(26);
|
||||
// ULID starts with timestamp (base32) - should be alphanumeric
|
||||
expect(result.id).toMatch(ULID_FORMAT_REGEX);
|
||||
});
|
||||
|
||||
it("should set default status to draft", async () => {
|
||||
const input = createPostFixture();
|
||||
delete (input as any).status;
|
||||
|
||||
const result = await repo.create(input);
|
||||
expect(result.status).toBe("draft");
|
||||
});
|
||||
|
||||
it("should throw validation error when type is missing", async () => {
|
||||
const input = createPostFixture();
|
||||
delete (input as any).type;
|
||||
|
||||
await expect(repo.create(input)).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
|
||||
it("should allow creating content without slug", async () => {
|
||||
const input = createPostFixture();
|
||||
delete (input as any).slug;
|
||||
|
||||
const result = await repo.create(input);
|
||||
expect(result.slug).toBeNull();
|
||||
});
|
||||
|
||||
it("should set createdAt and updatedAt timestamps", async () => {
|
||||
const input = createPostFixture();
|
||||
const result = await repo.create(input);
|
||||
|
||||
expect(result.createdAt).toBeTruthy();
|
||||
expect(result.updatedAt).toBeTruthy();
|
||||
});
|
||||
|
||||
it("should persist primaryBylineId on create", async () => {
|
||||
const result = await repo.create(
|
||||
createPostFixture({
|
||||
slug: "with-primary-byline",
|
||||
primaryBylineId: "byline_1",
|
||||
}),
|
||||
);
|
||||
|
||||
expect(result.primaryBylineId).toBe("byline_1");
|
||||
});
|
||||
});
|
||||
|
||||
describe("findById()", () => {
|
||||
it("should return content by ID", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const found = await repo.findById("post", created.id);
|
||||
|
||||
expect(found).toBeDefined();
|
||||
expect(found?.id).toBe(created.id);
|
||||
expect(found?.data).toEqual(created.data);
|
||||
});
|
||||
|
||||
it("should return null for non-existent ID", async () => {
|
||||
const found = await repo.findById("post", "01J9FAKE0000000000000000");
|
||||
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should exclude soft-deleted content", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
const found = await repo.findById("post", created.id);
|
||||
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should not return content of wrong type", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const found = await repo.findById("page", created.id);
|
||||
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findBySlug()", () => {
|
||||
it("should return content by slug", async () => {
|
||||
const input = createPostFixture({ slug: "test-slug" });
|
||||
const created = await repo.create(input);
|
||||
|
||||
const found = await repo.findBySlug("post", "test-slug");
|
||||
|
||||
expect(found).toBeDefined();
|
||||
expect(found?.id).toBe(created.id);
|
||||
expect(found?.slug).toBe("test-slug");
|
||||
});
|
||||
|
||||
it("should return null for non-existent slug", async () => {
|
||||
const found = await repo.findBySlug("post", "non-existent");
|
||||
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should not return content of wrong type", async () => {
|
||||
const input = createPostFixture({ slug: "test-slug" });
|
||||
await repo.create(input);
|
||||
|
||||
const found = await repo.findBySlug("page", "test-slug");
|
||||
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findMany()", () => {
|
||||
it("should return all content of specified type", async () => {
|
||||
await repo.create(createPostFixture({ slug: "post-1" }));
|
||||
await repo.create(createPostFixture({ slug: "post-2" }));
|
||||
await repo.create(createPageFixture({ slug: "page-1" }));
|
||||
|
||||
const result = await repo.findMany("post");
|
||||
|
||||
expect(result.items).toHaveLength(2);
|
||||
expect(result.items.every((item) => item.type === "post")).toBe(true);
|
||||
});
|
||||
|
||||
it("should filter by status", async () => {
|
||||
await repo.create(createPostFixture({ slug: "draft", status: "draft" }));
|
||||
await repo.create(createPostFixture({ slug: "published", status: "published" }));
|
||||
|
||||
const result = await repo.findMany("post", {
|
||||
where: { status: "published" },
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(1);
|
||||
expect(result.items[0].status).toBe("published");
|
||||
});
|
||||
|
||||
it("should filter by authorId", async () => {
|
||||
await repo.create(createPostFixture({ slug: "author1", authorId: "user1" }));
|
||||
await repo.create(createPostFixture({ slug: "author2", authorId: "user2" }));
|
||||
|
||||
const result = await repo.findMany("post", {
|
||||
where: { authorId: "user1" },
|
||||
});
|
||||
|
||||
expect(result.items).toHaveLength(1);
|
||||
expect(result.items[0].authorId).toBe("user1");
|
||||
});
|
||||
|
||||
it("should support cursor pagination", async () => {
|
||||
// Create multiple posts
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
await repo.create(createPostFixture({ slug: `post-${i}` }));
|
||||
}
|
||||
|
||||
// First page
|
||||
const page1 = await repo.findMany("post", { limit: 2 });
|
||||
expect(page1.items).toHaveLength(2);
|
||||
expect(page1.nextCursor).toBeTruthy();
|
||||
|
||||
// Second page
|
||||
const page2 = await repo.findMany("post", {
|
||||
limit: 2,
|
||||
cursor: page1.nextCursor,
|
||||
});
|
||||
expect(page2.items).toHaveLength(2);
|
||||
expect(page2.nextCursor).toBeTruthy();
|
||||
|
||||
// Verify no overlap
|
||||
const page1Ids = page1.items.map((i) => i.id);
|
||||
const page2Ids = page2.items.map((i) => i.id);
|
||||
expect(page1Ids).not.toContain(page2Ids[0]);
|
||||
});
|
||||
|
||||
it("should support ordering", async () => {
|
||||
// Create posts with specific dates
|
||||
const post1 = await repo.create(createPostFixture({ slug: "old-post" }));
|
||||
// Wait a bit to ensure different timestamps
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
const post2 = await repo.create(createPostFixture({ slug: "new-post" }));
|
||||
|
||||
// Default order (desc by createdAt)
|
||||
const resultDesc = await repo.findMany("post", {
|
||||
orderBy: { field: "createdAt", direction: "desc" },
|
||||
});
|
||||
expect(resultDesc.items[0].id).toBe(post2.id);
|
||||
|
||||
// Ascending order
|
||||
const resultAsc = await repo.findMany("post", {
|
||||
orderBy: { field: "createdAt", direction: "asc" },
|
||||
});
|
||||
expect(resultAsc.items[0].id).toBe(post1.id);
|
||||
});
|
||||
|
||||
it("should respect limit", async () => {
|
||||
for (let i = 1; i <= 10; i++) {
|
||||
await repo.create(createPostFixture({ slug: `post-${i}` }));
|
||||
}
|
||||
|
||||
const result = await repo.findMany("post", { limit: 5 });
|
||||
|
||||
expect(result.items).toHaveLength(5);
|
||||
});
|
||||
|
||||
it("should exclude soft-deleted content", async () => {
|
||||
const post1 = await repo.create(createPostFixture({ slug: "post-1" }));
|
||||
await repo.create(createPostFixture({ slug: "post-2" }));
|
||||
await repo.delete("post", post1.id);
|
||||
|
||||
const result = await repo.findMany("post");
|
||||
|
||||
expect(result.items).toHaveLength(1);
|
||||
expect(result.items[0].slug).toBe("post-2");
|
||||
});
|
||||
});
|
||||
|
||||
describe("update()", () => {
|
||||
it("should update content data", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Updated Title", content: [] },
|
||||
});
|
||||
|
||||
expect(updated.data).toEqual({ title: "Updated Title", content: [] });
|
||||
});
|
||||
|
||||
it("should update status", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
status: "published",
|
||||
});
|
||||
|
||||
expect(updated.status).toBe("published");
|
||||
});
|
||||
|
||||
it("should update slug", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
slug: "new-slug",
|
||||
});
|
||||
|
||||
expect(updated.slug).toBe("new-slug");
|
||||
});
|
||||
|
||||
it("should update publishedAt timestamp", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const publishedAt = new Date().toISOString();
|
||||
const updated = await repo.update("post", created.id, {
|
||||
publishedAt,
|
||||
});
|
||||
|
||||
expect(updated.publishedAt).toBe(publishedAt);
|
||||
});
|
||||
|
||||
it("should update updatedAt timestamp automatically", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
// Wait a bit to ensure different timestamp
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
data: { title: "Updated" },
|
||||
});
|
||||
|
||||
expect(updated.updatedAt).not.toBe(created.updatedAt);
|
||||
});
|
||||
|
||||
it("should throw error for non-existent content", async () => {
|
||||
await expect(repo.update("post", "01J9FAKE0000000000000000", { data: {} })).rejects.toThrow(
|
||||
"Content not found",
|
||||
);
|
||||
});
|
||||
|
||||
it("should update primaryBylineId", async () => {
|
||||
const created = await repo.create(
|
||||
createPostFixture({
|
||||
slug: "update-primary-byline",
|
||||
primaryBylineId: "byline_old",
|
||||
}),
|
||||
);
|
||||
|
||||
const updated = await repo.update("post", created.id, {
|
||||
primaryBylineId: "byline_new",
|
||||
});
|
||||
|
||||
expect(updated.primaryBylineId).toBe("byline_new");
|
||||
});
|
||||
});
|
||||
|
||||
describe("delete()", () => {
|
||||
it("should soft delete content", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
|
||||
const result = await repo.delete("post", created.id);
|
||||
|
||||
expect(result).toBe(true);
|
||||
|
||||
// Verify content is not returned by findById
|
||||
const found = await repo.findById("post", created.id);
|
||||
expect(found).toBeNull();
|
||||
});
|
||||
|
||||
it("should return false for non-existent content", async () => {
|
||||
const result = await repo.delete("post", "01J9FAKE0000000000000000");
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("should return false when deleting already deleted content", async () => {
|
||||
const input = createPostFixture();
|
||||
const created = await repo.create(input);
|
||||
await repo.delete("post", created.id);
|
||||
|
||||
const result = await repo.delete("post", created.id);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("count()", () => {
|
||||
it("should count all content of specified type", async () => {
|
||||
await repo.create(createPostFixture({ slug: "post-1" }));
|
||||
await repo.create(createPostFixture({ slug: "post-2" }));
|
||||
await repo.create(createPageFixture({ slug: "page-1" }));
|
||||
|
||||
const count = await repo.count("post");
|
||||
|
||||
expect(count).toBe(2);
|
||||
});
|
||||
|
||||
it("should count with status filter", async () => {
|
||||
await repo.create(createPostFixture({ slug: "draft", status: "draft" }));
|
||||
await repo.create(createPostFixture({ slug: "published", status: "published" }));
|
||||
|
||||
const count = await repo.count("post", { status: "published" });
|
||||
|
||||
expect(count).toBe(1);
|
||||
});
|
||||
|
||||
it("should count with authorId filter", async () => {
|
||||
await repo.create(createPostFixture({ slug: "author1", authorId: "user1" }));
|
||||
await repo.create(createPostFixture({ slug: "author2", authorId: "user2" }));
|
||||
|
||||
const count = await repo.count("post", { authorId: "user1" });
|
||||
|
||||
expect(count).toBe(1);
|
||||
});
|
||||
|
||||
it("should exclude soft-deleted content", async () => {
|
||||
const post1 = await repo.create(createPostFixture({ slug: "post-1" }));
|
||||
await repo.create(createPostFixture({ slug: "post-2" }));
|
||||
await repo.delete("post", post1.id);
|
||||
|
||||
const count = await repo.count("post");
|
||||
|
||||
expect(count).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("schedule()", () => {
|
||||
it("should set status to 'scheduled' for draft posts", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
|
||||
const updated = await repo.schedule("post", post.id, future);
|
||||
|
||||
expect(updated.status).toBe("scheduled");
|
||||
expect(updated.scheduledAt).toBe(future);
|
||||
});
|
||||
|
||||
it("should keep status 'published' for published posts", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
await repo.publish("post", post.id);
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
|
||||
const updated = await repo.schedule("post", post.id, future);
|
||||
|
||||
expect(updated.status).toBe("published");
|
||||
expect(updated.scheduledAt).toBe(future);
|
||||
});
|
||||
|
||||
it("should reject dates in the past", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const past = new Date(Date.now() - 86_400_000).toISOString();
|
||||
|
||||
await expect(repo.schedule("post", post.id, past)).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
|
||||
it("should reject invalid date strings", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
|
||||
await expect(repo.schedule("post", post.id, "not-a-date")).rejects.toThrow(
|
||||
EmDashValidationError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("unschedule()", () => {
|
||||
it("should revert scheduled draft to 'draft'", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", post.id, future);
|
||||
|
||||
const updated = await repo.unschedule("post", post.id);
|
||||
|
||||
expect(updated.status).toBe("draft");
|
||||
expect(updated.scheduledAt).toBeNull();
|
||||
});
|
||||
|
||||
it("should keep published posts as 'published'", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
await repo.publish("post", post.id);
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", post.id, future);
|
||||
|
||||
const updated = await repo.unschedule("post", post.id);
|
||||
|
||||
expect(updated.status).toBe("published");
|
||||
expect(updated.scheduledAt).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("setDraftRevision()", () => {
|
||||
it("sets the draft_revision_id so publish() picks it up", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const revisionRepo = new RevisionRepository(db);
|
||||
const draft = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: post.id,
|
||||
data: { ...post.data, title: "Staged for publish" },
|
||||
});
|
||||
|
||||
await repo.setDraftRevision("post", post.id, draft.id);
|
||||
|
||||
const afterStaging = await repo.findById("post", post.id);
|
||||
expect(afterStaging?.draftRevisionId).toBe(draft.id);
|
||||
|
||||
const published = await repo.publish("post", post.id);
|
||||
|
||||
expect(published.liveRevisionId).toBe(draft.id);
|
||||
expect(published.draftRevisionId).toBeNull();
|
||||
});
|
||||
|
||||
it("throws when the content item does not exist", async () => {
|
||||
await expect(
|
||||
repo.setDraftRevision("post", "01K0000000000000000000000", "01K0000000000000000000001"),
|
||||
).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
|
||||
it("throws when the revision does not exist", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
await expect(
|
||||
repo.setDraftRevision("post", post.id, "01K0000000000000000000001"),
|
||||
).rejects.toThrow(EmDashValidationError);
|
||||
});
|
||||
|
||||
it("throws when the revision belongs to a different content item", async () => {
|
||||
const post1 = await repo.create(createPostFixture({ slug: "one" }));
|
||||
const post2 = await repo.create(createPostFixture({ slug: "two" }));
|
||||
const revisionRepo = new RevisionRepository(db);
|
||||
const draft = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: post2.id,
|
||||
data: post2.data,
|
||||
});
|
||||
|
||||
await expect(repo.setDraftRevision("post", post1.id, draft.id)).rejects.toThrow(
|
||||
EmDashValidationError,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("publish() clears schedule", () => {
|
||||
it("should clear scheduled_at when publishing a scheduled draft", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", post.id, future);
|
||||
|
||||
const published = await repo.publish("post", post.id);
|
||||
|
||||
expect(published.status).toBe("published");
|
||||
expect(published.scheduledAt).toBeNull();
|
||||
});
|
||||
|
||||
it("should clear scheduled_at when publishing a published post with scheduled changes", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
await repo.publish("post", post.id);
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", post.id, future);
|
||||
|
||||
const republished = await repo.publish("post", post.id);
|
||||
|
||||
expect(republished.status).toBe("published");
|
||||
expect(republished.scheduledAt).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("findReadyToPublish()", () => {
|
||||
it("should find scheduled drafts past their time", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
// Schedule in the past by directly updating (schedule() rejects past dates)
|
||||
const past = new Date(Date.now() - 60_000).toISOString();
|
||||
await repo.update("post", post.id, { status: "scheduled", scheduledAt: past });
|
||||
|
||||
const ready = await repo.findReadyToPublish("post");
|
||||
|
||||
expect(ready).toHaveLength(1);
|
||||
expect(ready[0]!.id).toBe(post.id);
|
||||
});
|
||||
|
||||
it("should find published posts with past scheduled_at", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
await repo.publish("post", post.id);
|
||||
// Set scheduled_at in the past directly
|
||||
const past = new Date(Date.now() - 60_000).toISOString();
|
||||
await repo.update("post", post.id, { scheduledAt: past });
|
||||
|
||||
const ready = await repo.findReadyToPublish("post");
|
||||
|
||||
expect(ready).toHaveLength(1);
|
||||
expect(ready[0]!.id).toBe(post.id);
|
||||
});
|
||||
|
||||
it("should not include items with future scheduled_at", async () => {
|
||||
const post = await repo.create(createPostFixture());
|
||||
const future = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", post.id, future);
|
||||
|
||||
const ready = await repo.findReadyToPublish("post");
|
||||
|
||||
expect(ready).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("countScheduled()", () => {
|
||||
it("should count both scheduled drafts and published posts with scheduled_at", async () => {
|
||||
// Draft with schedule
|
||||
const draft = await repo.create(createPostFixture({ slug: "draft-scheduled" }));
|
||||
const future1 = new Date(Date.now() + 86_400_000).toISOString();
|
||||
await repo.schedule("post", draft.id, future1);
|
||||
|
||||
// Published with schedule
|
||||
const pub = await repo.create(createPostFixture({ slug: "pub-scheduled" }));
|
||||
await repo.publish("post", pub.id);
|
||||
const future2 = new Date(Date.now() + 172_800_000).toISOString();
|
||||
await repo.schedule("post", pub.id, future2);
|
||||
|
||||
// Unscheduled draft (should not be counted)
|
||||
await repo.create(createPostFixture({ slug: "plain-draft" }));
|
||||
|
||||
const count = await repo.countScheduled("post");
|
||||
|
||||
expect(count).toBe(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,60 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
decodeCursor,
|
||||
encodeCursor,
|
||||
InvalidCursorError,
|
||||
} from "../../../../src/database/repositories/types.js";
|
||||
|
||||
describe("decodeCursor", () => {
|
||||
it("round-trips a valid cursor", () => {
|
||||
const cursor = encodeCursor("2024-01-01", "01ABC");
|
||||
const decoded = decodeCursor(cursor);
|
||||
expect(decoded).toEqual({ orderValue: "2024-01-01", id: "01ABC" });
|
||||
});
|
||||
|
||||
it("throws InvalidCursorError on empty string", () => {
|
||||
expect(() => decodeCursor("")).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("throws InvalidCursorError on non-base64 input", () => {
|
||||
expect(() => decodeCursor("not-base64-!!!")).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("throws InvalidCursorError on base64 of malformed JSON", () => {
|
||||
const bad = Buffer.from("{not valid json").toString("base64");
|
||||
expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("throws InvalidCursorError on base64 JSON missing required fields", () => {
|
||||
const bad = Buffer.from(JSON.stringify({ wrong: "shape" })).toString("base64");
|
||||
expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("throws InvalidCursorError when id is not a string", () => {
|
||||
const bad = Buffer.from(JSON.stringify({ orderValue: "x", id: 42 })).toString("base64");
|
||||
expect(() => decodeCursor(bad)).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("rejects oversized cursors before attempting to decode (DoS guard)", () => {
|
||||
// MAX_CURSOR_LENGTH is 4096 inside the decoder. The MCP/REST schemas
|
||||
// cap earlier (2048), but the decoder is the last line of defense
|
||||
// for any caller that bypasses the schemas. A pre-decode rejection
|
||||
// avoids allocating O(N) bytes for `decodeBase64` on a hostile
|
||||
// input.
|
||||
const huge = "A".repeat(5000);
|
||||
expect(() => decodeCursor(huge)).toThrow(InvalidCursorError);
|
||||
});
|
||||
|
||||
it("error message truncates very long cursors", () => {
|
||||
const longish = "A".repeat(200);
|
||||
try {
|
||||
decodeCursor(longish);
|
||||
expect.fail("expected throw");
|
||||
} catch (error) {
|
||||
expect(error).toBeInstanceOf(InvalidCursorError);
|
||||
// The truncation cap is 50; the message itself stays short.
|
||||
expect((error as Error).message.length).toBeLessThan(120);
|
||||
}
|
||||
});
|
||||
});
|
||||
114
packages/core/tests/unit/database/repositories/seo.test.ts
Normal file
114
packages/core/tests/unit/database/repositories/seo.test.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { ContentRepository } from "../../../../src/database/repositories/content.js";
|
||||
import { SeoRepository } from "../../../../src/database/repositories/seo.js";
|
||||
import type { Database } from "../../../../src/database/types.js";
|
||||
import { SQL_BATCH_SIZE } from "../../../../src/utils/chunks.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../../utils/test-db.js";
|
||||
|
||||
describe("SeoRepository", () => {
|
||||
let db: Kysely<Database>;
|
||||
let seoRepo: SeoRepository;
|
||||
let contentRepo: ContentRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
// Enable SEO on the post collection — createCollection defaults has_seo to 0.
|
||||
await db
|
||||
.updateTable("_emdash_collections")
|
||||
.set({ has_seo: 1 })
|
||||
.where("slug", "=", "post")
|
||||
.execute();
|
||||
seoRepo = new SeoRepository(db);
|
||||
contentRepo = new ContentRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("getMany handles more IDs than SQL_BATCH_SIZE", async () => {
|
||||
// Create a few real content entries with SEO rows
|
||||
const realIds: string[] = [];
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: `seo-batch-post-${i}`,
|
||||
data: { title: `SEO Batch Post ${i}` },
|
||||
});
|
||||
await seoRepo.upsert("post", content.id, {
|
||||
title: `SEO Title ${i}`,
|
||||
description: `SEO Description ${i}`,
|
||||
});
|
||||
realIds.push(content.id);
|
||||
}
|
||||
|
||||
// Build an ID list larger than SQL_BATCH_SIZE with real IDs spread across chunks
|
||||
const ids: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
ids.push(`fake-id-${i}`);
|
||||
}
|
||||
ids[0] = realIds[0]!;
|
||||
ids[SQL_BATCH_SIZE - 1] = realIds[1]!;
|
||||
ids[SQL_BATCH_SIZE + 5] = realIds[2]!;
|
||||
|
||||
const result = await seoRepo.getMany("post", ids);
|
||||
|
||||
// All input IDs should be present in the result Map
|
||||
expect(result.size).toBe(ids.length);
|
||||
|
||||
// Real IDs should have their SEO data resolved
|
||||
expect(result.get(realIds[0]!)?.title).toBe("SEO Title 0");
|
||||
expect(result.get(realIds[1]!)?.title).toBe("SEO Title 1");
|
||||
expect(result.get(realIds[2]!)?.title).toBe("SEO Title 2");
|
||||
|
||||
// Fake IDs should get default values
|
||||
expect(result.get("fake-id-5")?.title).toBeNull();
|
||||
expect(result.get("fake-id-5")?.description).toBeNull();
|
||||
expect(result.get("fake-id-5")?.noIndex).toBe(false);
|
||||
});
|
||||
|
||||
it("getMany returns defaults for every input id when no rows exist", async () => {
|
||||
const ids: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
ids.push(`missing-id-${i}`);
|
||||
}
|
||||
|
||||
const result = await seoRepo.getMany("post", ids);
|
||||
|
||||
expect(result.size).toBe(ids.length);
|
||||
for (const id of ids) {
|
||||
const entry = result.get(id);
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry?.title).toBeNull();
|
||||
expect(entry?.description).toBeNull();
|
||||
expect(entry?.image).toBeNull();
|
||||
expect(entry?.canonical).toBeNull();
|
||||
expect(entry?.noIndex).toBe(false);
|
||||
}
|
||||
});
|
||||
|
||||
it("getMany deduplicates repeated content IDs without duplicate rows", async () => {
|
||||
const content = await contentRepo.create({
|
||||
type: "post",
|
||||
slug: "seo-duplicate-post",
|
||||
data: { title: "SEO Duplicate" },
|
||||
});
|
||||
await seoRepo.upsert("post", content.id, {
|
||||
title: "Duplicate SEO",
|
||||
});
|
||||
|
||||
const ids: string[] = [];
|
||||
for (let i = 0; i < SQL_BATCH_SIZE + 10; i++) {
|
||||
ids.push(`fake-id-${i}`);
|
||||
}
|
||||
ids[0] = content.id;
|
||||
ids[SQL_BATCH_SIZE + 5] = content.id;
|
||||
|
||||
const result = await seoRepo.getMany("post", ids);
|
||||
|
||||
// The real entry should resolve to its SEO row regardless of the duplicate input
|
||||
expect(result.get(content.id)?.title).toBe("Duplicate SEO");
|
||||
});
|
||||
});
|
||||
48
packages/core/tests/unit/db/dialect-runtime-imports.test.ts
Normal file
48
packages/core/tests/unit/db/dialect-runtime-imports.test.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
/**
|
||||
* Regression test for #741 ("Cannot find module 'kysely'" after build).
|
||||
*
|
||||
* The dialect runtime modules (db/sqlite.ts, db/libsql.ts, db/postgres.ts)
|
||||
* are bundled into the user's site dist via `noExternal: ["emdash"]` in
|
||||
* the Astro integration's Vite SSR config. If any of them uses CJS
|
||||
* `require("kysely")` (or another external) instead of a static `import`,
|
||||
* the bundler emits a literal `require("kysely")` call into the user's
|
||||
* dist chunks. At runtime under pnpm's strict node_modules layout, that
|
||||
* `require()` resolves from the user's `dist/server/chunks/` directory,
|
||||
* walks up looking for `node_modules/kysely`, doesn't find it (because
|
||||
* kysely is only a transitive dep of `emdash`), and throws
|
||||
* `MODULE_NOT_FOUND`.
|
||||
*
|
||||
* Static `import`s let Vite either externalize the dep correctly or pull
|
||||
* it into the bundle. Either outcome resolves at runtime; the dynamic
|
||||
* `require()` form does not. Keep these files static-import-only.
|
||||
*/
|
||||
describe("dialect runtime modules", () => {
|
||||
const dialectFiles = [
|
||||
fileURLToPath(new URL("../../../src/db/sqlite.ts", import.meta.url)),
|
||||
fileURLToPath(new URL("../../../src/db/libsql.ts", import.meta.url)),
|
||||
fileURLToPath(new URL("../../../src/db/postgres.ts", import.meta.url)),
|
||||
];
|
||||
|
||||
for (const file of dialectFiles) {
|
||||
it(`${file.split("/db/")[1]} does not use require() to load externals`, () => {
|
||||
const source = readFileSync(file, "utf-8");
|
||||
// Strip line comments (`//`), block comments (`/* … */`), and string
|
||||
// literals before scanning. We only care about actual code-level
|
||||
// `require(` calls; a docstring or inline comment that mentions
|
||||
// the historical bug should not trip the assertion.
|
||||
const codeOnly = source
|
||||
.replace(/\/\*[\s\S]*?\*\//g, "")
|
||||
.replace(/(^|[^:])\/\/.*$/gm, "$1")
|
||||
.replace(/(["'`])(?:\\.|(?!\1).)*\1/g, '""');
|
||||
// Any standalone require( call in these files re-introduces the
|
||||
// bug: the bundler leaves it as-is, and runtime resolution under
|
||||
// pnpm fails for transitive deps like `kysely`.
|
||||
expect(codeOnly).not.toMatch(/(?<![.\w])require\s*\(/);
|
||||
});
|
||||
}
|
||||
});
|
||||
348
packages/core/tests/unit/fields/all-fields.test.ts
Normal file
348
packages/core/tests/unit/fields/all-fields.test.ts
Normal file
@@ -0,0 +1,348 @@
|
||||
import { z } from "astro/zod";
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
text,
|
||||
textarea,
|
||||
number,
|
||||
boolean as booleanField,
|
||||
select,
|
||||
multiSelect,
|
||||
datetime,
|
||||
slug,
|
||||
image,
|
||||
file,
|
||||
reference,
|
||||
json,
|
||||
richText,
|
||||
portableText,
|
||||
} from "../../../src/fields/index.js";
|
||||
|
||||
// Test regex patterns
|
||||
const UPPERCASE_PATTERN_REGEX = /^[A-Z]+$/;
|
||||
const SLUG_UPPERCASE_PATTERN_REGEX = /^[A-Z_]+$/;
|
||||
|
||||
describe("Field Types", () => {
|
||||
describe("text", () => {
|
||||
it("should create basic text field", () => {
|
||||
const field = text();
|
||||
expect(field.type).toBe("text");
|
||||
expect(field.schema).toBeDefined();
|
||||
expect(field.ui?.widget).toBe("text");
|
||||
});
|
||||
|
||||
it("should validate required text", () => {
|
||||
const field = text({ required: true });
|
||||
expect(() => field.schema.parse("hello")).not.toThrow();
|
||||
expect(() => field.schema.parse(undefined)).toThrow();
|
||||
});
|
||||
|
||||
it("should validate optional text", () => {
|
||||
const field = text({ required: false });
|
||||
expect(() => field.schema.parse("hello")).not.toThrow();
|
||||
expect(() => field.schema.parse(undefined)).not.toThrow();
|
||||
});
|
||||
|
||||
it("should enforce minLength", () => {
|
||||
const field = text({ minLength: 5 });
|
||||
expect(() => field.schema.parse("hello")).not.toThrow();
|
||||
expect(() => field.schema.parse("hi")).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce maxLength", () => {
|
||||
const field = text({ maxLength: 10 });
|
||||
expect(() => field.schema.parse("hello")).not.toThrow();
|
||||
expect(() => field.schema.parse("hello world!")).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce pattern", () => {
|
||||
const field = text({ pattern: UPPERCASE_PATTERN_REGEX });
|
||||
expect(() => field.schema.parse("HELLO")).not.toThrow();
|
||||
expect(() => field.schema.parse("hello")).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("textarea", () => {
|
||||
it("should create textarea field", () => {
|
||||
const field = textarea();
|
||||
expect(field.type).toBe("textarea");
|
||||
expect(field.ui?.widget).toBe("textarea");
|
||||
expect(field.ui?.rows).toBe(6);
|
||||
});
|
||||
|
||||
it("should accept custom rows", () => {
|
||||
const field = textarea({ rows: 10 });
|
||||
expect(field.ui?.rows).toBe(10);
|
||||
});
|
||||
|
||||
it("should enforce length constraints", () => {
|
||||
const field = textarea({ minLength: 10, maxLength: 100 });
|
||||
expect(() => field.schema.parse("a".repeat(50))).not.toThrow();
|
||||
expect(() => field.schema.parse("short")).toThrow();
|
||||
expect(() => field.schema.parse("a".repeat(200))).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("number", () => {
|
||||
it("should create number field", () => {
|
||||
const field = number();
|
||||
expect(field.type).toBe("number");
|
||||
expect(field.ui?.widget).toBe("number");
|
||||
});
|
||||
|
||||
it("should validate numbers", () => {
|
||||
const field = number({ required: true });
|
||||
expect(() => field.schema.parse(42)).not.toThrow();
|
||||
expect(() => field.schema.parse(3.14)).not.toThrow();
|
||||
expect(() => field.schema.parse("42")).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce integer constraint", () => {
|
||||
const field = number({ integer: true });
|
||||
expect(() => field.schema.parse(42)).not.toThrow();
|
||||
expect(() => field.schema.parse(3.14)).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce min/max", () => {
|
||||
const field = number({ min: 0, max: 100 });
|
||||
expect(() => field.schema.parse(50)).not.toThrow();
|
||||
expect(() => field.schema.parse(-1)).toThrow();
|
||||
expect(() => field.schema.parse(101)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("boolean", () => {
|
||||
it("should create boolean field", () => {
|
||||
const field = booleanField();
|
||||
expect(field.type).toBe("boolean");
|
||||
expect(field.ui?.widget).toBe("boolean");
|
||||
});
|
||||
|
||||
it("should validate booleans", () => {
|
||||
const field = booleanField();
|
||||
expect(() => field.schema.parse(true)).not.toThrow();
|
||||
expect(() => field.schema.parse(false)).not.toThrow();
|
||||
expect(() => field.schema.parse("true")).toThrow();
|
||||
});
|
||||
|
||||
it("should apply default value", () => {
|
||||
const field = booleanField({ default: true });
|
||||
const result = field.schema.parse(undefined);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("select", () => {
|
||||
it("should create select field", () => {
|
||||
const field = select({ options: ["one", "two", "three"] as const });
|
||||
expect(field.type).toBe("select");
|
||||
expect(field.ui?.widget).toBe("select");
|
||||
});
|
||||
|
||||
it("should validate enum values", () => {
|
||||
const field = select({
|
||||
options: ["red", "green", "blue"] as const,
|
||||
required: true,
|
||||
});
|
||||
expect(() => field.schema.parse("red")).not.toThrow();
|
||||
expect(() => field.schema.parse("yellow")).toThrow();
|
||||
});
|
||||
|
||||
it("should apply default value", () => {
|
||||
const field = select({
|
||||
options: ["small", "medium", "large"] as const,
|
||||
default: "medium",
|
||||
});
|
||||
const result = field.schema.parse(undefined);
|
||||
expect(result).toBe("medium");
|
||||
});
|
||||
});
|
||||
|
||||
describe("multiSelect", () => {
|
||||
it("should create multiSelect field", () => {
|
||||
const field = multiSelect({ options: ["a", "b", "c"] as const });
|
||||
expect(field.type).toBe("multiSelect");
|
||||
expect(field.ui?.widget).toBe("multiSelect");
|
||||
});
|
||||
|
||||
it("should validate array of enum values", () => {
|
||||
const field = multiSelect({
|
||||
options: ["tag1", "tag2", "tag3"] as const,
|
||||
required: true,
|
||||
});
|
||||
expect(() => field.schema.parse(["tag1", "tag2"])).not.toThrow();
|
||||
expect(() => field.schema.parse(["tag1", "invalid"])).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce min/max selections", () => {
|
||||
const field = multiSelect({
|
||||
options: ["a", "b", "c", "d"] as const,
|
||||
min: 1,
|
||||
max: 3,
|
||||
});
|
||||
expect(() => field.schema.parse(["a", "b"])).not.toThrow();
|
||||
expect(() => field.schema.parse([])).toThrow();
|
||||
expect(() => field.schema.parse(["a", "b", "c", "d"])).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("datetime", () => {
|
||||
it("should create datetime field", () => {
|
||||
const field = datetime();
|
||||
expect(field.type).toBe("datetime");
|
||||
expect(field.ui?.widget).toBe("datetime");
|
||||
});
|
||||
|
||||
it("should validate dates", () => {
|
||||
const field = datetime({ required: true });
|
||||
expect(() => field.schema.parse(new Date())).not.toThrow();
|
||||
expect(() => field.schema.parse("2024-01-01")).toThrow();
|
||||
});
|
||||
|
||||
it("should enforce min/max dates", () => {
|
||||
const min = new Date("2024-01-01");
|
||||
const max = new Date("2024-12-31");
|
||||
const field = datetime({ min, max });
|
||||
|
||||
expect(() => field.schema.parse(new Date("2024-06-15"))).not.toThrow();
|
||||
expect(() => field.schema.parse(new Date("2023-12-31"))).toThrow();
|
||||
expect(() => field.schema.parse(new Date("2025-01-01"))).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("slug", () => {
|
||||
it("should create slug field", () => {
|
||||
const field = slug();
|
||||
expect(field.type).toBe("slug");
|
||||
expect(field.ui?.widget).toBe("slug");
|
||||
});
|
||||
|
||||
it("should validate slug format", () => {
|
||||
const field = slug({ required: true });
|
||||
expect(() => field.schema.parse("hello-world")).not.toThrow();
|
||||
expect(() => field.schema.parse("hello-world-123")).not.toThrow();
|
||||
expect(() => field.schema.parse("Hello World")).toThrow();
|
||||
expect(() => field.schema.parse("hello_world")).toThrow();
|
||||
});
|
||||
|
||||
it("should accept custom pattern", () => {
|
||||
const field = slug({ pattern: SLUG_UPPERCASE_PATTERN_REGEX });
|
||||
expect(() => field.schema.parse("HELLO_WORLD")).not.toThrow();
|
||||
expect(() => field.schema.parse("hello-world")).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("image", () => {
|
||||
it("should create image field", () => {
|
||||
const field = image();
|
||||
expect(field.type).toBe("image");
|
||||
expect(field.ui?.widget).toBe("image");
|
||||
});
|
||||
|
||||
it("should validate image value structure", () => {
|
||||
const field = image({ required: true });
|
||||
const validImage = {
|
||||
id: "img-123",
|
||||
src: "https://example.com/photo.jpg",
|
||||
alt: "A photo",
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
};
|
||||
expect(() => field.schema.parse(validImage)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("file", () => {
|
||||
it("should create file field", () => {
|
||||
const field = file();
|
||||
expect(field.type).toBe("file");
|
||||
expect(field.ui?.widget).toBe("file");
|
||||
});
|
||||
|
||||
it("should validate file value structure", () => {
|
||||
const field = file({ required: true });
|
||||
const validFile = {
|
||||
id: "file-123",
|
||||
url: "https://example.com/doc.pdf",
|
||||
filename: "doc.pdf",
|
||||
mimeType: "application/pdf",
|
||||
size: 1024000,
|
||||
};
|
||||
expect(() => field.schema.parse(validFile)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("reference", () => {
|
||||
it("should create reference field", () => {
|
||||
const field = reference({ to: "posts" });
|
||||
expect(field.type).toBe("reference");
|
||||
expect(field.ui?.widget).toBe("reference");
|
||||
});
|
||||
|
||||
it("should validate string ID", () => {
|
||||
const field = reference({ to: "posts", required: true });
|
||||
expect(() => field.schema.parse("post-123")).not.toThrow();
|
||||
expect(() => field.schema.parse(123)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("json", () => {
|
||||
it("should create json field", () => {
|
||||
const field = json();
|
||||
expect(field.type).toBe("json");
|
||||
expect(field.ui?.widget).toBe("json");
|
||||
});
|
||||
|
||||
it("should accept any JSON data", () => {
|
||||
const field = json();
|
||||
expect(() => field.schema.parse({ foo: "bar" })).not.toThrow();
|
||||
expect(() => field.schema.parse([1, 2, 3])).not.toThrow();
|
||||
expect(() => field.schema.parse("string")).not.toThrow();
|
||||
});
|
||||
|
||||
it("should validate with custom schema", () => {
|
||||
const customSchema = z.object({
|
||||
name: z.string(),
|
||||
age: z.number(),
|
||||
});
|
||||
|
||||
const field = json({ schema: customSchema });
|
||||
expect(() => field.schema.parse({ name: "John", age: 30 })).not.toThrow();
|
||||
expect(() => field.schema.parse({ name: "John" })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("richText", () => {
|
||||
it("should create richText field", () => {
|
||||
const field = richText();
|
||||
expect(field.type).toBe("richText");
|
||||
expect(field.ui?.widget).toBe("richText");
|
||||
});
|
||||
|
||||
it("should validate string content", () => {
|
||||
const field = richText({ required: true });
|
||||
expect(() => field.schema.parse("# Heading\n\nParagraph")).not.toThrow();
|
||||
expect(() => field.schema.parse(123)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe("portableText", () => {
|
||||
it("should create portableText field", () => {
|
||||
const field = portableText();
|
||||
expect(field.type).toBe("portableText");
|
||||
expect(field.ui?.widget).toBe("portableText");
|
||||
});
|
||||
|
||||
it("should validate array of blocks", () => {
|
||||
const field = portableText({ required: true });
|
||||
const blocks = [
|
||||
{
|
||||
_type: "block",
|
||||
_key: "key1",
|
||||
children: [{ _type: "span", text: "Hello" }],
|
||||
},
|
||||
];
|
||||
expect(() => field.schema.parse(blocks)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
225
packages/core/tests/unit/import/sections.test.ts
Normal file
225
packages/core/tests/unit/import/sections.test.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
/**
|
||||
* Tests for importing WordPress reusable blocks as sections
|
||||
*/
|
||||
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import type { WxrPost } from "../../../src/cli/wxr/parser.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { importReusableBlocksAsSections } from "../../../src/import/sections.js";
|
||||
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
describe("importReusableBlocksAsSections", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabase();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("should import wp_block posts as sections", async () => {
|
||||
const posts: WxrPost[] = [
|
||||
{
|
||||
id: 100,
|
||||
title: "Newsletter CTA",
|
||||
postName: "newsletter-cta",
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: `<!-- wp:heading {"level":3} -->
|
||||
<h3>Subscribe to Our Newsletter</h3>
|
||||
<!-- /wp:heading -->
|
||||
|
||||
<!-- wp:paragraph -->
|
||||
<p>Get the latest updates.</p>
|
||||
<!-- /wp:paragraph -->`,
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
{
|
||||
id: 101,
|
||||
title: "Hero Banner",
|
||||
postName: "hero-banner",
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: `<!-- wp:heading -->
|
||||
<h2>Welcome</h2>
|
||||
<!-- /wp:heading -->`,
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
// Regular post - should be ignored
|
||||
{
|
||||
id: 1,
|
||||
title: "Regular Post",
|
||||
postName: "regular-post",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
content: "<p>Hello</p>",
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
];
|
||||
|
||||
const result = await importReusableBlocksAsSections(posts, db);
|
||||
|
||||
expect(result.sectionsCreated).toBe(2);
|
||||
expect(result.sectionsSkipped).toBe(0);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
|
||||
// Verify sections were created
|
||||
const sections = await db.selectFrom("_emdash_sections").selectAll().execute();
|
||||
|
||||
expect(sections).toHaveLength(2);
|
||||
|
||||
const newsletter = sections.find((s) => s.slug === "newsletter-cta");
|
||||
expect(newsletter).toBeDefined();
|
||||
expect(newsletter?.title).toBe("Newsletter CTA");
|
||||
expect(newsletter?.source).toBe("import");
|
||||
|
||||
const hero = sections.find((s) => s.slug === "hero-banner");
|
||||
expect(hero).toBeDefined();
|
||||
expect(hero?.title).toBe("Hero Banner");
|
||||
});
|
||||
|
||||
it("should skip existing sections by slug", async () => {
|
||||
// Create existing section
|
||||
await db
|
||||
.insertInto("_emdash_sections")
|
||||
.values({
|
||||
id: "existing-1",
|
||||
slug: "newsletter-cta",
|
||||
title: "Existing Newsletter",
|
||||
description: null,
|
||||
keywords: null,
|
||||
content: "[]",
|
||||
preview_media_id: null,
|
||||
source: "user",
|
||||
theme_id: null,
|
||||
created_at: new Date().toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
})
|
||||
.execute();
|
||||
|
||||
const posts: WxrPost[] = [
|
||||
{
|
||||
id: 100,
|
||||
title: "Newsletter CTA",
|
||||
postName: "newsletter-cta",
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: "<p>New content</p>",
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
{
|
||||
id: 101,
|
||||
title: "New Block",
|
||||
postName: "new-block",
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: "<p>New</p>",
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
];
|
||||
|
||||
const result = await importReusableBlocksAsSections(posts, db);
|
||||
|
||||
expect(result.sectionsCreated).toBe(1);
|
||||
expect(result.sectionsSkipped).toBe(1);
|
||||
|
||||
// Original title should be preserved
|
||||
const existing = await db
|
||||
.selectFrom("_emdash_sections")
|
||||
.selectAll()
|
||||
.where("slug", "=", "newsletter-cta")
|
||||
.executeTakeFirst();
|
||||
|
||||
expect(existing?.title).toBe("Existing Newsletter");
|
||||
});
|
||||
|
||||
it("should return empty result when no wp_block posts", async () => {
|
||||
const posts: WxrPost[] = [
|
||||
{
|
||||
id: 1,
|
||||
title: "Regular Post",
|
||||
postName: "regular-post",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
content: "<p>Hello</p>",
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
];
|
||||
|
||||
const result = await importReusableBlocksAsSections(posts, db);
|
||||
|
||||
expect(result.sectionsCreated).toBe(0);
|
||||
expect(result.sectionsSkipped).toBe(0);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should convert Gutenberg content to Portable Text", async () => {
|
||||
const posts: WxrPost[] = [
|
||||
{
|
||||
id: 100,
|
||||
title: "Test Block",
|
||||
postName: "test-block",
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: `<!-- wp:paragraph -->
|
||||
<p>Hello <strong>world</strong>!</p>
|
||||
<!-- /wp:paragraph -->`,
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
];
|
||||
|
||||
await importReusableBlocksAsSections(posts, db);
|
||||
|
||||
const section = await db
|
||||
.selectFrom("_emdash_sections")
|
||||
.selectAll()
|
||||
.where("slug", "=", "test-block")
|
||||
.executeTakeFirst();
|
||||
|
||||
const content = JSON.parse(section?.content ?? "[]");
|
||||
|
||||
expect(content).toBeInstanceOf(Array);
|
||||
expect(content.length).toBeGreaterThan(0);
|
||||
expect(content[0]._type).toBe("block");
|
||||
});
|
||||
|
||||
it("should generate slug from title if postName is missing", async () => {
|
||||
const posts: WxrPost[] = [
|
||||
{
|
||||
id: 100,
|
||||
title: "My Custom Block Title",
|
||||
postName: undefined as unknown as string,
|
||||
postType: "wp_block",
|
||||
status: "publish",
|
||||
content: "<p>Test</p>",
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
},
|
||||
];
|
||||
|
||||
await importReusableBlocksAsSections(posts, db);
|
||||
|
||||
const section = await db.selectFrom("_emdash_sections").selectAll().executeTakeFirst();
|
||||
|
||||
expect(section?.slug).toBe("my-custom-block-title");
|
||||
});
|
||||
});
|
||||
791
packages/core/tests/unit/import/ssrf.test.ts
Normal file
791
packages/core/tests/unit/import/ssrf.test.ts
Normal file
@@ -0,0 +1,791 @@
|
||||
/**
|
||||
* Tests for SSRF protection in import/ssrf.ts
|
||||
*
|
||||
* Covers:
|
||||
* - IPv4-mapped IPv6 hex normalization (#58)
|
||||
* - Private IP detection across all forms
|
||||
* - validateExternalUrl blocking internal targets
|
||||
*/
|
||||
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import {
|
||||
cloudflareDohResolver,
|
||||
normalizeIPv6MappedToIPv4,
|
||||
resolveAndValidateExternalUrl,
|
||||
SsrfError,
|
||||
validateExternalUrl,
|
||||
} from "../../../src/import/ssrf.js";
|
||||
|
||||
describe("validateExternalUrl", () => {
|
||||
// =========================================================================
|
||||
// Basic validation
|
||||
// =========================================================================
|
||||
|
||||
it("accepts valid external URLs", () => {
|
||||
expect(validateExternalUrl("https://example.com")).toBeInstanceOf(URL);
|
||||
expect(validateExternalUrl("https://wordpress.org/feed")).toBeInstanceOf(URL);
|
||||
expect(validateExternalUrl("http://93.184.216.34/path")).toBeInstanceOf(URL);
|
||||
});
|
||||
|
||||
it("rejects non-http schemes", () => {
|
||||
expect(() => validateExternalUrl("ftp://example.com")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("file:///etc/passwd")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("javascript:alert(1)")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects invalid URLs", () => {
|
||||
expect(() => validateExternalUrl("not a url")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Blocked hostnames
|
||||
// =========================================================================
|
||||
|
||||
it("blocks localhost", () => {
|
||||
expect(() => validateExternalUrl("http://localhost/path")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://localhost:8080")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks metadata endpoints", () => {
|
||||
expect(() => validateExternalUrl("http://metadata.google.internal/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv4 private ranges
|
||||
// =========================================================================
|
||||
|
||||
it("blocks loopback (127.0.0.0/8)", () => {
|
||||
expect(() => validateExternalUrl("http://127.0.0.1/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.255.255.255/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks private 10.0.0.0/8", () => {
|
||||
expect(() => validateExternalUrl("http://10.0.0.1/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://10.255.255.255/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks private 172.16.0.0/12", () => {
|
||||
expect(() => validateExternalUrl("http://172.16.0.1/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://172.31.255.255/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks private 192.168.0.0/16", () => {
|
||||
expect(() => validateExternalUrl("http://192.168.0.1/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://192.168.255.255/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks link-local (169.254.0.0/16) including cloud metadata", () => {
|
||||
expect(() => validateExternalUrl("http://169.254.169.254/latest/meta-data/")).toThrow(
|
||||
SsrfError,
|
||||
);
|
||||
expect(() => validateExternalUrl("http://169.254.0.1/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv6 loopback
|
||||
// =========================================================================
|
||||
|
||||
it("blocks IPv6 loopback [::1]", () => {
|
||||
expect(() => validateExternalUrl("http://[::1]/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://[::1]:8080/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Issue #58: IPv4-mapped IPv6 in hex form
|
||||
//
|
||||
// The WHATWG URL parser normalizes [::ffff:127.0.0.1] to [::ffff:7f00:1].
|
||||
// Before the fix, the hex form bypassed isPrivateIp() because the regex
|
||||
// only matched dotted-decimal.
|
||||
// =========================================================================
|
||||
|
||||
it("blocks IPv4-mapped IPv6 loopback in hex form [::ffff:7f00:1]", () => {
|
||||
// This is the normalized form of [::ffff:127.0.0.1]
|
||||
expect(() => validateExternalUrl("http://[::ffff:7f00:1]/evil")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-mapped IPv6 cloud metadata [::ffff:a9fe:a9fe]", () => {
|
||||
// This is the normalized form of [::ffff:169.254.169.254]
|
||||
expect(() => validateExternalUrl("http://[::ffff:a9fe:a9fe]/latest/meta-data/")).toThrow(
|
||||
SsrfError,
|
||||
);
|
||||
});
|
||||
|
||||
it("blocks IPv4-mapped IPv6 private 10.x [::ffff:a00:1]", () => {
|
||||
// This is the normalized form of [::ffff:10.0.0.1]
|
||||
expect(() => validateExternalUrl("http://[::ffff:a00:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-mapped IPv6 private 192.168.x [::ffff:c0a8:1]", () => {
|
||||
// This is the normalized form of [::ffff:192.168.0.1]
|
||||
expect(() => validateExternalUrl("http://[::ffff:c0a8:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-mapped IPv6 private 172.16.x [::ffff:ac10:1]", () => {
|
||||
// This is the normalized form of [::ffff:172.16.0.1]
|
||||
expect(() => validateExternalUrl("http://[::ffff:ac10:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-mapped IPv6 in dotted-decimal form", () => {
|
||||
// The dotted-decimal form should also be blocked (it worked before too)
|
||||
// The URL parser normalizes this to hex, so this exercises the same path
|
||||
expect(() => validateExternalUrl("http://[::ffff:127.0.0.1]/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://[::ffff:169.254.169.254]/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://[::ffff:10.0.0.1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("allows IPv4-mapped IPv6 for public IPs", () => {
|
||||
// [::ffff:93.184.216.34] -> hex form after URL parsing
|
||||
// 93 = 0x5d, 184 = 0xb8 -> 0x5db8
|
||||
// 216 = 0xd8, 34 = 0x22 -> 0xd822
|
||||
// So [::ffff:5db8:d822] should be allowed
|
||||
expect(validateExternalUrl("http://[::ffff:5db8:d822]/")).toBeInstanceOf(URL);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv4-compatible (deprecated) addresses: ::XXXX:XXXX (no ffff prefix)
|
||||
//
|
||||
// [::127.0.0.1] normalizes to [::7f00:1] which has no ffff prefix.
|
||||
// Without the fix, these bypass all ffff-based checks.
|
||||
// =========================================================================
|
||||
|
||||
it("blocks IPv4-compatible loopback [::7f00:1]", () => {
|
||||
// Normalized form of [::127.0.0.1]
|
||||
expect(() => validateExternalUrl("http://[::7f00:1]/evil")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-compatible cloud metadata [::a9fe:a9fe]", () => {
|
||||
// Normalized form of [::169.254.169.254]
|
||||
expect(() => validateExternalUrl("http://[::a9fe:a9fe]/latest/meta-data/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-compatible private 10.x [::a00:1]", () => {
|
||||
// Normalized form of [::10.0.0.1]
|
||||
expect(() => validateExternalUrl("http://[::a00:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv4-compatible private 192.168.x [::c0a8:1]", () => {
|
||||
// Normalized form of [::192.168.0.1]
|
||||
expect(() => validateExternalUrl("http://[::c0a8:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("allows IPv4-compatible public IPs [::5db8:d822]", () => {
|
||||
// 93.184.216.34 in hex
|
||||
expect(validateExternalUrl("http://[::5db8:d822]/")).toBeInstanceOf(URL);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// NAT64 prefix: 64:ff9b::XXXX:XXXX
|
||||
//
|
||||
// [64:ff9b::127.0.0.1] normalizes to [64:ff9b::7f00:1].
|
||||
// NAT64 gateways embed IPv4 in IPv6 using this well-known prefix.
|
||||
// =========================================================================
|
||||
|
||||
it("blocks NAT64 loopback [64:ff9b::7f00:1]", () => {
|
||||
expect(() => validateExternalUrl("http://[64:ff9b::7f00:1]/evil")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks NAT64 cloud metadata [64:ff9b::a9fe:a9fe]", () => {
|
||||
expect(() => validateExternalUrl("http://[64:ff9b::a9fe:a9fe]/latest/meta-data/")).toThrow(
|
||||
SsrfError,
|
||||
);
|
||||
});
|
||||
|
||||
it("blocks NAT64 private 10.x [64:ff9b::a00:1]", () => {
|
||||
expect(() => validateExternalUrl("http://[64:ff9b::a00:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks NAT64 private 192.168.x [64:ff9b::c0a8:1]", () => {
|
||||
expect(() => validateExternalUrl("http://[64:ff9b::c0a8:1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("allows NAT64 public IPs [64:ff9b::5db8:d822]", () => {
|
||||
expect(validateExternalUrl("http://[64:ff9b::5db8:d822]/")).toBeInstanceOf(URL);
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv6 link-local and ULA
|
||||
// =========================================================================
|
||||
|
||||
it("blocks IPv6 link-local (fe80::)", () => {
|
||||
expect(() => validateExternalUrl("http://[fe80::1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks IPv6 unique local (fc00::/fd00::)", () => {
|
||||
expect(() => validateExternalUrl("http://[fc00::1]/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://[fd00::1]/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks 0.0.0.0/8 range", () => {
|
||||
expect(() => validateExternalUrl("http://0.0.0.0/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://0.0.0.1/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// IPv4 literals with trailing dots. A single trailing dot is stripped by
|
||||
// the WHATWG URL parser, but multiple trailing dots are preserved on
|
||||
// .hostname. parseIpv4 rejects anything with a dot count != 4, so
|
||||
// "127.0.0.1.." falls through to isPrivateIp's IPv6 fallback and
|
||||
// returns false, bypassing the private-IP check. We must strip trailing
|
||||
// dots before the private-IP check.
|
||||
it("blocks IPv4 literals with trailing dots", () => {
|
||||
expect(() => validateExternalUrl("http://127.0.0.1./")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1../")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://169.254.169.254../")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://10.0.0.1../")).toThrow(SsrfError);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// normalizeIPv6MappedToIPv4 — direct unit tests (#58)
|
||||
//
|
||||
// This function converts IPv4-mapped/translated IPv6 hex addresses back to
|
||||
// dotted-decimal IPv4 so they can be checked against private ranges. Without
|
||||
// it, the WHATWG URL parser's hex normalization bypasses SSRF protection.
|
||||
// =============================================================================
|
||||
|
||||
describe("normalizeIPv6MappedToIPv4", () => {
|
||||
// =========================================================================
|
||||
// Standard hex-form: ::ffff:XXXX:XXXX
|
||||
// =========================================================================
|
||||
|
||||
it("converts loopback ::ffff:7f00:1 -> 127.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:7f00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("converts cloud metadata ::ffff:a9fe:a9fe -> 169.254.169.254", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:a9fe:a9fe")).toBe("169.254.169.254");
|
||||
});
|
||||
|
||||
it("converts private 10.x ::ffff:a00:1 -> 10.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:a00:1")).toBe("10.0.0.1");
|
||||
});
|
||||
|
||||
it("converts private 192.168.x ::ffff:c0a8:1 -> 192.168.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:c0a8:1")).toBe("192.168.0.1");
|
||||
});
|
||||
|
||||
it("converts private 172.16.x ::ffff:ac10:1 -> 172.16.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:ac10:1")).toBe("172.16.0.1");
|
||||
});
|
||||
|
||||
it("converts public IP ::ffff:5db8:d822 -> 93.184.216.34", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:5db8:d822")).toBe("93.184.216.34");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Edge values
|
||||
// =========================================================================
|
||||
|
||||
it("converts ::ffff:0:0 -> 0.0.0.0", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:0:0")).toBe("0.0.0.0");
|
||||
});
|
||||
|
||||
it("converts ::ffff:ffff:ffff -> 255.255.255.255", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:ffff:ffff")).toBe("255.255.255.255");
|
||||
});
|
||||
|
||||
it("converts 4-digit hex groups correctly ::ffff:c612:e3a -> 198.18.14.58", () => {
|
||||
// 0xc612 = 198*256 + 18 = 50706
|
||||
// 0x0e3a = 14*256 + 58 = 3642
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:c612:e3a")).toBe("198.18.14.58");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Case insensitivity
|
||||
// =========================================================================
|
||||
|
||||
it("handles uppercase hex digits", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::FFFF:7F00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("handles mixed case hex digits", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:A9FE:a9fe")).toBe("169.254.169.254");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Bracket-wrapped form returns null (brackets stripped by caller)
|
||||
// validateExternalUrl strips brackets before calling isPrivateIp,
|
||||
// so normalizeIPv6MappedToIPv4 never receives bracketed input.
|
||||
// =========================================================================
|
||||
|
||||
it("returns null for bracketed input (brackets stripped by caller)", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("[::ffff:7f00:1]")).toBeNull();
|
||||
expect(normalizeIPv6MappedToIPv4("[::ffff:a9fe:a9fe]")).toBeNull();
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv4-translated (RFC 6052): ::ffff:0:XXXX:XXXX
|
||||
// =========================================================================
|
||||
|
||||
it("converts translated form ::ffff:0:7f00:1 -> 127.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:0:7f00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("converts translated form ::ffff:0:a9fe:a9fe -> 169.254.169.254", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:0:a9fe:a9fe")).toBe("169.254.169.254");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Fully expanded form: 0000:0000:0000:0000:0000:ffff:XXXX:XXXX
|
||||
// =========================================================================
|
||||
|
||||
it("converts expanded form 0:0:0:0:0:ffff:7f00:1 -> 127.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("0:0:0:0:0:ffff:7f00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("converts expanded form 0000:0000:0000:0000:0000:ffff:a9fe:a9fe -> 169.254.169.254", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("0000:0000:0000:0000:0000:ffff:a9fe:a9fe")).toBe(
|
||||
"169.254.169.254",
|
||||
);
|
||||
});
|
||||
|
||||
it("converts expanded form with mixed zero lengths", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("0:00:000:0000:0:ffff:a00:1")).toBe("10.0.0.1");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// IPv4-compatible (deprecated) form: ::XXXX:XXXX (no ffff prefix)
|
||||
// =========================================================================
|
||||
|
||||
it("converts IPv4-compatible loopback ::7f00:1 -> 127.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::7f00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("converts IPv4-compatible metadata ::a9fe:a9fe -> 169.254.169.254", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::a9fe:a9fe")).toBe("169.254.169.254");
|
||||
});
|
||||
|
||||
it("converts IPv4-compatible private ::a00:1 -> 10.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::a00:1")).toBe("10.0.0.1");
|
||||
});
|
||||
|
||||
it("converts IPv4-compatible public ::5db8:d822 -> 93.184.216.34", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::5db8:d822")).toBe("93.184.216.34");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// NAT64 prefix (RFC 6052): 64:ff9b::XXXX:XXXX
|
||||
// =========================================================================
|
||||
|
||||
it("converts NAT64 loopback 64:ff9b::7f00:1 -> 127.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("64:ff9b::7f00:1")).toBe("127.0.0.1");
|
||||
});
|
||||
|
||||
it("converts NAT64 metadata 64:ff9b::a9fe:a9fe -> 169.254.169.254", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("64:ff9b::a9fe:a9fe")).toBe("169.254.169.254");
|
||||
});
|
||||
|
||||
it("converts NAT64 private 64:ff9b::a00:1 -> 10.0.0.1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("64:ff9b::a00:1")).toBe("10.0.0.1");
|
||||
});
|
||||
|
||||
it("converts NAT64 public 64:ff9b::5db8:d822 -> 93.184.216.34", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("64:ff9b::5db8:d822")).toBe("93.184.216.34");
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Non-matching inputs -> null
|
||||
// =========================================================================
|
||||
|
||||
it("returns null for plain IPv4", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("127.0.0.1")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for IPv6 loopback ::1", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("::1")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for regular IPv6 address", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("2001:db8::1")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for link-local IPv6", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("fe80::1")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for hostnames", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("example.com")).toBeNull();
|
||||
expect(normalizeIPv6MappedToIPv4("localhost")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for empty string", () => {
|
||||
expect(normalizeIPv6MappedToIPv4("")).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for dotted-decimal mapped form (handled separately)", () => {
|
||||
// ::ffff:127.0.0.1 uses the dotted-decimal regex, not hex normalization
|
||||
expect(normalizeIPv6MappedToIPv4("::ffff:127.0.0.1")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// Wildcard DNS services — hostname blocklist
|
||||
//
|
||||
// Services like nip.io map "127.0.0.1.nip.io" to 127.0.0.1. Without DNS
|
||||
// resolution they pass validateExternalUrl since the hostname is neither an
|
||||
// IP literal nor on the small internal-names list. Adding the apex domains
|
||||
// to BLOCKED_HOSTNAMES catches the most widely-used rebinding tools without
|
||||
// requiring a network round-trip.
|
||||
// =============================================================================
|
||||
|
||||
describe("validateExternalUrl — wildcard DNS rebinding services", () => {
|
||||
it("blocks nip.io and its subdomains", () => {
|
||||
expect(() => validateExternalUrl("http://nip.io/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1.nip.io/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://169.254.169.254.nip.io/latest/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks sslip.io and its subdomains", () => {
|
||||
expect(() => validateExternalUrl("http://sslip.io/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1.sslip.io/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks xip.io and its subdomains", () => {
|
||||
expect(() => validateExternalUrl("http://xip.io/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://10.0.0.1.xip.io/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks traefik.me and its subdomains", () => {
|
||||
expect(() => validateExternalUrl("http://traefik.me/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1.traefik.me/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("is case-insensitive for blocklisted hostnames", () => {
|
||||
expect(() => validateExternalUrl("http://NIP.IO/")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1.Nip.Io/")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
// Trailing-dot FQDN form. The WHATWG URL parser preserves the dot on
|
||||
// `.hostname`, so a naive exact-match or `.endsWith(suffix)` check misses
|
||||
// these. Without explicit normalization, attackers can bypass both
|
||||
// BLOCKED_HOSTNAMES and the suffix list by appending a single dot.
|
||||
it("blocks trailing-dot FQDNs on the hostname blocklist", () => {
|
||||
expect(() => validateExternalUrl("http://localhost./")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("blocks trailing-dot FQDNs on the wildcard suffix list", () => {
|
||||
expect(() => validateExternalUrl("http://nip.io./")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://127.0.0.1.nip.io./")).toThrow(SsrfError);
|
||||
expect(() => validateExternalUrl("http://sslip.io./")).toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("allows look-alike domains that are not on the blocklist", () => {
|
||||
// Defensive: we should only block specific known services, not any
|
||||
// domain that happens to contain "nip" or similar.
|
||||
expect(validateExternalUrl("http://nippon.example.com/")).toBeInstanceOf(URL);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// resolveAndValidateExternalUrl — async DNS-aware validation
|
||||
//
|
||||
// Runs validateExternalUrl first (cheap pre-flight), then resolves the
|
||||
// hostname via an injectable resolver and checks each returned IP against
|
||||
// the private-range blocklist. Catches DNS rebinding attacks using domains
|
||||
// the attacker controls (not just known public rebinding services).
|
||||
// =============================================================================
|
||||
|
||||
describe("resolveAndValidateExternalUrl", () => {
|
||||
// Helper: build a stubbed resolver that returns a fixed list of IPs.
|
||||
function resolver(ips: string[]): (host: string) => Promise<string[]> {
|
||||
return async () => ips;
|
||||
}
|
||||
|
||||
// Helper: a resolver that fails. Used to assert fail-closed behaviour.
|
||||
function failingResolver(error = new Error("DNS failure")) {
|
||||
return async () => {
|
||||
throw error;
|
||||
};
|
||||
}
|
||||
|
||||
it("accepts public IPs", async () => {
|
||||
const url = await resolveAndValidateExternalUrl("https://example.com/", {
|
||||
resolver: resolver(["93.184.216.34"]),
|
||||
});
|
||||
expect(url).toBeInstanceOf(URL);
|
||||
expect(url.hostname).toBe("example.com");
|
||||
});
|
||||
|
||||
it("rejects hostnames that resolve to loopback", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver(["127.0.0.1"]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects hostnames that resolve to cloud metadata IP", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver(["169.254.169.254"]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects hostnames that resolve to any RFC1918 address", async () => {
|
||||
for (const ip of ["10.0.0.1", "172.16.0.1", "192.168.1.1"]) {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver([ip]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects if ANY resolved IP is private (multi-record DNS rebinding)", async () => {
|
||||
// Attacker serves two A records; we must reject if either is private,
|
||||
// not just the first one.
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver(["93.184.216.34", "127.0.0.1"]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects IPv6 loopback in resolved records", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver(["::1"]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects IPv6 link-local in resolved records (any case)", async () => {
|
||||
for (const ip of ["fe80::1", "FE80::1", "Fe80::abcd"]) {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver([ip]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects IPv6 unique-local in resolved records (any case)", async () => {
|
||||
for (const ip of ["fc00::1", "FC00::1", "fd12:3456::1", "FD00::BEEF"]) {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver([ip]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects IPv4-mapped IPv6 loopback in resolved records", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://attacker.example/", {
|
||||
resolver: resolver(["::ffff:127.0.0.1"]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("accepts public IPv6 addresses", async () => {
|
||||
const url = await resolveAndValidateExternalUrl("https://example.com/", {
|
||||
resolver: resolver(["2606:4700:4700::1111"]),
|
||||
});
|
||||
expect(url).toBeInstanceOf(URL);
|
||||
});
|
||||
|
||||
it("runs synchronous validateExternalUrl first (short-circuits on literal IP)", async () => {
|
||||
// 127.0.0.1 as a literal hostname is caught by validateExternalUrl
|
||||
// before any DNS lookup. Pass a resolver that would throw to prove it
|
||||
// isn't called.
|
||||
const r = failingResolver(new Error("should not be called"));
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("http://127.0.0.1/", { resolver: r }),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("fails closed when the resolver throws", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://example.com/", {
|
||||
resolver: failingResolver(),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("rejects empty resolver result (hostname resolves to nothing)", async () => {
|
||||
await expect(
|
||||
resolveAndValidateExternalUrl("https://example.com/", {
|
||||
resolver: resolver([]),
|
||||
}),
|
||||
).rejects.toThrow(SsrfError);
|
||||
});
|
||||
|
||||
it("returns the parsed URL on success", async () => {
|
||||
const url = await resolveAndValidateExternalUrl("https://example.com/path?q=1", {
|
||||
resolver: resolver(["93.184.216.34"]),
|
||||
});
|
||||
expect(url.pathname).toBe("/path");
|
||||
expect(url.searchParams.get("q")).toBe("1");
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// cloudflareDohResolver — unit tests for the DoH parser
|
||||
//
|
||||
// Stubs globalThis.fetch to simulate various DoH responses. The resolver
|
||||
// must:
|
||||
// - return IPs from valid A and AAAA responses
|
||||
// - treat NXDOMAIN (Status=3) as an empty result (legitimately non-existent)
|
||||
// - fail closed on SERVFAIL (Status=2), REFUSED (Status=5), and other
|
||||
// non-zero statuses, so that split-view DNS can't smuggle a private IP
|
||||
// past the check by SERVFAIL'ing one record type
|
||||
// - fail on HTTP errors
|
||||
// - fail on malformed JSON or responses with missing fields
|
||||
// =============================================================================
|
||||
|
||||
describe("cloudflareDohResolver", () => {
|
||||
let originalFetch: typeof globalThis.fetch;

// Snapshot the real fetch before each test so stubFetch may overwrite it...
beforeEach(() => {
  originalFetch = globalThis.fetch;
});

// ...and restore it afterwards so later tests see the real implementation.
afterEach(() => {
  globalThis.fetch = originalFetch;
});
|
||||
|
||||
function stubFetch(
|
||||
responses: Record<"A" | "AAAA", { body?: unknown; status?: number; throws?: Error }>,
|
||||
): void {
|
||||
globalThis.fetch = vi.fn(async (input: RequestInfo | URL): Promise<Response> => {
|
||||
const url = typeof input === "string" ? input : input instanceof URL ? input.href : input.url;
|
||||
const type: "A" | "AAAA" = url.includes("type=AAAA") ? "AAAA" : "A";
|
||||
const res = responses[type];
|
||||
if (res.throws) throw res.throws;
|
||||
return new Response(JSON.stringify(res.body ?? {}), { status: res.status ?? 200 });
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- minimal stub
|
||||
}) as unknown as typeof globalThis.fetch;
|
||||
}
|
||||
|
||||
it("returns A and AAAA records from a valid Status=0 response", async () => {
|
||||
stubFetch({
|
||||
A: { body: { Status: 0, Answer: [{ data: "93.184.216.34" }] } },
|
||||
AAAA: { body: { Status: 0, Answer: [{ data: "2606:4700::1" }] } },
|
||||
});
|
||||
|
||||
const ips = await cloudflareDohResolver("example.com");
|
||||
expect(ips).toContain("93.184.216.34");
|
||||
expect(ips).toContain("2606:4700::1");
|
||||
});
|
||||
|
||||
it("treats NXDOMAIN (Status=3) as empty (legitimately no records)", async () => {
|
||||
stubFetch({
|
||||
A: { body: { Status: 3 } },
|
||||
AAAA: { body: { Status: 3 } },
|
||||
});
|
||||
const ips = await cloudflareDohResolver("does-not-exist.example");
|
||||
expect(ips).toEqual([]);
|
||||
});
|
||||
|
||||
it("fails closed on SERVFAIL (Status=2)", async () => {
|
||||
// Split-view attack: attacker authoritative NS returns SERVFAIL to
|
||||
// Cloudflare's resolver but real records to the victim's resolver.
|
||||
// If we silently treated SERVFAIL as empty, we'd combine whatever
|
||||
// the other record type returned and call it "public" — bypassing
|
||||
// the check.
|
||||
stubFetch({
|
||||
A: { body: { Status: 2 } },
|
||||
AAAA: { body: { Status: 0, Answer: [{ data: "2606:4700::1" }] } },
|
||||
});
|
||||
await expect(cloudflareDohResolver("attacker.example")).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("fails closed on REFUSED (Status=5)", async () => {
|
||||
stubFetch({
|
||||
A: { body: { Status: 5 } },
|
||||
AAAA: { body: { Status: 0, Answer: [{ data: "2606:4700::1" }] } },
|
||||
});
|
||||
await expect(cloudflareDohResolver("attacker.example")).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("fails closed on HTTP errors from the DoH endpoint", async () => {
|
||||
stubFetch({
|
||||
A: { status: 500 },
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
await expect(cloudflareDohResolver("example.com")).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("fails closed on malformed response bodies missing Status", async () => {
|
||||
stubFetch({
|
||||
A: { body: {} },
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
await expect(cloudflareDohResolver("example.com")).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("fails closed on network errors", async () => {
|
||||
stubFetch({
|
||||
A: { throws: new Error("network down") },
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
await expect(cloudflareDohResolver("example.com")).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("returns empty array when both A and AAAA return no records but Status=0", async () => {
|
||||
stubFetch({
|
||||
A: { body: { Status: 0, Answer: [] } },
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
const ips = await cloudflareDohResolver("example.com");
|
||||
expect(ips).toEqual([]);
|
||||
});
|
||||
|
||||
it("skips Answer entries without string data", async () => {
|
||||
stubFetch({
|
||||
A: {
|
||||
body: {
|
||||
Status: 0,
|
||||
Answer: [{ data: "93.184.216.34" }, { data: 12345 }, {}, { notData: "foo" }],
|
||||
},
|
||||
},
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
const ips = await cloudflareDohResolver("example.com");
|
||||
expect(ips).toEqual(["93.184.216.34"]);
|
||||
});
|
||||
|
||||
// DoH responses often include CNAME records in the Answer chain alongside
|
||||
// (or instead of) A/AAAA records. Their `data` field is a hostname, not
|
||||
// an IP. If we return them, the validator's isPrivateIp check silently
|
||||
// accepts them (parseIpv4 returns null → "not private" → pass).
|
||||
it("filters CNAME-style hostname answers, keeping only IP literals", async () => {
|
||||
stubFetch({
|
||||
A: {
|
||||
body: {
|
||||
Status: 0,
|
||||
Answer: [
|
||||
{ data: "cdn.example.com." }, // CNAME target, not an IP
|
||||
{ data: "93.184.216.34" }, // real A record
|
||||
],
|
||||
},
|
||||
},
|
||||
AAAA: {
|
||||
body: {
|
||||
Status: 0,
|
||||
Answer: [{ data: "other.example.com." }, { data: "2606:4700::1" }],
|
||||
},
|
||||
},
|
||||
});
|
||||
const ips = await cloudflareDohResolver("example.com");
|
||||
expect(ips).toEqual(["93.184.216.34", "2606:4700::1"]);
|
||||
});
|
||||
|
||||
it("rejects a response that contains only CNAME strings", async () => {
|
||||
stubFetch({
|
||||
A: {
|
||||
body: {
|
||||
Status: 0,
|
||||
Answer: [{ data: "target.example.com." }],
|
||||
},
|
||||
},
|
||||
AAAA: { body: { Status: 0, Answer: [] } },
|
||||
});
|
||||
const ips = await cloudflareDohResolver("cname-only.example");
|
||||
// No IPs at all — the caller should treat this as "could not resolve"
|
||||
// and fail closed, not pretend the CNAME target is an address.
|
||||
expect(ips).toEqual([]);
|
||||
});
|
||||
});
|
||||
413
packages/core/tests/unit/import/wordpress-plugin-i18n.test.ts
Normal file
413
packages/core/tests/unit/import/wordpress-plugin-i18n.test.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
/**
|
||||
* Tests for WPML/Polylang auto-detection in WordPress plugin import source.
|
||||
*
|
||||
* Verifies that the probe() and analyze() methods correctly extract and
|
||||
* surface i18n detection from the EmDash Exporter plugin's API responses.
|
||||
*/
|
||||
|
||||
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { wordpressPluginSource } from "../../../src/import/sources/wordpress-plugin.js";
|
||||
import { setDefaultDnsResolver } from "../../../src/import/ssrf.js";
|
||||
|
||||
// ─── Mock fetch ──────────────────────────────────────────────────────────────
|
||||
|
||||
// Shared fetch mock: every network call the plugin source makes lands here.
const mockFetch = vi.fn();
vi.stubGlobal("fetch", mockFetch);

// Bypass DoH so the fetch mock only sees the calls these tests model.
let previousResolver: ReturnType<typeof setDefaultDnsResolver> | undefined;
beforeAll(() => {
  // Resolve every hostname to a public IP so the SSRF guard passes
  // without any real network I/O.
  previousResolver = setDefaultDnsResolver(async () => ["93.184.216.34"]);
});
afterAll(() => {
  // Restore whatever resolver was installed before this suite ran.
  setDefaultDnsResolver(previousResolver ?? null);
});

beforeEach(() => {
  // Each test programs its own responses; clear queued mocks and call history.
  mockFetch.mockReset();
});
|
||||
|
||||
// ─── Fixtures ────────────────────────────────────────────────────────────────
|
||||
|
||||
/** Minimal valid probe response without i18n */
|
||||
function makeProbeResponse(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
emdash_exporter: "1.0.0",
|
||||
wordpress_version: "6.5",
|
||||
site: {
|
||||
title: "Test Site",
|
||||
description: "A test site",
|
||||
url: "https://example.com",
|
||||
home: "https://example.com",
|
||||
language: "en-US",
|
||||
timezone: "UTC",
|
||||
},
|
||||
capabilities: {
|
||||
application_passwords: true,
|
||||
acf: false,
|
||||
yoast: false,
|
||||
rankmath: false,
|
||||
},
|
||||
post_types: [
|
||||
{ name: "post", label: "Posts", count: 10 },
|
||||
{ name: "page", label: "Pages", count: 5 },
|
||||
],
|
||||
media_count: 20,
|
||||
endpoints: {},
|
||||
auth_instructions: {
|
||||
method: "application_passwords",
|
||||
instructions: "Create an application password",
|
||||
},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/** Minimal valid analyze response without i18n */
|
||||
function makeAnalyzeResponse(overrides: Record<string, unknown> = {}) {
|
||||
return {
|
||||
site: { title: "Test Site", url: "https://example.com" },
|
||||
post_types: [
|
||||
{
|
||||
name: "post",
|
||||
label: "Posts",
|
||||
label_singular: "Post",
|
||||
total: 10,
|
||||
by_status: { publish: 8, draft: 2 },
|
||||
supports: { title: true, editor: true, thumbnail: true },
|
||||
taxonomies: ["category", "post_tag"],
|
||||
custom_fields: [],
|
||||
hierarchical: false,
|
||||
has_archive: true,
|
||||
},
|
||||
],
|
||||
taxonomies: [
|
||||
{
|
||||
name: "category",
|
||||
label: "Categories",
|
||||
hierarchical: true,
|
||||
term_count: 5,
|
||||
object_types: ["post"],
|
||||
},
|
||||
{
|
||||
name: "post_tag",
|
||||
label: "Tags",
|
||||
hierarchical: false,
|
||||
term_count: 12,
|
||||
object_types: ["post"],
|
||||
},
|
||||
],
|
||||
authors: [
|
||||
{ id: 1, login: "admin", email: "admin@example.com", display_name: "Admin", post_count: 10 },
|
||||
],
|
||||
attachments: { count: 20, by_type: { "image/jpeg": 15, "image/png": 5 } },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ─── Probe tests ─────────────────────────────────────────────────────────────
|
||||
|
||||
describe("WordPress Plugin Source — i18n detection", () => {
|
||||
describe("probe()", () => {
|
||||
it("returns i18n when WPML is detected", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify(
|
||||
makeProbeResponse({
|
||||
i18n: {
|
||||
plugin: "wpml",
|
||||
default_locale: "en",
|
||||
locales: ["en", "fr", "de"],
|
||||
},
|
||||
}),
|
||||
),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
|
||||
const result = await wordpressPluginSource.probe!("https://example.com");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.i18n).toEqual({
|
||||
plugin: "wpml",
|
||||
defaultLocale: "en",
|
||||
locales: ["en", "fr", "de"],
|
||||
});
|
||||
});
|
||||
|
||||
it("returns i18n when Polylang is detected", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify(
|
||||
makeProbeResponse({
|
||||
i18n: {
|
||||
plugin: "polylang",
|
||||
default_locale: "fr",
|
||||
locales: ["fr", "en"],
|
||||
},
|
||||
}),
|
||||
),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
|
||||
const result = await wordpressPluginSource.probe!("https://example.com");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.i18n).toEqual({
|
||||
plugin: "polylang",
|
||||
defaultLocale: "fr",
|
||||
locales: ["fr", "en"],
|
||||
});
|
||||
});
|
||||
|
||||
it("returns undefined i18n when no multilingual plugin", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(JSON.stringify(makeProbeResponse()), { status: 200 }),
|
||||
);
|
||||
|
||||
const result = await wordpressPluginSource.probe!("https://example.com");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.i18n).toBeUndefined();
|
||||
});
|
||||
|
||||
it("preserves other probe fields alongside i18n", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify(
|
||||
makeProbeResponse({
|
||||
i18n: {
|
||||
plugin: "wpml",
|
||||
default_locale: "en",
|
||||
locales: ["en", "es"],
|
||||
},
|
||||
}),
|
||||
),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
|
||||
const result = await wordpressPluginSource.probe!("https://example.com");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.sourceId).toBe("wordpress-plugin");
|
||||
expect(result!.confidence).toBe("definite");
|
||||
expect(result!.detected.platform).toBe("wordpress");
|
||||
expect(result!.preview?.posts).toBe(10);
|
||||
expect(result!.i18n?.plugin).toBe("wpml");
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Analyze tests ───────────────────────────────────────────────────────
|
||||
|
||||
describe("analyze()", () => {
|
||||
it("returns i18n when WPML is detected", async () => {
|
||||
mockFetch.mockImplementation(async (url: string) => {
|
||||
if (url.includes("/analyze")) {
|
||||
return new Response(
|
||||
JSON.stringify(
|
||||
makeAnalyzeResponse({
|
||||
i18n: {
|
||||
plugin: "wpml",
|
||||
default_locale: "en",
|
||||
locales: ["en", "fr", "de"],
|
||||
},
|
||||
}),
|
||||
),
|
||||
{ status: 200 },
|
||||
);
|
||||
}
|
||||
// Media endpoint — return empty
|
||||
return new Response(
|
||||
JSON.stringify({ items: [], total: 0, pages: 0, page: 1, per_page: 100 }),
|
||||
{ status: 200 },
|
||||
);
|
||||
});
|
||||
|
||||
const analysis = await wordpressPluginSource.analyze(
|
||||
{ type: "url", url: "https://example.com", token: "test-token" },
|
||||
{},
|
||||
);
|
||||
|
||||
expect(analysis.i18n).toEqual({
|
||||
plugin: "wpml",
|
||||
defaultLocale: "en",
|
||||
locales: ["en", "fr", "de"],
|
||||
});
|
||||
});
|
||||
|
||||
it("returns i18n when Polylang is detected", async () => {
|
||||
mockFetch.mockImplementation(async (url: string) => {
|
||||
if (url.includes("/analyze")) {
|
||||
return new Response(
|
||||
JSON.stringify(
|
||||
makeAnalyzeResponse({
|
||||
i18n: {
|
||||
plugin: "polylang",
|
||||
default_locale: "fr",
|
||||
locales: ["fr", "en", "de"],
|
||||
},
|
||||
}),
|
||||
),
|
||||
{ status: 200 },
|
||||
);
|
||||
}
|
||||
return new Response(
|
||||
JSON.stringify({ items: [], total: 0, pages: 0, page: 1, per_page: 100 }),
|
||||
{ status: 200 },
|
||||
);
|
||||
});
|
||||
|
||||
const analysis = await wordpressPluginSource.analyze(
|
||||
{ type: "url", url: "https://example.com", token: "test-token" },
|
||||
{},
|
||||
);
|
||||
|
||||
expect(analysis.i18n).toEqual({
|
||||
plugin: "polylang",
|
||||
defaultLocale: "fr",
|
||||
locales: ["fr", "en", "de"],
|
||||
});
|
||||
});
|
||||
|
||||
it("returns undefined i18n when no multilingual plugin", async () => {
|
||||
mockFetch.mockImplementation(async (url: string) => {
|
||||
if (url.includes("/analyze")) {
|
||||
return new Response(JSON.stringify(makeAnalyzeResponse()), { status: 200 });
|
||||
}
|
||||
return new Response(
|
||||
JSON.stringify({ items: [], total: 0, pages: 0, page: 1, per_page: 100 }),
|
||||
{ status: 200 },
|
||||
);
|
||||
});
|
||||
|
||||
const analysis = await wordpressPluginSource.analyze(
|
||||
{ type: "url", url: "https://example.com", token: "test-token" },
|
||||
{},
|
||||
);
|
||||
|
||||
expect(analysis.i18n).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
// ─── Content fetch — locale/translationGroup passthrough ─────────────────
|
||||
|
||||
describe("fetchContent()", () => {
|
||||
it("passes through locale and translationGroup from plugin posts", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify({
|
||||
items: [
|
||||
{
|
||||
id: 1,
|
||||
post_type: "post",
|
||||
status: "publish",
|
||||
slug: "hello-world",
|
||||
title: "Hello World",
|
||||
content: "",
|
||||
excerpt: "",
|
||||
date: "2024-01-01T00:00:00",
|
||||
date_gmt: "2024-01-01T00:00:00",
|
||||
modified: "2024-01-01T00:00:00",
|
||||
modified_gmt: "2024-01-01T00:00:00",
|
||||
author: null,
|
||||
parent: null,
|
||||
menu_order: 0,
|
||||
taxonomies: {},
|
||||
meta: {},
|
||||
locale: "en",
|
||||
translation_group: "group-1",
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
post_type: "post",
|
||||
status: "publish",
|
||||
slug: "bonjour-le-monde",
|
||||
title: "Bonjour le monde",
|
||||
content: "",
|
||||
excerpt: "",
|
||||
date: "2024-01-01T00:00:00",
|
||||
date_gmt: "2024-01-01T00:00:00",
|
||||
modified: "2024-01-01T00:00:00",
|
||||
modified_gmt: "2024-01-01T00:00:00",
|
||||
author: null,
|
||||
parent: null,
|
||||
menu_order: 0,
|
||||
taxonomies: {},
|
||||
meta: {},
|
||||
locale: "fr",
|
||||
translation_group: "group-1",
|
||||
},
|
||||
],
|
||||
total: 2,
|
||||
pages: 1,
|
||||
page: 1,
|
||||
per_page: 100,
|
||||
}),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
|
||||
const items = [];
|
||||
for await (const item of wordpressPluginSource.fetchContent(
|
||||
{ type: "url", url: "https://example.com", token: "test-token" },
|
||||
{ postTypes: ["post"] },
|
||||
)) {
|
||||
items.push(item);
|
||||
}
|
||||
|
||||
expect(items).toHaveLength(2);
|
||||
expect(items[0]!.locale).toBe("en");
|
||||
expect(items[0]!.translationGroup).toBe("group-1");
|
||||
expect(items[1]!.locale).toBe("fr");
|
||||
expect(items[1]!.translationGroup).toBe("group-1");
|
||||
});
|
||||
|
||||
it("returns undefined locale/translationGroup when not present", async () => {
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
new Response(
|
||||
JSON.stringify({
|
||||
items: [
|
||||
{
|
||||
id: 1,
|
||||
post_type: "post",
|
||||
status: "publish",
|
||||
slug: "hello",
|
||||
title: "Hello",
|
||||
content: "",
|
||||
excerpt: "",
|
||||
date: "2024-01-01T00:00:00",
|
||||
date_gmt: "2024-01-01T00:00:00",
|
||||
modified: "2024-01-01T00:00:00",
|
||||
modified_gmt: "2024-01-01T00:00:00",
|
||||
author: null,
|
||||
parent: null,
|
||||
menu_order: 0,
|
||||
taxonomies: {},
|
||||
meta: {},
|
||||
},
|
||||
],
|
||||
total: 1,
|
||||
pages: 1,
|
||||
page: 1,
|
||||
per_page: 100,
|
||||
}),
|
||||
{ status: 200 },
|
||||
),
|
||||
);
|
||||
|
||||
const items = [];
|
||||
for await (const item of wordpressPluginSource.fetchContent(
|
||||
{ type: "url", url: "https://example.com", token: "test-token" },
|
||||
{ postTypes: ["post"] },
|
||||
)) {
|
||||
items.push(item);
|
||||
}
|
||||
|
||||
expect(items).toHaveLength(1);
|
||||
expect(items[0]!.locale).toBeUndefined();
|
||||
expect(items[0]!.translationGroup).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,113 @@
|
||||
/**
|
||||
* Tests for WordPress import slug sanitization
|
||||
*
|
||||
* Regression test for emdash-cms/emdash#79: WordPress import crashes on
|
||||
* collections with hyphens in slug (e.g. Elementor `elementor-hf`).
|
||||
*
|
||||
* WordPress post type slugs commonly use hyphens (e.g. `elementor-hf`,
|
||||
* `my-custom-type`), but EmDash collection slugs require `[a-z][a-z0-9_]*`.
|
||||
* The fix sanitizes all unknown post type slugs so they conform to the
|
||||
* collection slug format, rather than trying to enumerate every plugin's
|
||||
* internal post types.
|
||||
*/
|
||||
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import {
|
||||
mapPostTypeToCollection,
|
||||
sanitizeSlug,
|
||||
} from "../../../src/astro/routes/api/import/wordpress/analyze.js";
|
||||
|
||||
describe("sanitizeSlug", () => {
|
||||
it("replaces hyphens with underscores", () => {
|
||||
expect(sanitizeSlug("elementor-hf")).toBe("elementor_hf");
|
||||
});
|
||||
|
||||
it("replaces multiple hyphens", () => {
|
||||
expect(sanitizeSlug("my-custom-type")).toBe("my_custom_type");
|
||||
});
|
||||
|
||||
it("strips leading non-letter characters", () => {
|
||||
expect(sanitizeSlug("123abc")).toBe("abc");
|
||||
expect(sanitizeSlug("_foo")).toBe("foo");
|
||||
});
|
||||
|
||||
it("leaves valid slugs unchanged", () => {
|
||||
expect(sanitizeSlug("posts")).toBe("posts");
|
||||
expect(sanitizeSlug("my_type")).toBe("my_type");
|
||||
});
|
||||
|
||||
it("handles mixed invalid characters", () => {
|
||||
expect(sanitizeSlug("my.custom" as string)).toBe("my_custom");
|
||||
expect(sanitizeSlug("type with spaces" as string)).toBe("type_with_spaces");
|
||||
});
|
||||
|
||||
it("falls back to 'imported' when result would be empty", () => {
|
||||
expect(sanitizeSlug("123")).toBe("imported");
|
||||
expect(sanitizeSlug("---")).toBe("imported");
|
||||
expect(sanitizeSlug("_")).toBe("imported");
|
||||
expect(sanitizeSlug("")).toBe("imported");
|
||||
});
|
||||
|
||||
it("multiple degenerate slugs produce the same fallback (deduplicated during analysis)", () => {
|
||||
// These all collapse to "imported" — analyzeWxr appends _1, _2, etc.
|
||||
expect(sanitizeSlug("123")).toBe("imported");
|
||||
expect(sanitizeSlug("456")).toBe("imported");
|
||||
expect(sanitizeSlug("---")).toBe("imported");
|
||||
});
|
||||
|
||||
it("handles leading hyphens in realistic WP slugs", () => {
|
||||
expect(sanitizeSlug("-elementor-hf")).toBe("elementor_hf");
|
||||
});
|
||||
|
||||
it("lowercases uppercase letters instead of dropping them", () => {
|
||||
expect(sanitizeSlug("MyCustomType")).toBe("mycustomtype");
|
||||
expect(sanitizeSlug("MyPortfolio")).toBe("myportfolio");
|
||||
expect(sanitizeSlug("ALLCAPS")).toBe("allcaps");
|
||||
});
|
||||
|
||||
it("prefixes reserved collection slugs with wp_", () => {
|
||||
expect(sanitizeSlug("media")).toBe("wp_media");
|
||||
expect(sanitizeSlug("content")).toBe("wp_content");
|
||||
expect(sanitizeSlug("users")).toBe("wp_users");
|
||||
expect(sanitizeSlug("revisions")).toBe("wp_revisions");
|
||||
expect(sanitizeSlug("taxonomies")).toBe("wp_taxonomies");
|
||||
expect(sanitizeSlug("options")).toBe("wp_options");
|
||||
expect(sanitizeSlug("audit_logs")).toBe("wp_audit_logs");
|
||||
});
|
||||
});
|
||||
|
||||
describe("mapPostTypeToCollection", () => {
|
||||
it("maps known WordPress post types", () => {
|
||||
expect(mapPostTypeToCollection("post")).toBe("posts");
|
||||
expect(mapPostTypeToCollection("page")).toBe("pages");
|
||||
expect(mapPostTypeToCollection("product")).toBe("products");
|
||||
});
|
||||
|
||||
it("maps attachment to media (known mapping bypasses reserved check)", () => {
|
||||
expect(mapPostTypeToCollection("attachment")).toBe("media");
|
||||
});
|
||||
|
||||
it("sanitizes unknown post types with hyphens (fixes #79)", () => {
|
||||
expect(mapPostTypeToCollection("elementor-hf")).toBe("elementor_hf");
|
||||
expect(mapPostTypeToCollection("my-custom-type")).toBe("my_custom_type");
|
||||
});
|
||||
|
||||
it("sanitizes post types from other common plugins", () => {
|
||||
// WooCommerce
|
||||
expect(mapPostTypeToCollection("shop-order")).toBe("shop_order");
|
||||
// ACF
|
||||
expect(mapPostTypeToCollection("acf-field-group")).toBe("acf_field_group");
|
||||
});
|
||||
|
||||
it("passes through valid unknown post types unchanged", () => {
|
||||
expect(mapPostTypeToCollection("recipes")).toBe("recipes");
|
||||
expect(mapPostTypeToCollection("portfolio")).toBe("portfolio");
|
||||
});
|
||||
|
||||
it("prefixes reserved slugs that fall through to sanitize", () => {
|
||||
// "content" is not in the known mapping, so it hits sanitizeSlug
|
||||
expect(mapPostTypeToCollection("content")).toBe("wp_content");
|
||||
expect(mapPostTypeToCollection("users")).toBe("wp_users");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
|
||||
/**
|
||||
* Regression test for #747: WordPress importer must clear the URL pattern
|
||||
* cache after creating new collections so that public routing immediately
|
||||
* resolves the new patterns. The original symptom of #747 (the execute
|
||||
* step reading a stale DB-persisted manifest) is no longer possible —
|
||||
* the manifest is built fresh per admin request and never cached — but
|
||||
* the URL pattern cache is still per-isolate, and prepare->execute
|
||||
* happens in two separate requests that may or may not share an isolate.
|
||||
*/
|
||||
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { POST } from "../../../src/astro/routes/api/import/wordpress/prepare.js";
|
||||
import { setupTestDatabase } from "../../utils/test-db.js";
|
||||
|
||||
function buildRequest(body: unknown): Request {
|
||||
return new Request("http://localhost/_emdash/api/import/wordpress/prepare", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"X-EmDash-Request": "1",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
function buildContext(emdash: any, user = { id: "test-user", role: 50 }) {
|
||||
return {
|
||||
request: buildRequest({
|
||||
postTypes: [
|
||||
{
|
||||
name: "tablepress_table",
|
||||
collection: "tablepress_table",
|
||||
fields: [{ slug: "title", label: "Title", type: "string", required: true }],
|
||||
},
|
||||
],
|
||||
}),
|
||||
locals: { emdash, user },
|
||||
};
|
||||
}
|
||||
|
||||
describe("POST /api/import/wordpress/prepare", () => {
|
||||
it("invalidates the URL pattern cache after creating a new collection (regression for #747)", async () => {
|
||||
const db = await setupTestDatabase();
|
||||
const invalidateUrlPatternCache = vi.fn();
|
||||
|
||||
const emdash = {
|
||||
db,
|
||||
handleContentCreate: vi.fn(),
|
||||
invalidateUrlPatternCache,
|
||||
};
|
||||
|
||||
const ctx = buildContext(emdash);
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion)
|
||||
const response = await POST(ctx as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(invalidateUrlPatternCache).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("does not invalidate the URL pattern cache when prepareImport makes no schema changes", async () => {
|
||||
const db = await setupTestDatabase();
|
||||
// Pre-create the collection so prepare finds nothing new to do.
|
||||
const { SchemaRegistry } = await import("../../../src/schema/registry.js");
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.createCollection({
|
||||
slug: "tablepress_table",
|
||||
label: "Tablepress Tables",
|
||||
labelSingular: "Tablepress Table",
|
||||
});
|
||||
await registry.createField("tablepress_table", {
|
||||
slug: "title",
|
||||
label: "Title",
|
||||
type: "string",
|
||||
});
|
||||
|
||||
const invalidateUrlPatternCache = vi.fn();
|
||||
const emdash = {
|
||||
db,
|
||||
handleContentCreate: vi.fn(),
|
||||
invalidateUrlPatternCache,
|
||||
};
|
||||
|
||||
const ctx = buildContext(emdash);
|
||||
// eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion)
|
||||
const response = await POST(ctx as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(invalidateUrlPatternCache).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
139
packages/core/tests/unit/import/wp-prepare-schema.test.ts
Normal file
139
packages/core/tests/unit/import/wp-prepare-schema.test.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Tests for WordPress import prepare schema validation
|
||||
*
|
||||
* Regression test for #167: wpPrepareBody schema defined fields as z.record()
|
||||
* but all producers (analyzer, admin UI) send an array of ImportFieldDef.
|
||||
*/
|
||||
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { wpPrepareBody } from "../../../src/api/schemas/import.js";
|
||||
|
||||
describe("wpPrepareBody schema", () => {
|
||||
it("accepts fields as an array of ImportFieldDef objects", () => {
|
||||
const input = {
|
||||
postTypes: [
|
||||
{
|
||||
name: "post",
|
||||
collection: "posts",
|
||||
fields: [
|
||||
{
|
||||
slug: "content",
|
||||
label: "Content",
|
||||
type: "portableText",
|
||||
required: true,
|
||||
searchable: true,
|
||||
},
|
||||
{
|
||||
slug: "excerpt",
|
||||
label: "Excerpt",
|
||||
type: "text",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = wpPrepareBody.safeParse(input);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("accepts fields with optional searchable property", () => {
|
||||
const input = {
|
||||
postTypes: [
|
||||
{
|
||||
name: "page",
|
||||
collection: "pages",
|
||||
fields: [
|
||||
{
|
||||
slug: "featured_image",
|
||||
label: "Featured Image",
|
||||
type: "image",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = wpPrepareBody.safeParse(input);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("accepts postTypes without fields (optional)", () => {
|
||||
const input = {
|
||||
postTypes: [
|
||||
{
|
||||
name: "post",
|
||||
collection: "posts",
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = wpPrepareBody.safeParse(input);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("rejects fields with missing required properties", () => {
|
||||
const input = {
|
||||
postTypes: [
|
||||
{
|
||||
name: "post",
|
||||
collection: "posts",
|
||||
fields: [
|
||||
{
|
||||
slug: "content",
|
||||
// missing label, type, required
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = wpPrepareBody.safeParse(input);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("accepts multiple postTypes with fields", () => {
|
||||
const input = {
|
||||
postTypes: [
|
||||
{
|
||||
name: "post",
|
||||
collection: "posts",
|
||||
fields: [
|
||||
{
|
||||
slug: "content",
|
||||
label: "Content",
|
||||
type: "portableText",
|
||||
required: true,
|
||||
searchable: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: "page",
|
||||
collection: "pages",
|
||||
fields: [
|
||||
{
|
||||
slug: "content",
|
||||
label: "Content",
|
||||
type: "portableText",
|
||||
required: true,
|
||||
searchable: true,
|
||||
},
|
||||
{
|
||||
slug: "featured_image",
|
||||
label: "Featured Image",
|
||||
type: "image",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const result = wpPrepareBody.safeParse(input);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
167
packages/core/tests/unit/import/wxr-date-handling.test.ts
Normal file
167
packages/core/tests/unit/import/wxr-date-handling.test.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* Tests for WXR import date handling
|
||||
*
|
||||
* Verifies that wxrPostToNormalizedItem correctly preserves post dates
|
||||
* and publish status from WordPress exports.
|
||||
*
|
||||
* @see https://github.com/emdash-cms/emdash/issues/322
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import type { WxrPost } from "../../../src/cli/wxr/parser.js";
|
||||
import { wxrPostToNormalizedItem } from "../../../src/import/sources/wxr.js";
|
||||
|
||||
function makePost(overrides: Partial<WxrPost> = {}): WxrPost {
|
||||
return {
|
||||
categories: [],
|
||||
tags: [],
|
||||
meta: new Map(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("wxrPostToNormalizedItem date handling", () => {
|
||||
it("prefers postDateGmt over postDate for the date field", () => {
|
||||
const post = makePost({
|
||||
id: 1,
|
||||
title: "Test Post",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
postName: "test-post",
|
||||
// postDate is site-local time (no timezone), postDateGmt is UTC
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
postDateGmt: "2023-06-15 12:30:00",
|
||||
pubDate: "Thu, 15 Jun 2023 12:30:00 +0000",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
// Should use the GMT date, not the site-local date
|
||||
expect(item.date.toISOString()).toBe("2023-06-15T12:30:00.000Z");
|
||||
});
|
||||
|
||||
it("falls back to pubDate when postDateGmt is missing", () => {
|
||||
const post = makePost({
|
||||
id: 2,
|
||||
title: "Post without GMT date",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
postName: "no-gmt",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
pubDate: "Thu, 15 Jun 2023 12:30:00 +0000",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
// pubDate is RFC 2822 with timezone, should parse correctly to UTC
|
||||
expect(item.date.toISOString()).toBe("2023-06-15T12:30:00.000Z");
|
||||
});
|
||||
|
||||
it("falls back to postDate when both postDateGmt and pubDate are missing", () => {
|
||||
const post = makePost({
|
||||
id: 3,
|
||||
title: "Post with only local date",
|
||||
postType: "post",
|
||||
status: "draft",
|
||||
postName: "local-only",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
// postDate is site-local, parsed as-is (imprecise but best available)
|
||||
expect(item.date).toBeInstanceOf(Date);
|
||||
expect(item.date.getTime()).not.toBeNaN();
|
||||
});
|
||||
|
||||
it("defaults to current time when no dates are available", () => {
|
||||
const before = Date.now();
|
||||
const post = makePost({
|
||||
id: 4,
|
||||
title: "Post with no dates",
|
||||
postType: "post",
|
||||
status: "draft",
|
||||
postName: "no-dates",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
const after = Date.now();
|
||||
|
||||
expect(item.date.getTime()).toBeGreaterThanOrEqual(before);
|
||||
expect(item.date.getTime()).toBeLessThanOrEqual(after);
|
||||
});
|
||||
|
||||
it("ignores the WXR sentinel value '0000-00-00 00:00:00' for postDateGmt", () => {
|
||||
const post = makePost({
|
||||
id: 5,
|
||||
title: "Draft with zero GMT date",
|
||||
postType: "post",
|
||||
status: "draft",
|
||||
postName: "zero-gmt",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
postDateGmt: "0000-00-00 00:00:00",
|
||||
pubDate: "Thu, 15 Jun 2023 12:30:00 +0000",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
// Should NOT use the zero sentinel, should fall back to pubDate
|
||||
expect(item.date.toISOString()).toBe("2023-06-15T12:30:00.000Z");
|
||||
});
|
||||
|
||||
it("uses postModifiedGmt over postModified for the modified field", () => {
|
||||
const post = makePost({
|
||||
id: 6,
|
||||
title: "Modified Post",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
postName: "modified",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
postDateGmt: "2023-06-15 12:30:00",
|
||||
postModified: "2023-07-01 10:00:00",
|
||||
postModifiedGmt: "2023-07-01 14:00:00",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
expect(item.modified).toBeInstanceOf(Date);
|
||||
expect(item.modified!.toISOString()).toBe("2023-07-01T14:00:00.000Z");
|
||||
});
|
||||
|
||||
it("returns undefined for modified when no modified dates exist", () => {
|
||||
const post = makePost({
|
||||
id: 7,
|
||||
title: "Never Modified",
|
||||
postType: "post",
|
||||
status: "publish",
|
||||
postName: "never-modified",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
postDateGmt: "2023-06-15 12:30:00",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
expect(item.modified).toBeUndefined();
|
||||
});
|
||||
|
||||
it("skips sentinel '0000-00-00 00:00:00' for postModifiedGmt and falls back", () => {
|
||||
const post = makePost({
|
||||
id: 8,
|
||||
title: "Draft with zero modified GMT",
|
||||
postType: "post",
|
||||
status: "draft",
|
||||
postName: "zero-modified-gmt",
|
||||
postDate: "2023-06-15 08:30:00",
|
||||
postDateGmt: "2023-06-15 12:30:00",
|
||||
postModified: "2023-07-01 10:00:00",
|
||||
postModifiedGmt: "0000-00-00 00:00:00",
|
||||
});
|
||||
|
||||
const item = wxrPostToNormalizedItem(post, new Map());
|
||||
|
||||
// Should skip the sentinel and fall back to postModified
|
||||
expect(item.modified).toBeInstanceOf(Date);
|
||||
expect(item.modified!.getTime()).not.toBeNaN();
|
||||
});
|
||||
});
|
||||
224
packages/core/tests/unit/loader-cursor-pagination.test.ts
Normal file
224
packages/core/tests/unit/loader-cursor-pagination.test.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { handleContentCreate } from "../../src/api/index.js";
|
||||
import { decodeCursor } from "../../src/database/repositories/types.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
import { emdashLoader } from "../../src/loader.js";
|
||||
import { runWithContext } from "../../src/request-context.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../utils/test-db.js";
|
||||
|
||||
describe("Loader cursor pagination", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
async function createPublishedPost(title: string) {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title },
|
||||
status: "published",
|
||||
});
|
||||
if (!result.success) throw new Error("Failed to create post");
|
||||
return result.data!.item;
|
||||
}
|
||||
|
||||
it("should return nextCursor when there are more results", async () => {
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post", limit: 3 } }),
|
||||
);
|
||||
|
||||
expect(result.entries).toHaveLength(3);
|
||||
expect(result.nextCursor).toBeTruthy();
|
||||
|
||||
// Verify the cursor is a valid encoded cursor
|
||||
const decoded = decodeCursor(result.nextCursor!);
|
||||
expect(decoded).not.toBeNull();
|
||||
expect(decoded!.orderValue).toBeTruthy();
|
||||
expect(decoded!.id).toBeTruthy();
|
||||
});
|
||||
|
||||
it("should not return nextCursor when all results fit in one page", async () => {
|
||||
await createPublishedPost("Post 1");
|
||||
await createPublishedPost("Post 2");
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post", limit: 10 } }),
|
||||
);
|
||||
|
||||
expect(result.entries).toHaveLength(2);
|
||||
expect(result.nextCursor).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should not return nextCursor when no limit is set", async () => {
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post" } }),
|
||||
);
|
||||
|
||||
expect(result.entries).toHaveLength(3);
|
||||
expect(result.nextCursor).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should paginate through all results using cursor", async () => {
|
||||
for (let i = 1; i <= 5; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
|
||||
// First page
|
||||
const page1 = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post", limit: 2 } }),
|
||||
);
|
||||
expect(page1.entries).toHaveLength(2);
|
||||
expect(page1.nextCursor).toBeTruthy();
|
||||
|
||||
// Second page
|
||||
const page2 = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({
|
||||
filter: { type: "post", limit: 2, cursor: page1.nextCursor },
|
||||
}),
|
||||
);
|
||||
expect(page2.entries).toHaveLength(2);
|
||||
expect(page2.nextCursor).toBeTruthy();
|
||||
|
||||
// Third page (last item)
|
||||
const page3 = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({
|
||||
filter: { type: "post", limit: 2, cursor: page2.nextCursor },
|
||||
}),
|
||||
);
|
||||
expect(page3.entries).toHaveLength(1);
|
||||
expect(page3.nextCursor).toBeUndefined();
|
||||
|
||||
// Verify no overlap between pages
|
||||
const allIds = [
|
||||
...page1.entries!.map((e) => e.data.id),
|
||||
...page2.entries!.map((e) => e.data.id),
|
||||
...page3.entries!.map((e) => e.data.id),
|
||||
];
|
||||
const uniqueIds = new Set(allIds);
|
||||
expect(uniqueIds.size).toBe(5);
|
||||
});
|
||||
|
||||
it("should maintain sort order across pages", async () => {
|
||||
// Create posts with different titles to test ascending sort
|
||||
const titles = ["Delta", "Alpha", "Echo", "Bravo", "Charlie"];
|
||||
for (const title of titles) {
|
||||
await createPublishedPost(title);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
|
||||
// Paginate with ascending title order
|
||||
const allEntries: Array<{ data: Record<string, unknown> }> = [];
|
||||
let cursor: string | undefined;
|
||||
|
||||
for (let page = 0; page < 10; page++) {
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({
|
||||
filter: {
|
||||
type: "post",
|
||||
limit: 2,
|
||||
cursor,
|
||||
orderBy: { title: "asc" },
|
||||
},
|
||||
}),
|
||||
);
|
||||
allEntries.push(...result.entries!);
|
||||
cursor = result.nextCursor;
|
||||
if (!cursor) break;
|
||||
}
|
||||
|
||||
expect(allEntries).toHaveLength(5);
|
||||
const sortedTitles = allEntries.map((e) => e.data.title);
|
||||
expect(sortedTitles).toEqual(["Alpha", "Bravo", "Charlie", "Delta", "Echo"]);
|
||||
});
|
||||
|
||||
it("should return empty entries with no nextCursor for empty collection", async () => {
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post", limit: 10 } }),
|
||||
);
|
||||
|
||||
expect(result.entries).toHaveLength(0);
|
||||
expect(result.nextCursor).toBeUndefined();
|
||||
});
|
||||
|
||||
it("should reject invalid cursor with a clear error", async () => {
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
|
||||
// Invalid cursors now fail loud rather than silently re-fetching the
|
||||
// first page. The loader catches `InvalidCursorError` from
|
||||
// `decodeCursor` and surfaces it via the loader-result envelope.
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({
|
||||
filter: { type: "post", limit: 10, cursor: "not-a-valid-cursor" },
|
||||
}),
|
||||
);
|
||||
|
||||
expect((result as { error?: Error }).error?.message).toMatch(/Invalid pagination cursor/);
|
||||
});
|
||||
|
||||
it("should work with limit of 1", async () => {
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
const allEntries: Array<{ data: Record<string, unknown> }> = [];
|
||||
let cursor: string | undefined;
|
||||
|
||||
// Page through one at a time
|
||||
for (let page = 0; page < 10; page++) {
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({
|
||||
filter: { type: "post", limit: 1, cursor },
|
||||
}),
|
||||
);
|
||||
allEntries.push(...result.entries!);
|
||||
cursor = result.nextCursor;
|
||||
if (!cursor) break;
|
||||
}
|
||||
|
||||
expect(allEntries).toHaveLength(3);
|
||||
const uniqueIds = new Set(allEntries.map((e) => e.data.id));
|
||||
expect(uniqueIds.size).toBe(3);
|
||||
});
|
||||
|
||||
it("should include nextCursor in collection-level return alongside cacheHint", async () => {
|
||||
for (let i = 1; i <= 3; i++) {
|
||||
await createPublishedPost(`Post ${i}`);
|
||||
}
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post", limit: 2 } }),
|
||||
);
|
||||
|
||||
// Both cacheHint and nextCursor should be present
|
||||
expect(result.cacheHint).toBeDefined();
|
||||
expect(result.cacheHint!.tags).toEqual(["post"]);
|
||||
expect(result.nextCursor).toBeTruthy();
|
||||
});
|
||||
});
|
||||
129
packages/core/tests/unit/loader-media-src.test.ts
Normal file
129
packages/core/tests/unit/loader-media-src.test.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { handleContentCreate } from "../../src/api/index.js";
|
||||
import { RevisionRepository } from "../../src/database/repositories/revision.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
import { emdashLoader } from "../../src/loader.js";
|
||||
import { runWithContext } from "../../src/request-context.js";
|
||||
import { SchemaRegistry } from "../../src/schema/registry.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../utils/test-db.js";
|
||||
|
||||
const MEDIA_ID = "01KPD97MWB5DVHBHK69TW55KY3";
|
||||
const MEDIA_URL = `/_emdash/api/media/file/${MEDIA_ID}`;
|
||||
|
||||
describe("Loader media src", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
const registry = new SchemaRegistry(db);
|
||||
await registry.createField("post", {
|
||||
slug: "hero",
|
||||
label: "Hero",
|
||||
type: "image",
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
it("resolves bare media ID in src to a file URL", async () => {
|
||||
await handleContentCreate(db, "post", {
|
||||
data: {
|
||||
title: "Test",
|
||||
hero: { provider: "local", id: "", src: MEDIA_ID },
|
||||
},
|
||||
status: "published",
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post" } }),
|
||||
);
|
||||
|
||||
const hero = result.entries![0]!.data.hero as Record<string, unknown>;
|
||||
expect(hero.id).toBe(MEDIA_ID);
|
||||
expect(hero.src).toBe(MEDIA_URL);
|
||||
});
|
||||
|
||||
it("resolves bare media ID in loadEntry", async () => {
|
||||
await handleContentCreate(db, "post", {
|
||||
data: {
|
||||
title: "Test",
|
||||
hero: { provider: "local", id: "", src: MEDIA_ID },
|
||||
},
|
||||
status: "published",
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const collection = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post" } }),
|
||||
);
|
||||
const entryId = collection.entries![0]!.data.id as string;
|
||||
|
||||
const entry = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadEntry!({ filter: { type: "post", id: entryId } }),
|
||||
);
|
||||
|
||||
const hero = entry!.data.hero as Record<string, unknown>;
|
||||
expect(hero.id).toBe(MEDIA_ID);
|
||||
expect(hero.src).toBe(MEDIA_URL);
|
||||
});
|
||||
|
||||
it("does not rewrite an existing local media URL", async () => {
|
||||
await handleContentCreate(db, "post", {
|
||||
data: {
|
||||
title: "Test",
|
||||
hero: { provider: "local", id: "", src: MEDIA_URL },
|
||||
},
|
||||
status: "published",
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const result = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadCollection!({ filter: { type: "post" } }),
|
||||
);
|
||||
|
||||
const hero = result.entries![0]!.data.hero as Record<string, unknown>;
|
||||
expect(hero.id).toBe(MEDIA_ID);
|
||||
expect(hero.src).toBe(MEDIA_URL);
|
||||
});
|
||||
|
||||
it("resolves bare media ID in revision-backed loadEntry", async () => {
|
||||
const createResult = await handleContentCreate(db, "post", {
|
||||
data: {
|
||||
title: "Published",
|
||||
hero: { provider: "local", id: "", src: MEDIA_URL },
|
||||
},
|
||||
status: "published",
|
||||
});
|
||||
if (!createResult.success) throw new Error("Failed to create post");
|
||||
|
||||
const revisionRepo = new RevisionRepository(db);
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: createResult.data!.item.id,
|
||||
data: {
|
||||
title: "Draft",
|
||||
hero: { provider: "local", id: "", src: MEDIA_ID },
|
||||
},
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const entry = await runWithContext({ editMode: false, db }, () =>
|
||||
loader.loadEntry!({
|
||||
filter: {
|
||||
type: "post",
|
||||
id: createResult.data!.item.id,
|
||||
revisionId: revision.id,
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const hero = entry!.data.hero as Record<string, unknown>;
|
||||
expect(hero.id).toBe(MEDIA_ID);
|
||||
expect(hero.src).toBe(MEDIA_URL);
|
||||
});
|
||||
});
|
||||
121
packages/core/tests/unit/loader-revision-preview.test.ts
Normal file
121
packages/core/tests/unit/loader-revision-preview.test.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { handleContentCreate } from "../../src/api/index.js";
|
||||
import { ContentRepository } from "../../src/database/repositories/content.js";
|
||||
import { RevisionRepository } from "../../src/database/repositories/revision.js";
|
||||
import type { Database } from "../../src/database/types.js";
|
||||
import { emdashLoader } from "../../src/loader.js";
|
||||
import { runWithContext } from "../../src/request-context.js";
|
||||
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../utils/test-db.js";
|
||||
|
||||
describe("Loader revision preview", () => {
|
||||
let db: Kysely<Database>;
|
||||
let revisionRepo: RevisionRepository;
|
||||
|
||||
beforeEach(async () => {
|
||||
db = await setupTestDatabaseWithCollections();
|
||||
revisionRepo = new RevisionRepository(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await teardownTestDatabase(db);
|
||||
});
|
||||
|
||||
async function createPublishedPost(title: string) {
|
||||
const result = await handleContentCreate(db, "post", {
|
||||
data: { title },
|
||||
status: "published",
|
||||
});
|
||||
if (!result.success) throw new Error("Failed to create post");
|
||||
return result.data!.item;
|
||||
}
|
||||
|
||||
it("should return Date objects for system date fields in revision preview", async () => {
|
||||
const post = await createPublishedPost("Test Post");
|
||||
|
||||
// Publish the post to set published_at
|
||||
const contentRepo = new ContentRepository(db);
|
||||
await contentRepo.publish("post", post.id);
|
||||
|
||||
// Create a revision (simulating a draft edit)
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: post.id,
|
||||
data: { title: "Draft Title" },
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const slug = post.slug!;
|
||||
const result = await runWithContext({ editMode: true, db }, () =>
|
||||
loader.loadEntry!({ filter: { type: "post", id: slug, revisionId: revision.id } }),
|
||||
);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result).not.toHaveProperty("error");
|
||||
const data = (result as { data: Record<string, unknown> }).data;
|
||||
|
||||
// These must be Date objects, not ISO strings
|
||||
expect(data.createdAt).toBeInstanceOf(Date);
|
||||
expect(data.updatedAt).toBeInstanceOf(Date);
|
||||
expect(data.publishedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should return null for unpopulated date fields in revision preview", async () => {
|
||||
// Create a draft post (no publishedAt)
|
||||
const createResult = await handleContentCreate(db, "post", {
|
||||
data: { title: "Draft Post" },
|
||||
status: "draft",
|
||||
});
|
||||
if (!createResult.success) throw new Error("Failed to create post");
|
||||
const post = createResult.data!.item;
|
||||
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: post.id,
|
||||
data: { title: "Updated Draft" },
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const slug = post.slug!;
|
||||
const entry = await runWithContext({ editMode: true, db }, () =>
|
||||
loader.loadEntry!({ filter: { type: "post", id: slug, revisionId: revision.id } }),
|
||||
);
|
||||
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry).not.toHaveProperty("error");
|
||||
const data = (entry as { data: Record<string, unknown> }).data;
|
||||
|
||||
// Draft posts have no publishedAt
|
||||
expect(data.publishedAt).toBeNull();
|
||||
// But createdAt and updatedAt should still be Date objects
|
||||
expect(data.createdAt).toBeInstanceOf(Date);
|
||||
expect(data.updatedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it("should use revision content fields while preserving system date types", async () => {
|
||||
const post = await createPublishedPost("Original Title");
|
||||
|
||||
const revision = await revisionRepo.create({
|
||||
collection: "post",
|
||||
entryId: post.id,
|
||||
data: { title: "Revised Title" },
|
||||
});
|
||||
|
||||
const loader = emdashLoader();
|
||||
const slug = post.slug!;
|
||||
const entry = await runWithContext({ editMode: true, db }, () =>
|
||||
loader.loadEntry!({ filter: { type: "post", id: slug, revisionId: revision.id } }),
|
||||
);
|
||||
|
||||
expect(entry).toBeDefined();
|
||||
expect(entry).not.toHaveProperty("error");
|
||||
const data = (entry as { data: Record<string, unknown> }).data;
|
||||
|
||||
// Content from revision
|
||||
expect(data.title).toBe("Revised Title");
|
||||
// System dates from content table, as Date objects
|
||||
expect(data.createdAt).toBeInstanceOf(Date);
|
||||
expect(data.updatedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
});
|
||||
1133
packages/core/tests/unit/mcp/authorization.test.ts
Normal file
1133
packages/core/tests/unit/mcp/authorization.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
420
packages/core/tests/unit/media/normalize.test.ts
Normal file
420
packages/core/tests/unit/media/normalize.test.ts
Normal file
@@ -0,0 +1,420 @@
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { normalizeMediaValue } from "../../../src/media/normalize.js";
|
||||
import type { MediaProvider, MediaProviderItem } from "../../../src/media/types.js";
|
||||
|
||||
function mockProvider(getResult: MediaProviderItem | null = null): MediaProvider {
|
||||
return {
|
||||
list: vi.fn().mockResolvedValue({ items: [], nextCursor: undefined }),
|
||||
get: vi.fn().mockResolvedValue(getResult),
|
||||
getEmbed: vi.fn().mockReturnValue({ type: "image", src: "/test" }),
|
||||
};
|
||||
}
|
||||
|
||||
function getProvider(
|
||||
providers: Record<string, MediaProvider>,
|
||||
): (id: string) => MediaProvider | undefined {
|
||||
return (id: string) => providers[id];
|
||||
}
|
||||
|
||||
describe("normalizeMediaValue", () => {
|
||||
it("returns null for null input", async () => {
|
||||
const result = await normalizeMediaValue(null, getProvider({}));
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for undefined input", async () => {
|
||||
const result = await normalizeMediaValue(undefined, getProvider({}));
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("converts bare HTTP URL to external MediaValue", async () => {
|
||||
const result = await normalizeMediaValue("https://example.com/photo.jpg", getProvider({}));
|
||||
expect(result).toEqual({
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "https://example.com/photo.jpg",
|
||||
});
|
||||
});
|
||||
|
||||
it("converts bare HTTPS URL to external MediaValue", async () => {
|
||||
const result = await normalizeMediaValue("http://example.com/photo.jpg", getProvider({}));
|
||||
expect(result).toEqual({
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "http://example.com/photo.jpg",
|
||||
});
|
||||
});
|
||||
|
||||
it("converts bare internal media URL to full local MediaValue via provider", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
alt: "A photo",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
"/_emdash/api/media/file/01ABC.jpg",
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(local.get).toHaveBeenCalledWith("01ABC.jpg");
|
||||
expect(result).toEqual({
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
alt: "A photo",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
});
|
||||
});
|
||||
|
||||
it("falls back to external for internal URL when local provider unavailable", async () => {
|
||||
const result = await normalizeMediaValue("/_emdash/api/media/file/01ABC.jpg", getProvider({}));
|
||||
expect(result).toEqual({
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "/_emdash/api/media/file/01ABC.jpg",
|
||||
});
|
||||
});
|
||||
|
||||
it("falls back to external for internal URL when provider.get returns null", async () => {
|
||||
const local = mockProvider(null);
|
||||
const result = await normalizeMediaValue(
|
||||
"/_emdash/api/media/file/01ABC.jpg",
|
||||
getProvider({ local }),
|
||||
);
|
||||
expect(result).toEqual({
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "/_emdash/api/media/file/01ABC.jpg",
|
||||
});
|
||||
});
|
||||
|
||||
it("fills missing dimensions from local provider", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
alt: "My photo",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(local.get).toHaveBeenCalledWith("01ABC");
|
||||
expect(result).toMatchObject({
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
alt: "My photo",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
});
|
||||
});
|
||||
|
||||
it("fills missing storageKey from local provider", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(local.get).toHaveBeenCalledWith("01ABC");
|
||||
expect(result).toMatchObject({
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
});
|
||||
});
|
||||
|
||||
it("fills missing mimeType and filename from local provider", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(result).toMatchObject({
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
});
|
||||
});
|
||||
|
||||
it("fills dimensions from external provider", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "cf-abc123",
|
||||
filename: "hero.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
meta: { variants: ["public"] },
|
||||
};
|
||||
const cfImages = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "cloudflare-images",
|
||||
id: "cf-abc123",
|
||||
alt: "Hero banner",
|
||||
previewUrl: "https://imagedelivery.net/hash/cf-abc123/w=400",
|
||||
},
|
||||
getProvider({ "cloudflare-images": cfImages }),
|
||||
);
|
||||
|
||||
expect(cfImages.get).toHaveBeenCalledWith("cf-abc123");
|
||||
expect(result).toMatchObject({
|
||||
provider: "cloudflare-images",
|
||||
id: "cf-abc123",
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
alt: "Hero banner",
|
||||
previewUrl: "https://imagedelivery.net/hash/cf-abc123/w=400",
|
||||
});
|
||||
});
|
||||
|
||||
it("does not call provider when dimensions already present", async () => {
|
||||
const cfImages = mockProvider(null);
|
||||
|
||||
const value = {
|
||||
provider: "cloudflare-images",
|
||||
id: "cf-abc123",
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
filename: "hero.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
alt: "Hero banner",
|
||||
previewUrl: "https://imagedelivery.net/hash/cf-abc123/w=400",
|
||||
meta: { variants: ["public"] },
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({ "cloudflare-images": cfImages }));
|
||||
|
||||
expect(cfImages.get).not.toHaveBeenCalled();
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("preserves caller alt over provider alt", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
alt: "Provider alt text",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
alt: "User alt text",
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(result!.alt).toBe("User alt text");
|
||||
});
|
||||
|
||||
it("uses provider alt when caller alt is not set", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
alt: "Provider alt text",
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
expect(result!.alt).toBe("Provider alt text");
|
||||
});
|
||||
|
||||
it("returns value as-is for unknown provider", async () => {
|
||||
const value = {
|
||||
provider: "some-unknown-provider",
|
||||
id: "item-123",
|
||||
width: 800,
|
||||
height: 600,
|
||||
alt: "Some image",
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({}));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("does not fail when provider.get returns null", async () => {
|
||||
const local = mockProvider(null);
|
||||
|
||||
const value = {
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
alt: "My photo",
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({ local }));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("does not fail when provider has no get method", async () => {
|
||||
const local: MediaProvider = {
|
||||
list: vi.fn().mockResolvedValue({ items: [] }),
|
||||
getEmbed: vi.fn().mockReturnValue({ type: "image", src: "/test" }),
|
||||
// no get method
|
||||
};
|
||||
|
||||
const value = {
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
alt: "My photo",
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({ local }));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("returns external value with src as-is (no dimension detection)", async () => {
|
||||
const value = {
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "https://example.com/photo.jpg",
|
||||
alt: "A photo",
|
||||
width: 800,
|
||||
height: 600,
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({}));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("does not call provider for external values without dimensions", async () => {
|
||||
const value = {
|
||||
provider: "external",
|
||||
id: "",
|
||||
src: "https://example.com/photo.jpg",
|
||||
alt: "A photo",
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({}));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
|
||||
it("strips src from local media values", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue(
|
||||
{
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
src: "/_emdash/api/media/file/01ABC.jpg",
|
||||
alt: "My photo",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
},
|
||||
getProvider({ local }),
|
||||
);
|
||||
|
||||
// src should be removed for local media - it's derived at display time
|
||||
expect(result!.src).toBeUndefined();
|
||||
});
|
||||
|
||||
it("defaults provider to local when not specified", async () => {
|
||||
const providerItem: MediaProviderItem = {
|
||||
id: "01ABC",
|
||||
filename: "photo.jpg",
|
||||
mimeType: "image/jpeg",
|
||||
width: 1200,
|
||||
height: 800,
|
||||
meta: { storageKey: "01ABC.jpg" },
|
||||
};
|
||||
const local = mockProvider(providerItem);
|
||||
|
||||
const result = await normalizeMediaValue({ id: "01ABC" }, getProvider({ local }));
|
||||
|
||||
expect(result!.provider).toBe("local");
|
||||
expect(local.get).toHaveBeenCalledWith("01ABC");
|
||||
});
|
||||
|
||||
it("handles provider.get throwing gracefully", async () => {
|
||||
const local: MediaProvider = {
|
||||
list: vi.fn().mockResolvedValue({ items: [] }),
|
||||
get: vi.fn().mockRejectedValue(new Error("DB error")),
|
||||
getEmbed: vi.fn().mockReturnValue({ type: "image", src: "/test" }),
|
||||
};
|
||||
|
||||
const value = {
|
||||
provider: "local",
|
||||
id: "01ABC",
|
||||
alt: "My photo",
|
||||
};
|
||||
|
||||
const result = await normalizeMediaValue(value, getProvider({ local }));
|
||||
expect(result).toEqual(value);
|
||||
});
|
||||
});
|
||||
143
packages/core/tests/unit/media/placeholder.test.ts
Normal file
143
packages/core/tests/unit/media/placeholder.test.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { generatePlaceholder } from "../../../src/media/placeholder.js";
|
||||
|
||||
const CSS_RGB_PATTERN = /^rgb\(\d+,\s?\d+,\s?\d+\)$/;
|
||||
|
||||
/** Minimal 4x4 solid red JPEG */
|
||||
const JPEG_4x4 = Buffer.from(
|
||||
"/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/2wBDAQMDAwQDBAgEBAgQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/wAARCAAEAAQDAREAAhEBAxEB/8QAFAABAAAAAAAAAAAAAAAAAAAACP/EABQQAQAAAAAAAAAAAAAAAAAAAAD/xAAVAQEBAAAAAAAAAAAAAAAAAAAHCf/EABQRAQAAAAAAAAAAAAAAAAAAAAD/2gAMAwEAAhEDEQA/ADoDFU3/2Q==",
|
||||
"base64",
|
||||
);
|
||||
|
||||
/** Minimal 4x4 solid red PNG */
|
||||
const PNG_4x4 = Buffer.from(
|
||||
"iVBORw0KGgoAAAANSUhEUgAAAAQAAAAEAQMAAACTPww9AAAAIGNIUk0AAHomAACAhAAA+gAAAIDoAAB1MAAA6mAAADqYAAAXcJy6UTwAAAAGUExURf8AAP///0EdNBEAAAABYktHRAH/Ai3eAAAAB3RJTUUH6gIcETMVn1ZhnwAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAyNi0wMi0yOFQxNzo1MToyMCswMDowMJE6EiQAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMjYtMDItMjhUMTc6NTE6MjArMDA6MDDgZ6qYAAAAKHRFWHRkYXRlOnRpbWVzdGFtcAAyMDI2LTAyLTI4VDE3OjUxOjIwKzAwOjAwt3KLRwAAAAtJREFUCNdjYIAAAAAIAAEvIN0xAAAAAElFTkSuQmCC",
|
||||
"base64",
|
||||
);
|
||||
|
||||
/** 100x100 solid blue JPEG (for downsampling test) */
|
||||
const JPEG_100x100 = Buffer.from(
|
||||
"/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/2wBDAQMDAwQDBAgEBAgQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/wAARCABkAGQDAREAAhEBAxEB/8QAFQABAQAAAAAAAAAAAAAAAAAAAAn/xAAUEAEAAAAAAAAAAAAAAAAAAAAA/8QAFgEBAQEAAAAAAAAAAAAAAAAAAAYJ/8QAFBEBAAAAAAAAAAAAAAAAAAAAAP/aAAwDAQACEQMRAD8Anu1TQ4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD//2Q==",
|
||||
"base64",
|
||||
);
|
||||
|
||||
describe("generatePlaceholder", () => {
|
||||
it("generates blurhash and dominantColor from a JPEG", async () => {
|
||||
const result = await generatePlaceholder(new Uint8Array(JPEG_4x4), "image/jpeg");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.blurhash).toBeTruthy();
|
||||
expect(typeof result!.blurhash).toBe("string");
|
||||
expect(result!.dominantColor).toBeTruthy();
|
||||
expect(typeof result!.dominantColor).toBe("string");
|
||||
});
|
||||
|
||||
it("generates blurhash and dominantColor from a PNG", async () => {
|
||||
const result = await generatePlaceholder(new Uint8Array(PNG_4x4), "image/png");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.blurhash).toBeTruthy();
|
||||
expect(result!.dominantColor).toBeTruthy();
|
||||
});
|
||||
|
||||
it("returns a valid CSS color string for dominantColor", async () => {
|
||||
const result = await generatePlaceholder(new Uint8Array(JPEG_4x4), "image/jpeg");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
// Should be rgb() format from rgbColorToCssString
|
||||
expect(result!.dominantColor).toMatch(CSS_RGB_PATTERN);
|
||||
});
|
||||
|
||||
it("returns null for non-image MIME types", async () => {
|
||||
const buffer = new Uint8Array([0, 1, 2, 3]);
|
||||
const result = await generatePlaceholder(buffer, "application/pdf");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for unsupported image types", async () => {
|
||||
const buffer = new Uint8Array([0, 1, 2, 3]);
|
||||
const result = await generatePlaceholder(buffer, "image/svg+xml");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null for corrupt image data", async () => {
|
||||
const buffer = new Uint8Array([0xff, 0xd8, 0xff, 0xe0, 0, 0, 0]);
|
||||
const result = await generatePlaceholder(buffer, "image/jpeg");
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("handles larger images by downsampling", async () => {
|
||||
const result = await generatePlaceholder(new Uint8Array(JPEG_100x100), "image/jpeg");
|
||||
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.blurhash).toBeTruthy();
|
||||
// Blurhash string length should be reasonable (not huge from 100x100)
|
||||
expect(result!.blurhash.length).toBeLessThan(50);
|
||||
});
|
||||
|
||||
it("returns null when image dimensions from headers exceed memory budget", async () => {
|
||||
// Minimal valid JPEG with SOF0 declaring 5000x4000 dimensions.
|
||||
// SOF0 marker (FFC0) stores height (2 bytes) then width (2 bytes).
|
||||
// 5000×4000×4 = 80 MB > 32 MB threshold.
|
||||
const sof0 = new Uint8Array([
|
||||
0xff,
|
||||
0xd8, // SOI
|
||||
0xff,
|
||||
0xe0,
|
||||
0x00,
|
||||
0x10, // APP0 marker + length
|
||||
0x4a,
|
||||
0x46,
|
||||
0x49,
|
||||
0x46,
|
||||
0x00, // "JFIF\0"
|
||||
0x01,
|
||||
0x01,
|
||||
0x00,
|
||||
0x00,
|
||||
0x01,
|
||||
0x00,
|
||||
0x01,
|
||||
0x00,
|
||||
0x00, // JFIF fields
|
||||
0xff,
|
||||
0xc0,
|
||||
0x00,
|
||||
0x0b, // SOF0 marker + length
|
||||
0x08, // precision
|
||||
0x0f,
|
||||
0xa0, // height = 4000
|
||||
0x13,
|
||||
0x88, // width = 5000
|
||||
0x01, // number of components
|
||||
0x01,
|
||||
0x11,
|
||||
0x00, // component
|
||||
]);
|
||||
const result = await generatePlaceholder(sof0, "image/jpeg");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("returns null when fallback dimensions exceed memory budget", async () => {
|
||||
// Unrecognizable buffer — image-size can't parse it, so fallback dims are used
|
||||
const buffer = new Uint8Array([0x00, 0x01, 0x02, 0x03]);
|
||||
const result = await generatePlaceholder(buffer, "image/jpeg", {
|
||||
width: 5000,
|
||||
height: 4000,
|
||||
});
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("still generates placeholder for small images with dimensions param", async () => {
|
||||
const result = await generatePlaceholder(new Uint8Array(JPEG_4x4), "image/jpeg", {
|
||||
width: 4,
|
||||
height: 4,
|
||||
});
|
||||
expect(result).not.toBeNull();
|
||||
expect(result!.blurhash).toBeTruthy();
|
||||
});
|
||||
});
|
||||
57
packages/core/tests/unit/media/thumbnail.test.ts
Normal file
57
packages/core/tests/unit/media/thumbnail.test.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { THUMBNAIL_MAX_DIMENSION, computeThumbnailSize } from "../../../src/media/thumbnail.js";
|
||||
|
||||
describe("computeThumbnailSize", () => {
|
||||
it("scales a square image to the max dimension", () => {
|
||||
expect(computeThumbnailSize(5000, 5000)).toEqual({
|
||||
width: THUMBNAIL_MAX_DIMENSION,
|
||||
height: THUMBNAIL_MAX_DIMENSION,
|
||||
});
|
||||
});
|
||||
|
||||
it("scales a wide image to fit within the bounding box", () => {
|
||||
const result = computeThumbnailSize(4000, 2000);
|
||||
expect(result.width).toBe(THUMBNAIL_MAX_DIMENSION);
|
||||
expect(result.height).toBe(THUMBNAIL_MAX_DIMENSION / 2);
|
||||
});
|
||||
|
||||
it("scales a tall image to fit within the bounding box", () => {
|
||||
const result = computeThumbnailSize(2000, 4000);
|
||||
expect(result.width).toBe(THUMBNAIL_MAX_DIMENSION / 2);
|
||||
expect(result.height).toBe(THUMBNAIL_MAX_DIMENSION);
|
||||
});
|
||||
|
||||
it("clamps extreme tall aspect ratios to the bounding box", () => {
|
||||
// Without clamping, naive code would produce a 64×537600 canvas.
|
||||
const result = computeThumbnailSize(100, 840_000);
|
||||
expect(result.width).toBeLessThanOrEqual(THUMBNAIL_MAX_DIMENSION);
|
||||
expect(result.height).toBeLessThanOrEqual(THUMBNAIL_MAX_DIMENSION);
|
||||
expect(result.width).toBeGreaterThanOrEqual(1);
|
||||
expect(result.height).toBe(THUMBNAIL_MAX_DIMENSION);
|
||||
});
|
||||
|
||||
it("clamps extreme wide aspect ratios to the bounding box", () => {
|
||||
const result = computeThumbnailSize(840_000, 100);
|
||||
expect(result.width).toBe(THUMBNAIL_MAX_DIMENSION);
|
||||
expect(result.height).toBeGreaterThanOrEqual(1);
|
||||
expect(result.height).toBeLessThanOrEqual(THUMBNAIL_MAX_DIMENSION);
|
||||
});
|
||||
|
||||
it("never upscales smaller images", () => {
|
||||
expect(computeThumbnailSize(10, 20)).toEqual({ width: 10, height: 20 });
|
||||
expect(computeThumbnailSize(1, 1)).toEqual({ width: 1, height: 1 });
|
||||
});
|
||||
|
||||
it("returns a 1x1 fallback for zero or negative dimensions", () => {
|
||||
expect(computeThumbnailSize(0, 100)).toEqual({ width: 1, height: 1 });
|
||||
expect(computeThumbnailSize(100, 0)).toEqual({ width: 1, height: 1 });
|
||||
expect(computeThumbnailSize(-5, 10)).toEqual({ width: 1, height: 1 });
|
||||
});
|
||||
|
||||
it("rounds fractional dimensions", () => {
|
||||
const result = computeThumbnailSize(300, 199);
|
||||
expect(Number.isInteger(result.width)).toBe(true);
|
||||
expect(Number.isInteger(result.height)).toBe(true);
|
||||
});
|
||||
});
|
||||
133
packages/core/tests/unit/media/url.test.ts
Normal file
133
packages/core/tests/unit/media/url.test.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
buildRenderMediaUrl,
|
||||
createPublicMediaUrlResolver,
|
||||
resolvePublicMediaUrl,
|
||||
} from "../../../src/media/url.js";
|
||||
import type { Storage } from "../../../src/storage/types.js";
|
||||
|
||||
function storageWith(publicUrl: string): Storage {
|
||||
return {
|
||||
upload: async () => ({ key: "", url: "", size: 0 }),
|
||||
download: async () => {
|
||||
throw new Error("not used");
|
||||
},
|
||||
delete: async () => {},
|
||||
exists: async () => true,
|
||||
list: async () => ({ files: [] }),
|
||||
getSignedUploadUrl: async () => {
|
||||
throw new Error("not used");
|
||||
},
|
||||
getPublicUrl: (key) => `${publicUrl}/${key}`,
|
||||
};
|
||||
}
|
||||
|
||||
describe("resolvePublicMediaUrl", () => {
|
||||
it("returns an empty string when storageKey is empty", () => {
|
||||
expect(resolvePublicMediaUrl(null, "")).toBe("");
|
||||
});
|
||||
|
||||
it("uses the proxied media endpoint when no storage is provided", () => {
|
||||
expect(resolvePublicMediaUrl(null, "01ABC.jpg")).toBe("/_emdash/api/media/file/01ABC.jpg");
|
||||
});
|
||||
|
||||
it("uses storage.getPublicUrl when a storage adapter is provided", () => {
|
||||
const storage = storageWith("https://media.example.com");
|
||||
expect(resolvePublicMediaUrl(storage, "01ABC.jpg")).toBe("https://media.example.com/01ABC.jpg");
|
||||
});
|
||||
});
|
||||
|
||||
describe("createPublicMediaUrlResolver", () => {
|
||||
it("returns a closure that reuses the storage adapter", () => {
|
||||
const resolver = createPublicMediaUrlResolver(storageWith("https://media.example.com"));
|
||||
expect(resolver("01ABC.jpg")).toBe("https://media.example.com/01ABC.jpg");
|
||||
expect(resolver("01XYZ.png")).toBe("https://media.example.com/01XYZ.png");
|
||||
});
|
||||
|
||||
it("falls back to the internal proxy when no storage is given", () => {
|
||||
const resolver = createPublicMediaUrlResolver(null);
|
||||
expect(resolver("01ABC.jpg")).toBe("/_emdash/api/media/file/01ABC.jpg");
|
||||
});
|
||||
});
|
||||
|
||||
describe("buildRenderMediaUrl", () => {
|
||||
const resolveCdn = (key: string) => `https://media.example.com/${key}`;
|
||||
|
||||
it("routes an explicit storageKey through resolve", () => {
|
||||
expect(buildRenderMediaUrl(resolveCdn, { storageKey: "01ABC.jpg" })).toBe(
|
||||
"https://media.example.com/01ABC.jpg",
|
||||
);
|
||||
});
|
||||
|
||||
it("uses the internal proxy for storageKey when resolve is absent", () => {
|
||||
expect(buildRenderMediaUrl(undefined, { storageKey: "01ABC.jpg" })).toBe(
|
||||
"/_emdash/api/media/file/01ABC.jpg",
|
||||
);
|
||||
});
|
||||
|
||||
it("rewrites an internal url via resolve so publicUrl is honored", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "/_emdash/api/media/file/01ABC.jpg",
|
||||
id: "01ABC",
|
||||
}),
|
||||
).toBe("https://media.example.com/01ABC.jpg");
|
||||
});
|
||||
|
||||
it("leaves an external url untouched even when resolve is given", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "https://other-cdn.example.com/01ABC.jpg",
|
||||
}),
|
||||
).toBe("https://other-cdn.example.com/01ABC.jpg");
|
||||
});
|
||||
|
||||
it("returns an internal url as-is when no resolve is given", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(undefined, {
|
||||
url: "/_emdash/api/media/file/01ABC.jpg",
|
||||
}),
|
||||
).toBe("/_emdash/api/media/file/01ABC.jpg");
|
||||
});
|
||||
|
||||
it("uses the internal proxy for a bare id", () => {
|
||||
expect(buildRenderMediaUrl(resolveCdn, { id: "01ABC" })).toBe("/_emdash/api/media/file/01ABC");
|
||||
});
|
||||
|
||||
it("returns an empty string when no fields are usable", () => {
|
||||
expect(buildRenderMediaUrl(resolveCdn, {})).toBe("");
|
||||
});
|
||||
|
||||
it("does not rewrite a url that only shares the media prefix", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "/_emdash/api/media/file-list/01ABC.jpg",
|
||||
}),
|
||||
).toBe("/_emdash/api/media/file-list/01ABC.jpg");
|
||||
});
|
||||
|
||||
it("passes an internal url through when the captured key contains a slash", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "/_emdash/api/media/file/../other-tenant/secret.pdf",
|
||||
}),
|
||||
).toBe("/_emdash/api/media/file/../other-tenant/secret.pdf");
|
||||
});
|
||||
|
||||
it("passes an internal url through when the captured key contains a query string", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "/_emdash/api/media/file/01ABC.jpg?v=2",
|
||||
}),
|
||||
).toBe("/_emdash/api/media/file/01ABC.jpg?v=2");
|
||||
});
|
||||
|
||||
it("passes an internal url through when the captured key is percent-encoded", () => {
|
||||
expect(
|
||||
buildRenderMediaUrl(resolveCdn, {
|
||||
url: "/_emdash/api/media/file/01%2FABC.jpg",
|
||||
}),
|
||||
).toBe("/_emdash/api/media/file/01%2FABC.jpg");
|
||||
});
|
||||
});
|
||||
625
packages/core/tests/unit/menus/menus.test.ts
Normal file
625
packages/core/tests/unit/menus/menus.test.ts
Normal file
@@ -0,0 +1,625 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { ulid } from "ulidx";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createMenuItemBody, updateMenuItemBody } from "../../../src/api/schemas/menus.js";
|
||||
import { createDatabase } from "../../../src/database/connection.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import type { Database } from "../../../src/database/types.js";
|
||||
import { getMenuWithDb, getMenusWithDb } from "../../../src/menus/index.js";
|
||||
import { sanitizeHref } from "../../../src/utils/url.js";
|
||||
|
||||
describe("Navigation Menus", () => {
|
||||
let db: Kysely<Database>;
|
||||
|
||||
beforeEach(async () => {
|
||||
// Fresh in-memory database for each test
|
||||
db = createDatabase({ url: ":memory:" });
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
});
|
||||
|
||||
describe("migration", () => {
|
||||
it("should create _emdash_menus table", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const menusTable = tables.find((t) => t.name === "_emdash_menus");
|
||||
expect(menusTable).toBeDefined();
|
||||
|
||||
const columns = menusTable!.columns.map((c) => c.name);
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("name");
|
||||
expect(columns).toContain("label");
|
||||
expect(columns).toContain("created_at");
|
||||
expect(columns).toContain("updated_at");
|
||||
});
|
||||
|
||||
it("should create _emdash_menu_items table", async () => {
|
||||
const tables = await db.introspection.getTables();
|
||||
const itemsTable = tables.find((t) => t.name === "_emdash_menu_items");
|
||||
expect(itemsTable).toBeDefined();
|
||||
|
||||
const columns = itemsTable!.columns.map((c) => c.name);
|
||||
expect(columns).toContain("id");
|
||||
expect(columns).toContain("menu_id");
|
||||
expect(columns).toContain("parent_id");
|
||||
expect(columns).toContain("sort_order");
|
||||
expect(columns).toContain("type");
|
||||
expect(columns).toContain("reference_collection");
|
||||
expect(columns).toContain("reference_id");
|
||||
expect(columns).toContain("custom_url");
|
||||
expect(columns).toContain("label");
|
||||
expect(columns).toContain("target");
|
||||
expect(columns).toContain("css_classes");
|
||||
});
|
||||
|
||||
it("should enforce unique constraint on menu name", async () => {
|
||||
const id1 = ulid();
|
||||
const id2 = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: id1,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
await expect(
|
||||
db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: id2,
|
||||
name: "primary",
|
||||
label: "Primary Again",
|
||||
})
|
||||
.execute(),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it("should cascade delete menu items when menu is deleted", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
// Create menu
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "test-menu",
|
||||
label: "Test Menu",
|
||||
})
|
||||
.execute();
|
||||
|
||||
// Create menu item
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "https://example.com",
|
||||
label: "Test Link",
|
||||
})
|
||||
.execute();
|
||||
|
||||
// Delete menu
|
||||
await db.deleteFrom("_emdash_menus").where("id", "=", menuId).execute();
|
||||
|
||||
// Verify item was deleted
|
||||
const items = await db
|
||||
.selectFrom("_emdash_menu_items")
|
||||
.where("menu_id", "=", menuId)
|
||||
.selectAll()
|
||||
.execute();
|
||||
|
||||
expect(items).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getMenus", () => {
|
||||
it("should return empty array when no menus exist", async () => {
|
||||
const menus = await getMenusWithDb(db);
|
||||
expect(menus).toEqual([]);
|
||||
});
|
||||
|
||||
it("should return all menus ordered by name", async () => {
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values([
|
||||
{ id: ulid(), name: "footer", label: "Footer Links" },
|
||||
{ id: ulid(), name: "primary", label: "Primary Navigation" },
|
||||
{ id: ulid(), name: "social", label: "Social Links" },
|
||||
])
|
||||
.execute();
|
||||
|
||||
const menus = await getMenusWithDb(db);
|
||||
expect(menus).toHaveLength(3);
|
||||
expect(menus[0].name).toBe("footer");
|
||||
expect(menus[1].name).toBe("primary");
|
||||
expect(menus[2].name).toBe("social");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getMenu", () => {
|
||||
it("should return null for non-existent menu", async () => {
|
||||
const menu = await getMenuWithDb("nonexistent", db);
|
||||
expect(menu).toBeNull();
|
||||
});
|
||||
|
||||
it("should return menu with empty items array", async () => {
|
||||
const menuId = ulid();
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).toMatchObject({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
items: [],
|
||||
});
|
||||
});
|
||||
|
||||
it("should resolve custom URLs correctly", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "https://github.com",
|
||||
label: "GitHub",
|
||||
target: "_blank",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(1);
|
||||
expect(menu!.items[0]).toMatchObject({
|
||||
id: itemId,
|
||||
label: "GitHub",
|
||||
url: "https://github.com",
|
||||
target: "_blank",
|
||||
});
|
||||
});
|
||||
|
||||
it("should sanitize dangerous URLs from the database", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({ id: menuId, name: "primary", label: "Primary" })
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "javascript:alert(1)",
|
||||
label: "XSS",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(1);
|
||||
expect(menu!.items[0].url).toBe("#");
|
||||
});
|
||||
|
||||
it("should sanitize data: URLs from the database", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({ id: menuId, name: "primary", label: "Primary" })
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "data:text/html,<script>alert(1)</script>",
|
||||
label: "XSS",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(1);
|
||||
expect(menu!.items[0].url).toBe("#");
|
||||
});
|
||||
|
||||
it("should sanitize vbscript: URLs from the database", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({ id: menuId, name: "primary", label: "Primary" })
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "vbscript:MsgBox",
|
||||
label: "XSS",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(1);
|
||||
expect(menu!.items[0].url).toBe("#");
|
||||
});
|
||||
|
||||
it("should skip items with deleted content references", async () => {
|
||||
const menuId = ulid();
|
||||
const itemId = ulid();
|
||||
|
||||
// Create menu with item referencing non-existent content
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: itemId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "page",
|
||||
reference_collection: "pages",
|
||||
reference_id: "nonexistent",
|
||||
label: "Deleted Page",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
// Item should be filtered out because the page doesn't exist
|
||||
expect(menu!.items).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should build nested tree structure", async () => {
|
||||
const menuId = ulid();
|
||||
const parentId = ulid();
|
||||
const childId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
// Create parent item
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: parentId,
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "/about",
|
||||
label: "About",
|
||||
})
|
||||
.execute();
|
||||
|
||||
// Create child item
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: childId,
|
||||
menu_id: menuId,
|
||||
parent_id: parentId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "/about/team",
|
||||
label: "Team",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(1);
|
||||
expect(menu!.items[0].label).toBe("About");
|
||||
expect(menu!.items[0].children).toHaveLength(1);
|
||||
expect(menu!.items[0].children[0].label).toBe("Team");
|
||||
});
|
||||
|
||||
it("should order items by sort_order", async () => {
|
||||
const menuId = ulid();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menus")
|
||||
.values({
|
||||
id: menuId,
|
||||
name: "primary",
|
||||
label: "Primary Navigation",
|
||||
})
|
||||
.execute();
|
||||
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values([
|
||||
{
|
||||
id: ulid(),
|
||||
menu_id: menuId,
|
||||
sort_order: 2,
|
||||
type: "custom",
|
||||
custom_url: "/contact",
|
||||
label: "Contact",
|
||||
},
|
||||
{
|
||||
id: ulid(),
|
||||
menu_id: menuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "/home",
|
||||
label: "Home",
|
||||
},
|
||||
{
|
||||
id: ulid(),
|
||||
menu_id: menuId,
|
||||
sort_order: 1,
|
||||
type: "custom",
|
||||
custom_url: "/about",
|
||||
label: "About",
|
||||
},
|
||||
])
|
||||
.execute();
|
||||
|
||||
const menu = await getMenuWithDb("primary", db);
|
||||
expect(menu).not.toBeNull();
|
||||
expect(menu!.items).toHaveLength(3);
|
||||
expect(menu!.items[0].label).toBe("Home");
|
||||
expect(menu!.items[1].label).toBe("About");
|
||||
expect(menu!.items[2].label).toBe("Contact");
|
||||
});
|
||||
});
|
||||
|
||||
describe("menu item URL validation", () => {
|
||||
it("should reject javascript: URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "XSS",
|
||||
customUrl: "javascript:alert(1)",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should reject data: URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "XSS",
|
||||
customUrl: "data:text/html,<script>alert(1)</script>",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should reject vbscript: URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "XSS",
|
||||
customUrl: "vbscript:MsgBox",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should allow https URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Link",
|
||||
customUrl: "https://example.com",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("should allow relative paths", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Link",
|
||||
customUrl: "/about",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("should allow fragment links", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Link",
|
||||
customUrl: "#section",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject case-varied javascript: URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "XSS",
|
||||
customUrl: "JAVASCRIPT:alert(1)",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should allow mailto URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Email",
|
||||
customUrl: "mailto:user@example.com",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject javascript: in update schema", () => {
|
||||
const result = updateMenuItemBody.safeParse({
|
||||
customUrl: "javascript:alert(1)",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should allow tel: URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Call",
|
||||
customUrl: "tel:+15551234567",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject empty string URLs", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Link",
|
||||
customUrl: "",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it("should trim whitespace before validating", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "Link",
|
||||
customUrl: " https://example.com ",
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
if (result.success) {
|
||||
expect(result.data.customUrl).toBe("https://example.com");
|
||||
}
|
||||
});
|
||||
|
||||
it("should reject whitespace-prefixed javascript: after trim", () => {
|
||||
const result = createMenuItemBody.safeParse({
|
||||
type: "custom",
|
||||
label: "XSS",
|
||||
customUrl: " javascript:alert(1)",
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("sanitizeHref", () => {
|
||||
it("should return # for null input", () => {
|
||||
expect(sanitizeHref(null)).toBe("#");
|
||||
});
|
||||
|
||||
it("should return # for undefined input", () => {
|
||||
expect(sanitizeHref(undefined)).toBe("#");
|
||||
});
|
||||
});
|
||||
|
||||
describe("handleMenuSetItems", () => {
|
||||
// The MCP boundary uses Zod with `.nonnegative()` so callers can't
|
||||
// pass a negative `parentIndex` from there. Direct handler callers
|
||||
// (REST routes, future programmatic users) bypass that guard, so
|
||||
// the handler enforces the same constraint.
|
||||
|
||||
async function setupMenu(name: string): Promise<string> {
|
||||
const id = ulid();
|
||||
await db.insertInto("_emdash_menus").values({ id, name, label: name }).execute();
|
||||
return id;
|
||||
}
|
||||
|
||||
it("rejects negative parentIndex", async () => {
|
||||
const { handleMenuSetItems } = await import("../../../src/api/handlers/menus.js");
|
||||
await setupMenu("main");
|
||||
const result = await handleMenuSetItems(db, "main", [
|
||||
{ label: "A", type: "custom", customUrl: "/a", parentIndex: -1 },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("VALIDATION_ERROR");
|
||||
expect(result.error?.message).toMatch(/parentIndex/);
|
||||
});
|
||||
|
||||
it("rejects parentIndex >= current index (forward reference)", async () => {
|
||||
const { handleMenuSetItems } = await import("../../../src/api/handlers/menus.js");
|
||||
await setupMenu("main");
|
||||
const result = await handleMenuSetItems(db, "main", [
|
||||
{ label: "A", type: "custom", customUrl: "/a" },
|
||||
{ label: "B", type: "custom", customUrl: "/b", parentIndex: 5 },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("VALIDATION_ERROR");
|
||||
expect(result.error?.message).toMatch(/parentIndex/);
|
||||
});
|
||||
|
||||
it("returns NOT_FOUND for missing menu and leaves unrelated items untouched", async () => {
|
||||
const { handleMenuSetItems } = await import("../../../src/api/handlers/menus.js");
|
||||
|
||||
// Seed a real menu with items so the rollback assertion has
|
||||
// something to potentially clobber. A regression where the
|
||||
// handler deleted ALL items before the existence check (the
|
||||
// shape of the bug we want to guard against) would wipe these.
|
||||
const otherMenuId = await setupMenu("real");
|
||||
const otherItemId = ulid();
|
||||
await db
|
||||
.insertInto("_emdash_menu_items")
|
||||
.values({
|
||||
id: otherItemId,
|
||||
menu_id: otherMenuId,
|
||||
sort_order: 0,
|
||||
type: "custom",
|
||||
custom_url: "/x",
|
||||
label: "X",
|
||||
})
|
||||
.execute();
|
||||
|
||||
const result = await handleMenuSetItems(db, "ghost", [
|
||||
{ label: "A", type: "custom", customUrl: "/a" },
|
||||
]);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.code).toBe("NOT_FOUND");
|
||||
|
||||
// Unrelated menu's item survives — confirms the transaction
|
||||
// rolled back (or never started its destructive phase).
|
||||
const items = await db.selectFrom("_emdash_menu_items").selectAll().execute();
|
||||
expect(items).toHaveLength(1);
|
||||
expect(items[0]?.id).toBe(otherItemId);
|
||||
expect(items[0]?.menu_id).toBe(otherMenuId);
|
||||
});
|
||||
});
|
||||
});
|
||||
30
packages/core/tests/unit/middleware/csp.test.ts
Normal file
30
packages/core/tests/unit/middleware/csp.test.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { buildEmDashCsp } from "../../../src/astro/middleware/csp.js";
|
||||
|
||||
describe("buildEmDashCsp", () => {
|
||||
it("includes https: in img-src to allow external images", () => {
|
||||
const csp = buildEmDashCsp();
|
||||
const imgSrc = csp.split("; ").find((d) => d.startsWith("img-src"));
|
||||
expect(imgSrc).toContain("https:");
|
||||
});
|
||||
|
||||
it("still includes self, data:, and blob: in img-src", () => {
|
||||
const csp = buildEmDashCsp();
|
||||
const imgSrc = csp.split("; ").find((d) => d.startsWith("img-src"));
|
||||
expect(imgSrc).toContain("'self'");
|
||||
expect(imgSrc).toContain("data:");
|
||||
expect(imgSrc).toContain("blob:");
|
||||
});
|
||||
|
||||
it("keeps connect-src restricted to self", () => {
|
||||
const csp = buildEmDashCsp();
|
||||
const connectSrc = csp.split("; ").find((d) => d.startsWith("connect-src"));
|
||||
expect(connectSrc).toBe("connect-src 'self'");
|
||||
});
|
||||
|
||||
it("blocks framing with frame-ancestors none", () => {
|
||||
const csp = buildEmDashCsp();
|
||||
expect(csp).toContain("frame-ancestors 'none'");
|
||||
});
|
||||
});
|
||||
145
packages/core/tests/unit/middleware/oauth-csrf.test.ts
Normal file
145
packages/core/tests/unit/middleware/oauth-csrf.test.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
import { beforeAll, describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("virtual:emdash/auth", () => ({ authenticate: vi.fn() }));
|
||||
vi.mock("virtual:emdash/config", () => ({ default: {} }));
|
||||
vi.mock("astro:middleware", () => ({
|
||||
defineMiddleware: (handler: unknown) => handler,
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth", () => ({
|
||||
TOKEN_PREFIXES: {},
|
||||
generatePrefixedToken: vi.fn(),
|
||||
hashPrefixedToken: vi.fn(),
|
||||
VALID_SCOPES: [],
|
||||
validateScopes: vi.fn(),
|
||||
hasScope: vi.fn(() => false),
|
||||
computeS256Challenge: vi.fn(),
|
||||
Role: { ADMIN: 50 },
|
||||
}));
|
||||
vi.mock("@emdash-cms/auth/adapters/kysely", () => ({
|
||||
createKyselyAdapter: vi.fn(() => ({
|
||||
getUserById: vi.fn(),
|
||||
getUserByEmail: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
type AuthMiddlewareModule = typeof import("../../../src/astro/middleware/auth.js");
|
||||
|
||||
let onRequest: AuthMiddlewareModule["onRequest"];
|
||||
|
||||
beforeAll(async () => {
|
||||
({ onRequest } = await import("../../../src/astro/middleware/auth.js"));
|
||||
});
|
||||
|
||||
async function runAuthMiddleware(opts: {
|
||||
pathname: string;
|
||||
method?: string;
|
||||
origin?: string;
|
||||
extraHeaders?: Record<string, string>;
|
||||
}): Promise<{ response: Response; next: ReturnType<typeof vi.fn> }> {
|
||||
const url = new URL(opts.pathname, "https://site.example.com");
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/json",
|
||||
...opts.extraHeaders,
|
||||
};
|
||||
if (opts.origin) headers.Origin = opts.origin;
|
||||
|
||||
const session = {
|
||||
get: vi.fn().mockResolvedValue(null),
|
||||
set: vi.fn(),
|
||||
destroy: vi.fn(),
|
||||
};
|
||||
const next = vi.fn(async () => new Response("ok"));
|
||||
const response = await onRequest(
|
||||
{
|
||||
url,
|
||||
request: new Request(url, {
|
||||
method: opts.method ?? "POST",
|
||||
headers,
|
||||
body: "{}",
|
||||
}),
|
||||
locals: {
|
||||
emdash: { db: {}, config: {} },
|
||||
},
|
||||
session,
|
||||
redirect: (location: string) =>
|
||||
new Response(null, { status: 302, headers: { Location: location } }),
|
||||
} as Parameters<AuthMiddlewareModule["onRequest"]>[0],
|
||||
next,
|
||||
);
|
||||
|
||||
return { response, next };
|
||||
}
|
||||
|
||||
/**
|
||||
* OAuth protocol endpoints (RFC 6749, 7591, 8628) are designed to be called
|
||||
* cross-origin. They must bypass the Origin-based CSRF check that applies to
|
||||
* other public API routes.
|
||||
*
|
||||
* Regression test for PR #671: dynamic client registration and the token
|
||||
* endpoint were unreachable from real MCP clients because an Origin header
|
||||
* from a different origin triggered CSRF_REJECTED in middleware.
|
||||
*/
|
||||
describe("CSRF exemption for OAuth protocol endpoints", () => {
|
||||
const EXEMPT_PATHS = [
|
||||
"/_emdash/api/oauth/token",
|
||||
"/_emdash/api/oauth/register",
|
||||
"/_emdash/api/oauth/device/code",
|
||||
"/_emdash/api/oauth/device/token",
|
||||
] as const;
|
||||
|
||||
it.each(EXEMPT_PATHS)(
|
||||
"allows cross-origin POST to %s (passes request through to handler)",
|
||||
async (pathname) => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname,
|
||||
origin: "https://claude.ai",
|
||||
});
|
||||
|
||||
expect(next).toHaveBeenCalledOnce();
|
||||
expect(response.status).toBe(200);
|
||||
},
|
||||
);
|
||||
|
||||
it.each(EXEMPT_PATHS)("allows same-origin POST to %s", async (pathname) => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname,
|
||||
origin: "https://site.example.com",
|
||||
});
|
||||
|
||||
expect(next).toHaveBeenCalledOnce();
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it.each(EXEMPT_PATHS)("allows POST without any Origin header to %s", async (pathname) => {
|
||||
const { response, next } = await runAuthMiddleware({ pathname });
|
||||
|
||||
expect(next).toHaveBeenCalledOnce();
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it("still rejects cross-origin POST to non-exempt public routes (comments)", async () => {
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/comments/some-id",
|
||||
origin: "https://evil.example.com",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(403);
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: { code: "CSRF_REJECTED", message: "Cross-origin request blocked" },
|
||||
});
|
||||
});
|
||||
|
||||
it("still rejects cross-origin POST to device/authorize (session-authenticated consent step)", async () => {
|
||||
// /oauth/device/authorize is NOT in the exempt list — it's where the user
|
||||
// approves the CLI's device code from their browser session. It must be
|
||||
// protected by the normal CSRF check.
|
||||
const { response, next } = await runAuthMiddleware({
|
||||
pathname: "/_emdash/api/oauth/device/authorize",
|
||||
origin: "https://evil.example.com",
|
||||
});
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(response.status).toBe(403);
|
||||
});
|
||||
});
|
||||
108
packages/core/tests/unit/page/get-page-runtime.test.ts
Normal file
108
packages/core/tests/unit/page/get-page-runtime.test.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* getPageRuntime() Tests
|
||||
*
|
||||
* Tests the gatekeeper function that Astro components (EmDashHead, EmDashBodyStart,
|
||||
* EmDashBodyEnd) use to access plugin page contribution methods from locals.
|
||||
*
|
||||
* Bug context: The middleware's anonymous fast-path returned early without
|
||||
* initializing the runtime, so locals.emdash was never populated for anonymous
|
||||
* visitors. getPageRuntime() returned undefined, and all plugin page hooks
|
||||
* (page:metadata, page:fragments) were silently skipped.
|
||||
*
|
||||
* Fix: The middleware now always initializes the runtime, so locals.emdash
|
||||
* includes collectPageMetadata and collectPageFragments for all requests.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { getPageRuntime } from "../../../src/page/index.js";
|
||||
|
||||
describe("getPageRuntime", () => {
|
||||
it("returns undefined when locals has no emdash property", () => {
|
||||
const locals: Record<string, unknown> = {};
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined when locals.emdash is null", () => {
|
||||
const locals: Record<string, unknown> = { emdash: null };
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined when locals.emdash is missing collectPageMetadata", () => {
|
||||
const locals: Record<string, unknown> = {
|
||||
emdash: {
|
||||
collectPageFragments: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns undefined when locals.emdash is missing collectPageFragments", () => {
|
||||
const locals: Record<string, unknown> = {
|
||||
emdash: {
|
||||
collectPageMetadata: vi.fn(),
|
||||
},
|
||||
};
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it("returns the runtime when both page contribution methods are present", () => {
|
||||
const collectPageMetadata = vi.fn();
|
||||
const collectPageFragments = vi.fn();
|
||||
const locals: Record<string, unknown> = {
|
||||
emdash: {
|
||||
collectPageMetadata,
|
||||
collectPageFragments,
|
||||
},
|
||||
};
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result!.collectPageMetadata).toBe(collectPageMetadata);
|
||||
expect(result!.collectPageFragments).toBe(collectPageFragments);
|
||||
});
|
||||
|
||||
it("returns the runtime from a full middleware-shaped locals.emdash", () => {
|
||||
// Simulate the full shape that the middleware binds to locals.emdash,
|
||||
// verifying that the page contribution methods are extractable even
|
||||
// alongside all the other handler bindings.
|
||||
const collectPageMetadata = vi.fn();
|
||||
const collectPageFragments = vi.fn();
|
||||
const locals: Record<string, unknown> = {
|
||||
emdash: {
|
||||
handleContentList: vi.fn(),
|
||||
handleContentGet: vi.fn(),
|
||||
handleContentCreate: vi.fn(),
|
||||
handleContentUpdate: vi.fn(),
|
||||
handleContentDelete: vi.fn(),
|
||||
handleMediaList: vi.fn(),
|
||||
handlePluginApiRoute: vi.fn(),
|
||||
collectPageMetadata,
|
||||
collectPageFragments,
|
||||
storage: null,
|
||||
db: {},
|
||||
hooks: {},
|
||||
config: {},
|
||||
},
|
||||
};
|
||||
|
||||
const result = getPageRuntime(locals);
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result!.collectPageMetadata).toBe(collectPageMetadata);
|
||||
expect(result!.collectPageFragments).toBe(collectPageFragments);
|
||||
});
|
||||
});
|
||||
88
packages/core/tests/unit/page/seo-contributions.test.ts
Normal file
88
packages/core/tests/unit/page/seo-contributions.test.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
/**
|
||||
* generateSiteSeoContributions() Tests
|
||||
*
|
||||
* Bug context: SiteSettings.seo.googleVerification and bingVerification are
|
||||
* stored in the database and editable in the admin UI, but were never emitted
|
||||
* as <meta> tags into <head>. This left Google Search Console and Bing
|
||||
* Webmaster Tools verification impossible via meta-tag method.
|
||||
*
|
||||
* Fix: A new pure function generates the verification meta contributions from
|
||||
* site SEO settings, and EmDashHead.astro loads settings and includes them.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { generateSiteSeoContributions } from "../../../src/page/seo-contributions.js";
|
||||
|
||||
describe("generateSiteSeoContributions", () => {
|
||||
it("returns empty array when no settings provided", () => {
|
||||
const result = generateSiteSeoContributions(undefined);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("returns empty array when seo settings are empty", () => {
|
||||
const result = generateSiteSeoContributions({});
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("emits google-site-verification meta when googleVerification is set", () => {
|
||||
const result = generateSiteSeoContributions({
|
||||
googleVerification: "abc123",
|
||||
});
|
||||
|
||||
expect(result).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "google-site-verification",
|
||||
content: "abc123",
|
||||
});
|
||||
});
|
||||
|
||||
it("emits msvalidate.01 meta when bingVerification is set", () => {
|
||||
const result = generateSiteSeoContributions({
|
||||
bingVerification: "xyz789",
|
||||
});
|
||||
|
||||
expect(result).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "msvalidate.01",
|
||||
content: "xyz789",
|
||||
});
|
||||
});
|
||||
|
||||
it("emits both verification tags when both are set", () => {
|
||||
const result = generateSiteSeoContributions({
|
||||
googleVerification: "g-token",
|
||||
bingVerification: "b-token",
|
||||
});
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "google-site-verification",
|
||||
content: "g-token",
|
||||
});
|
||||
expect(result).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "msvalidate.01",
|
||||
content: "b-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("ignores empty string values", () => {
|
||||
const result = generateSiteSeoContributions({
|
||||
googleVerification: "",
|
||||
bingVerification: "",
|
||||
});
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it("ignores unrelated seo settings without crashing", () => {
|
||||
const result = generateSiteSeoContributions({
|
||||
titleSeparator: " | ",
|
||||
robotsTxt: "User-agent: *\nAllow: /",
|
||||
});
|
||||
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
});
|
||||
90
packages/core/tests/unit/page/site-identity.test.ts
Normal file
90
packages/core/tests/unit/page/site-identity.test.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* renderSiteIdentity() Tests
|
||||
*
|
||||
* Bug context (#831): user-configured site favicons were not emitted into
|
||||
* `<head>` by core. The 17 template `Base.astro` files emitted their own
|
||||
* `<link rel="icon">` but only when the template had been updated post
|
||||
* #448, and even then dropped the `type` attribute, so SVG favicons did
|
||||
* not render in Chromium browsers (which require `type="image/svg+xml"`
|
||||
* when the URL has no `.svg` extension).
|
||||
*
|
||||
* Fix: a first-party `renderSiteIdentity()` helper that emits the favicon
|
||||
* tag with the correct MIME type. Lives outside the plugin contribution
|
||||
* pipeline because that pipeline's `isSafeHref` check rejects same-origin
|
||||
* paths like `/_emdash/api/media/file/...`.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { renderSiteIdentity } from "../../../src/page/site-identity.js";
|
||||
|
||||
describe("renderSiteIdentity", () => {
|
||||
it("returns empty string when no input provided", () => {
|
||||
expect(renderSiteIdentity(undefined)).toBe("");
|
||||
});
|
||||
|
||||
it("returns empty string when input has no favicon", () => {
|
||||
expect(renderSiteIdentity({})).toBe("");
|
||||
});
|
||||
|
||||
it("returns empty string when favicon has no resolved URL", () => {
|
||||
// Unresolved MediaReference (no url field) should be a no-op.
|
||||
expect(
|
||||
renderSiteIdentity({
|
||||
favicon: { mediaId: "med_123" },
|
||||
}),
|
||||
).toBe("");
|
||||
});
|
||||
|
||||
it("emits link tag for favicon with URL", () => {
|
||||
const html = renderSiteIdentity({
|
||||
favicon: {
|
||||
mediaId: "med_123",
|
||||
url: "/_emdash/api/media/file/abc.png",
|
||||
contentType: "image/png",
|
||||
},
|
||||
});
|
||||
expect(html).toBe('<link rel="icon" href="/_emdash/api/media/file/abc.png" type="image/png">');
|
||||
});
|
||||
|
||||
it("includes type attribute for SVG favicons (the #831 bug)", () => {
|
||||
// SVG URLs from EmDash are extension-less (`/_emdash/api/media/file/<ulid>`),
|
||||
// so without `type="image/svg+xml"` Chromium will not render them.
|
||||
const html = renderSiteIdentity({
|
||||
favicon: {
|
||||
mediaId: "med_svg",
|
||||
url: "/_emdash/api/media/file/01KNTC51CKNJG1RFP3YV93BR17",
|
||||
contentType: "image/svg+xml",
|
||||
},
|
||||
});
|
||||
expect(html).toContain('type="image/svg+xml"');
|
||||
});
|
||||
|
||||
it("omits type attribute when contentType is not set", () => {
|
||||
// Tolerate older stored references that predate contentType resolution.
|
||||
const html = renderSiteIdentity({
|
||||
favicon: {
|
||||
mediaId: "med_legacy",
|
||||
url: "/_emdash/api/media/file/legacy.ico",
|
||||
},
|
||||
});
|
||||
expect(html).toBe('<link rel="icon" href="/_emdash/api/media/file/legacy.ico">');
|
||||
expect(html).not.toContain("type=");
|
||||
});
|
||||
|
||||
it("escapes hostile content in href and type", () => {
|
||||
// MediaReference URLs come from a controlled construction in
|
||||
// resolveMediaReference, but the renderer should still escape attribute
|
||||
// contents defensively.
|
||||
const html = renderSiteIdentity({
|
||||
favicon: {
|
||||
mediaId: "med_x",
|
||||
url: '/path"><script>alert(1)</script>',
|
||||
contentType: 'image/png"><x',
|
||||
},
|
||||
});
|
||||
expect(html).not.toContain("<script>");
|
||||
expect(html).toContain(""");
|
||||
expect(html).toContain("<");
|
||||
});
|
||||
});
|
||||
485
packages/core/tests/unit/plugins/adapt-sandbox-entry.test.ts
Normal file
485
packages/core/tests/unit/plugins/adapt-sandbox-entry.test.ts
Normal file
@@ -0,0 +1,485 @@
|
||||
/**
|
||||
* adaptSandboxEntry() Tests
|
||||
*
|
||||
* Tests the in-process adapter that converts standard-format plugins
|
||||
* ({ hooks, routes }) into ResolvedPlugin instances compatible with HookPipeline.
|
||||
*
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import type { PluginDescriptor } from "../../../src/astro/integration/runtime.js";
|
||||
import { adaptSandboxEntry } from "../../../src/plugins/adapt-sandbox-entry.js";
|
||||
import type { StandardPluginDefinition, StandardHookHandler } from "../../../src/plugins/types.js";
|
||||
|
||||
/** Create a properly typed mock hook handler */
|
||||
function mockHandler(): StandardHookHandler {
|
||||
return vi.fn(async () => {}) as unknown as StandardHookHandler;
|
||||
}
|
||||
|
||||
function createDescriptor(overrides?: Partial<PluginDescriptor>): PluginDescriptor {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
entrypoint: "@test/plugin",
|
||||
format: "standard",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("adaptSandboxEntry", () => {
|
||||
describe("basic adaptation", () => {
|
||||
it("produces a ResolvedPlugin with correct id and version", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {},
|
||||
routes: {},
|
||||
};
|
||||
const descriptor = createDescriptor({ id: "my-plugin", version: "2.1.0" });
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.id).toBe("my-plugin");
|
||||
expect(result.version).toBe("2.1.0");
|
||||
});
|
||||
|
||||
it("adapts an empty definition", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.hooks).toEqual({});
|
||||
expect(result.routes).toEqual({});
|
||||
expect(result.capabilities).toEqual([]);
|
||||
expect(result.allowedHosts).toEqual([]);
|
||||
expect(result.storage).toEqual({});
|
||||
});
|
||||
|
||||
it("carries capabilities from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
capabilities: ["content:read", "network:request"],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.capabilities).toEqual(["content:read", "network:request"]);
|
||||
});
|
||||
|
||||
it("carries allowedHosts from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
allowedHosts: ["api.example.com", "*.cdn.com"],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.allowedHosts).toEqual(["api.example.com", "*.cdn.com"]);
|
||||
});
|
||||
|
||||
it("carries storage config from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
storage: {
|
||||
events: { indexes: ["timestamp", "type"] },
|
||||
logs: { indexes: ["level"] },
|
||||
},
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.storage).toEqual({
|
||||
events: { indexes: ["timestamp", "type"] },
|
||||
logs: { indexes: ["level"] },
|
||||
});
|
||||
});
|
||||
|
||||
it("carries admin pages from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
adminPages: [{ path: "/settings", label: "Settings", icon: "gear" }],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.admin.pages).toEqual([{ path: "/settings", label: "Settings", icon: "gear" }]);
|
||||
});
|
||||
|
||||
it("carries admin widgets from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
adminWidgets: [{ id: "status", title: "Status", size: "half" }],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.admin.widgets).toEqual([{ id: "status", title: "Status", size: "half" }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("hook adaptation", () => {
|
||||
it("resolves a bare function hook with defaults", () => {
|
||||
const handler = vi.fn();
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:afterSave": handler,
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
const hook = result.hooks["content:afterSave"];
|
||||
expect(hook).toBeDefined();
|
||||
expect(hook!.handler).toBe(handler);
|
||||
expect(hook!.priority).toBe(100);
|
||||
expect(hook!.timeout).toBe(5000);
|
||||
expect(hook!.dependencies).toEqual([]);
|
||||
expect(hook!.errorPolicy).toBe("abort");
|
||||
expect(hook!.exclusive).toBe(false);
|
||||
expect(hook!.pluginId).toBe("test-plugin");
|
||||
});
|
||||
|
||||
it("resolves a config object hook with custom settings", () => {
|
||||
const handler = vi.fn();
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:beforeSave": {
|
||||
handler,
|
||||
priority: 1,
|
||||
timeout: 10000,
|
||||
dependencies: ["other-plugin"],
|
||||
errorPolicy: "continue",
|
||||
exclusive: false,
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
const hook = result.hooks["content:beforeSave"];
|
||||
expect(hook).toBeDefined();
|
||||
expect(hook!.handler).toBe(handler);
|
||||
expect(hook!.priority).toBe(1);
|
||||
expect(hook!.timeout).toBe(10000);
|
||||
expect(hook!.dependencies).toEqual(["other-plugin"]);
|
||||
expect(hook!.errorPolicy).toBe("continue");
|
||||
});
|
||||
|
||||
it("resolves multiple hooks", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:beforeSave": mockHandler(),
|
||||
"content:afterSave": { handler: mockHandler(), priority: 200 },
|
||||
"content:afterDelete": mockHandler(),
|
||||
"media:afterUpload": mockHandler(),
|
||||
"plugin:install": mockHandler(),
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.hooks["content:beforeSave"]).toBeDefined();
|
||||
expect(result.hooks["content:afterSave"]).toBeDefined();
|
||||
expect(result.hooks["content:afterDelete"]).toBeDefined();
|
||||
expect(result.hooks["media:afterUpload"]).toBeDefined();
|
||||
expect(result.hooks["plugin:install"]).toBeDefined();
|
||||
});
|
||||
|
||||
it("sets pluginId on all hooks from descriptor", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:beforeSave": mockHandler(),
|
||||
"content:afterSave": { handler: mockHandler() },
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor({ id: "my-plugin" });
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.hooks["content:beforeSave"]!.pluginId).toBe("my-plugin");
|
||||
expect(result.hooks["content:afterSave"]!.pluginId).toBe("my-plugin");
|
||||
});
|
||||
|
||||
it("resolves exclusive hooks", () => {
|
||||
const handler = vi.fn();
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"email:deliver": {
|
||||
handler,
|
||||
exclusive: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.hooks["email:deliver"]!.exclusive).toBe(true);
|
||||
});
|
||||
|
||||
it("throws on unknown hook names", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"unknown:hook": mockHandler(),
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
expect(() => adaptSandboxEntry(def, descriptor)).toThrow("unknown hook");
|
||||
});
|
||||
|
||||
it("applies default config for partial config objects", () => {
|
||||
const handler = vi.fn();
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:afterSave": {
|
||||
handler,
|
||||
priority: 200,
|
||||
// timeout, dependencies, errorPolicy, exclusive use defaults
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
const hook = result.hooks["content:afterSave"];
|
||||
expect(hook!.priority).toBe(200);
|
||||
expect(hook!.timeout).toBe(5000);
|
||||
expect(hook!.dependencies).toEqual([]);
|
||||
expect(hook!.errorPolicy).toBe("abort");
|
||||
expect(hook!.exclusive).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("route adaptation", () => {
|
||||
it("wraps standard two-arg route handler into single-arg RouteContext handler", async () => {
|
||||
const standardHandler = vi.fn().mockResolvedValue({ ok: true });
|
||||
|
||||
const def: StandardPluginDefinition = {
|
||||
routes: {
|
||||
status: {
|
||||
handler: standardHandler,
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.routes.status).toBeDefined();
|
||||
|
||||
// Simulate calling the adapted handler with a RouteContext-like object
|
||||
const mockCtx = {
|
||||
input: { foo: "bar" },
|
||||
request: new Request("http://localhost/test"),
|
||||
requestMeta: { ip: null, userAgent: null, referer: null, geo: null },
|
||||
plugin: { id: "test-plugin", version: "1.0.0" },
|
||||
kv: {} as any,
|
||||
storage: {} as any,
|
||||
log: {} as any,
|
||||
site: { name: "", url: "", locale: "en" },
|
||||
url: (p: string) => p,
|
||||
};
|
||||
|
||||
await result.routes.status.handler(mockCtx as any);
|
||||
|
||||
// Verify the standard handler was called with (routeCtx, pluginCtx)
|
||||
expect(standardHandler).toHaveBeenCalledTimes(1);
|
||||
const [routeCtx, pluginCtx] = standardHandler.mock.calls[0];
|
||||
expect(routeCtx.input).toEqual({ foo: "bar" });
|
||||
expect(routeCtx.request).toBeDefined();
|
||||
expect(routeCtx.requestMeta).toBeDefined();
|
||||
// pluginCtx should be the stripped PluginContext (without route-specific fields)
|
||||
expect(pluginCtx.plugin.id).toBe("test-plugin");
|
||||
expect(pluginCtx.kv).toBeDefined();
|
||||
expect(pluginCtx.log).toBeDefined();
|
||||
// Route-specific fields should NOT leak into pluginCtx
|
||||
expect(pluginCtx).not.toHaveProperty("input");
|
||||
expect(pluginCtx).not.toHaveProperty("request");
|
||||
expect(pluginCtx).not.toHaveProperty("requestMeta");
|
||||
});
|
||||
|
||||
it("preserves public flag on routes", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
routes: {
|
||||
webhook: {
|
||||
handler: vi.fn(),
|
||||
public: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.routes.webhook.public).toBe(true);
|
||||
});
|
||||
|
||||
it("adapts multiple routes", () => {
|
||||
const def: StandardPluginDefinition = {
|
||||
routes: {
|
||||
status: { handler: vi.fn() },
|
||||
sync: { handler: vi.fn() },
|
||||
"admin/settings": { handler: vi.fn() },
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(Object.keys(result.routes)).toEqual(["status", "sync", "admin/settings"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability normalization", () => {
|
||||
it("normalizes content:write to include content:read", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({ capabilities: ["content:write"] });
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.capabilities).toContain("content:write");
|
||||
expect(result.capabilities).toContain("content:read");
|
||||
});
|
||||
|
||||
it("normalizes media:write to include media:read", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({ capabilities: ["media:write"] });
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.capabilities).toContain("media:write");
|
||||
expect(result.capabilities).toContain("media:read");
|
||||
});
|
||||
|
||||
it("normalizes network:request:unrestricted to include network:request", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({ capabilities: ["network:request:unrestricted"] });
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
expect(result.capabilities).toContain("network:request:unrestricted");
|
||||
expect(result.capabilities).toContain("network:request");
|
||||
});
|
||||
|
||||
it("does not duplicate implied capabilities", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
capabilities: ["content:read", "content:write"],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
const readCount = result.capabilities.filter((c) => c === "content:read").length;
|
||||
expect(readCount).toBe(1);
|
||||
});
|
||||
|
||||
it("throws on invalid capability", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
capabilities: ["invalid:capability"],
|
||||
});
|
||||
|
||||
expect(() => adaptSandboxEntry(def, descriptor)).toThrow("Invalid capability");
|
||||
});
|
||||
|
||||
// ── Deprecation alias layer ────────────────────────────────
|
||||
// Sandboxed plugins arrive via descriptors generated by older
|
||||
// builds (or older bundle versions). The adapter must accept
|
||||
// deprecated names and silently rewrite to canonical names so
|
||||
// the runtime only sees the new shape.
|
||||
|
||||
it("rewrites all deprecated capability names to current names", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
capabilities: [
|
||||
"read:content",
|
||||
"write:content",
|
||||
"read:media",
|
||||
"write:media",
|
||||
"read:users",
|
||||
"network:fetch",
|
||||
"network:fetch:any",
|
||||
"email:provide",
|
||||
"email:intercept",
|
||||
"page:inject",
|
||||
],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
// Canonical names present
|
||||
expect(result.capabilities).toContain("content:read");
|
||||
expect(result.capabilities).toContain("content:write");
|
||||
expect(result.capabilities).toContain("media:read");
|
||||
expect(result.capabilities).toContain("media:write");
|
||||
expect(result.capabilities).toContain("users:read");
|
||||
expect(result.capabilities).toContain("network:request");
|
||||
expect(result.capabilities).toContain("network:request:unrestricted");
|
||||
expect(result.capabilities).toContain("hooks.email-transport:register");
|
||||
expect(result.capabilities).toContain("hooks.email-events:register");
|
||||
expect(result.capabilities).toContain("hooks.page-fragments:register");
|
||||
|
||||
// Deprecated names absent
|
||||
for (const old of [
|
||||
"read:content",
|
||||
"write:content",
|
||||
"read:media",
|
||||
"write:media",
|
||||
"read:users",
|
||||
"network:fetch",
|
||||
"network:fetch:any",
|
||||
"email:provide",
|
||||
"email:intercept",
|
||||
"page:inject",
|
||||
]) {
|
||||
expect(result.capabilities).not.toContain(old);
|
||||
}
|
||||
});
|
||||
|
||||
it("deduplicates when both deprecated and current names are present", () => {
|
||||
const def: StandardPluginDefinition = {};
|
||||
const descriptor = createDescriptor({
|
||||
capabilities: ["read:content", "content:read"],
|
||||
});
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
const readCount = result.capabilities.filter((c) => c === "content:read").length;
|
||||
expect(readCount).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe("integration with HookPipeline", () => {
|
||||
it("produces hooks compatible with HookPipeline registration", () => {
|
||||
// HookPipeline stores hooks as ResolvedHook<unknown> internally.
|
||||
// The adapted hooks must have the expected shape.
|
||||
const handler = vi.fn().mockResolvedValue(undefined);
|
||||
const def: StandardPluginDefinition = {
|
||||
hooks: {
|
||||
"content:afterSave": {
|
||||
handler,
|
||||
priority: 50,
|
||||
},
|
||||
},
|
||||
};
|
||||
const descriptor = createDescriptor();
|
||||
|
||||
const result = adaptSandboxEntry(def, descriptor);
|
||||
|
||||
// Verify the hook shape matches what HookPipeline expects
|
||||
const hook = result.hooks["content:afterSave"]!;
|
||||
expect(typeof hook.handler).toBe("function");
|
||||
expect(typeof hook.priority).toBe("number");
|
||||
expect(typeof hook.timeout).toBe("number");
|
||||
expect(Array.isArray(hook.dependencies)).toBe(true);
|
||||
expect(typeof hook.errorPolicy).toBe("string");
|
||||
expect(typeof hook.exclusive).toBe("boolean");
|
||||
expect(typeof hook.pluginId).toBe("string");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,184 @@
|
||||
/**
|
||||
* Capability Normalization Tests
|
||||
*
|
||||
* Tests the deprecation alias layer for plugin capability names. The runtime
|
||||
* never sees deprecated names — `normalizeCapability()` rewrites them at
|
||||
* every external boundary (definePlugin, adaptSandboxEntry, marketplace
|
||||
* diff). These tests pin the rename map and the normalization helpers so
|
||||
* that the alias layer keeps working until the deprecated names are
|
||||
* removed in the next minor.
|
||||
*
|
||||
* @see Issue: "Plugin capability names are inconsistent"
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
CAPABILITY_RENAMES,
|
||||
isDeprecatedCapability,
|
||||
normalizeCapabilities,
|
||||
normalizeCapability,
|
||||
} from "../../../src/plugins/types.js";
|
||||
import type { DeprecatedPluginCapability } from "../../../src/plugins/types.js";
|
||||
|
||||
describe("CAPABILITY_RENAMES", () => {
|
||||
it("maps every deprecated name to its current replacement", () => {
|
||||
// Pin the rename table — if the issue's table changes, this test
|
||||
// catches the drift. Anyone adding a deprecation should update
|
||||
// this case explicitly.
|
||||
expect(CAPABILITY_RENAMES).toEqual({
|
||||
"network:fetch": "network:request",
|
||||
"network:fetch:any": "network:request:unrestricted",
|
||||
"read:content": "content:read",
|
||||
"write:content": "content:write",
|
||||
"read:media": "media:read",
|
||||
"write:media": "media:write",
|
||||
"read:users": "users:read",
|
||||
"email:provide": "hooks.email-transport:register",
|
||||
"email:intercept": "hooks.email-events:register",
|
||||
"page:inject": "hooks.page-fragments:register",
|
||||
});
|
||||
});
|
||||
|
||||
it("is frozen — cannot be mutated at runtime", () => {
|
||||
// `Object.freeze` makes the rename table tamper-proof.
|
||||
expect(Object.isFrozen(CAPABILITY_RENAMES)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("isDeprecatedCapability", () => {
|
||||
it("returns true for every deprecated name in the rename table", () => {
|
||||
for (const cap of Object.keys(CAPABILITY_RENAMES) as DeprecatedPluginCapability[]) {
|
||||
expect(isDeprecatedCapability(cap)).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it("returns false for current capability names", () => {
|
||||
const current = [
|
||||
"content:read",
|
||||
"content:write",
|
||||
"media:read",
|
||||
"media:write",
|
||||
"users:read",
|
||||
"network:request",
|
||||
"network:request:unrestricted",
|
||||
"email:send",
|
||||
"hooks.email-transport:register",
|
||||
"hooks.email-events:register",
|
||||
"hooks.page-fragments:register",
|
||||
];
|
||||
for (const cap of current) {
|
||||
expect(isDeprecatedCapability(cap)).toBe(false);
|
||||
}
|
||||
});
|
||||
|
||||
it("returns false for unknown strings", () => {
|
||||
expect(isDeprecatedCapability("not:a:capability")).toBe(false);
|
||||
expect(isDeprecatedCapability("")).toBe(false);
|
||||
expect(isDeprecatedCapability("content")).toBe(false);
|
||||
});
|
||||
|
||||
it("does not match Object.prototype keys", () => {
|
||||
// Regression: an `in` check against CAPABILITY_RENAMES would
|
||||
// also match inherited properties. Using `Object.prototype.hasOwnProperty`
|
||||
// (or `Object.hasOwn`) keeps the check scoped to own properties.
|
||||
// Without the guard, `normalizeCapability("toString")` would return
|
||||
// the prototype function reference, breaking the contract that
|
||||
// unknown strings are returned as-is.
|
||||
expect(isDeprecatedCapability("toString")).toBe(false);
|
||||
expect(isDeprecatedCapability("constructor")).toBe(false);
|
||||
expect(isDeprecatedCapability("hasOwnProperty")).toBe(false);
|
||||
expect(isDeprecatedCapability("__proto__")).toBe(false);
|
||||
expect(isDeprecatedCapability("valueOf")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeCapability", () => {
|
||||
it("rewrites deprecated names to current names", () => {
|
||||
expect(normalizeCapability("read:content")).toBe("content:read");
|
||||
expect(normalizeCapability("write:content")).toBe("content:write");
|
||||
expect(normalizeCapability("read:media")).toBe("media:read");
|
||||
expect(normalizeCapability("write:media")).toBe("media:write");
|
||||
expect(normalizeCapability("read:users")).toBe("users:read");
|
||||
expect(normalizeCapability("network:fetch")).toBe("network:request");
|
||||
expect(normalizeCapability("network:fetch:any")).toBe("network:request:unrestricted");
|
||||
expect(normalizeCapability("email:provide")).toBe("hooks.email-transport:register");
|
||||
expect(normalizeCapability("email:intercept")).toBe("hooks.email-events:register");
|
||||
expect(normalizeCapability("page:inject")).toBe("hooks.page-fragments:register");
|
||||
});
|
||||
|
||||
it("leaves current names unchanged", () => {
|
||||
expect(normalizeCapability("content:read")).toBe("content:read");
|
||||
expect(normalizeCapability("network:request")).toBe("network:request");
|
||||
expect(normalizeCapability("hooks.email-transport:register")).toBe(
|
||||
"hooks.email-transport:register",
|
||||
);
|
||||
});
|
||||
|
||||
it("passes through unknown strings unchanged", () => {
|
||||
// Downstream validators throw on unknown capabilities; the
|
||||
// normalizer's job is purely to translate the alias map.
|
||||
expect(normalizeCapability("invalid:capability")).toBe("invalid:capability");
|
||||
expect(normalizeCapability("")).toBe("");
|
||||
});
|
||||
|
||||
it("returns Object.prototype keys as-is (does not return prototype values)", () => {
|
||||
// Regression: with an `in` check, `normalizeCapability("toString")`
|
||||
// would have returned `Object.prototype.toString` (a function).
|
||||
// The own-property guard ensures we always return a string.
|
||||
expect(normalizeCapability("toString")).toBe("toString");
|
||||
expect(normalizeCapability("constructor")).toBe("constructor");
|
||||
expect(normalizeCapability("__proto__")).toBe("__proto__");
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeCapabilities", () => {
|
||||
it("rewrites every deprecated name in an array", () => {
|
||||
const input = ["read:content", "write:content", "network:fetch"];
|
||||
const result = normalizeCapabilities(input);
|
||||
|
||||
expect(result).toEqual(["content:read", "content:write", "network:request"]);
|
||||
});
|
||||
|
||||
it("preserves order of first occurrence", () => {
|
||||
const result = normalizeCapabilities(["network:request", "read:content", "write:media"]);
|
||||
|
||||
expect(result).toEqual(["network:request", "content:read", "media:write"]);
|
||||
});
|
||||
|
||||
it("deduplicates by canonical name when both old and new are present", () => {
|
||||
// A plugin migrating from old to new might transiently declare
|
||||
// both — the normalizer must not produce duplicates.
|
||||
const result = normalizeCapabilities(["read:content", "content:read"]);
|
||||
|
||||
expect(result).toEqual(["content:read"]);
|
||||
});
|
||||
|
||||
it("deduplicates two deprecated names that map to the same current name", () => {
|
||||
// Defensive: if someone declares the same alias twice, the result
|
||||
// must still contain it only once.
|
||||
const result = normalizeCapabilities(["read:content", "read:content"]);
|
||||
|
||||
expect(result).toEqual(["content:read"]);
|
||||
});
|
||||
|
||||
it("returns empty array for empty input", () => {
|
||||
expect(normalizeCapabilities([])).toEqual([]);
|
||||
});
|
||||
|
||||
it("does not mutate the input array", () => {
|
||||
const input = ["read:content", "write:content"];
|
||||
const snapshot = [...input];
|
||||
normalizeCapabilities(input);
|
||||
|
||||
expect(input).toEqual(snapshot);
|
||||
});
|
||||
|
||||
it("is idempotent — normalizing twice gives the same result", () => {
|
||||
const input = ["read:content", "write:media", "page:inject"];
|
||||
const once = normalizeCapabilities(input);
|
||||
const twice = normalizeCapabilities(once);
|
||||
|
||||
expect(twice).toEqual(once);
|
||||
});
|
||||
});
|
||||
518
packages/core/tests/unit/plugins/define-plugin.test.ts
Normal file
518
packages/core/tests/unit/plugins/define-plugin.test.ts
Normal file
@@ -0,0 +1,518 @@
|
||||
/**
|
||||
* definePlugin() Tests
|
||||
*
|
||||
* Tests the plugin definition helper for:
|
||||
* - ID validation (simple and scoped formats)
|
||||
* - Version validation (semver)
|
||||
* - Capability validation and normalization
|
||||
* - Hook resolution (function vs config object)
|
||||
* - Default value handling
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { definePlugin } from "../../../src/plugins/define-plugin.js";
|
||||
|
||||
// Error message patterns for test assertions
|
||||
const INVALID_PLUGIN_ID_PATTERN = /Invalid plugin id/;
|
||||
const INVALID_PLUGIN_VERSION_PATTERN = /Invalid plugin version/;
|
||||
const INVALID_CAPABILITY_PATTERN = /Invalid capability/;
|
||||
|
||||
describe("definePlugin", () => {
|
||||
describe("ID validation", () => {
|
||||
it("accepts valid simple ID", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "my-plugin",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.id).toBe("my-plugin");
|
||||
});
|
||||
|
||||
it("accepts valid simple ID with numbers", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "plugin-v2",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.id).toBe("plugin-v2");
|
||||
});
|
||||
|
||||
it("accepts valid scoped ID", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "@emdash-cms/seo-plugin",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.id).toBe("@emdash-cms/seo-plugin");
|
||||
});
|
||||
|
||||
it("accepts scoped ID with numbers", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "@my-org/plugin-v2",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.id).toBe("@my-org/plugin-v2");
|
||||
});
|
||||
|
||||
it("rejects ID with uppercase letters", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "MyPlugin",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects ID with underscores", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "my_plugin",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects ID with spaces", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "my plugin",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects empty ID", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects invalid scoped ID (missing name)", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "@my-org/",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects invalid scoped ID (missing scope)", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "@/my-plugin",
|
||||
version: "1.0.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_ID_PATTERN);
|
||||
});
|
||||
});
|
||||
|
||||
describe("version validation", () => {
|
||||
it("accepts valid semver", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.version).toBe("1.0.0");
|
||||
});
|
||||
|
||||
it("accepts semver with prerelease", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0-beta.1",
|
||||
});
|
||||
|
||||
expect(plugin.version).toBe("1.0.0-beta.1");
|
||||
});
|
||||
|
||||
it("accepts semver with build metadata", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0+build.123",
|
||||
});
|
||||
|
||||
expect(plugin.version).toBe("1.0.0+build.123");
|
||||
});
|
||||
|
||||
it("rejects invalid version format", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "test",
|
||||
version: "1.0",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_VERSION_PATTERN);
|
||||
});
|
||||
|
||||
it("rejects non-numeric version", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "test",
|
||||
version: "latest",
|
||||
}),
|
||||
).toThrow(INVALID_PLUGIN_VERSION_PATTERN);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability validation", () => {
|
||||
it("accepts valid capabilities", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["content:read", "content:write", "network:request"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("content:read");
|
||||
expect(plugin.capabilities).toContain("content:write");
|
||||
expect(plugin.capabilities).toContain("network:request");
|
||||
});
|
||||
|
||||
it("accepts media:read and media:write", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["media:read", "media:write"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("media:read");
|
||||
expect(plugin.capabilities).toContain("media:write");
|
||||
});
|
||||
|
||||
it("rejects invalid capability", () => {
|
||||
expect(() =>
|
||||
definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["invalid:capability" as any],
|
||||
}),
|
||||
).toThrow(INVALID_CAPABILITY_PATTERN);
|
||||
});
|
||||
|
||||
it("normalizes content:write to include content:read", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["content:write"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("content:write");
|
||||
expect(plugin.capabilities).toContain("content:read");
|
||||
});
|
||||
|
||||
it("normalizes media:write to include media:read", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["media:write"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("media:write");
|
||||
expect(plugin.capabilities).toContain("media:read");
|
||||
});
|
||||
|
||||
it("normalizes network:request:unrestricted to include network:request", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["network:request:unrestricted"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("network:request:unrestricted");
|
||||
expect(plugin.capabilities).toContain("network:request");
|
||||
});
|
||||
|
||||
it("does not duplicate read when already present", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["content:read", "content:write"],
|
||||
});
|
||||
|
||||
const readCount = plugin.capabilities.filter((c) => c === "content:read").length;
|
||||
expect(readCount).toBe(1);
|
||||
});
|
||||
|
||||
it("defaults to empty capabilities", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toEqual([]);
|
||||
});
|
||||
|
||||
// ── Deprecation alias layer ────────────────────────────────
|
||||
// During the deprecation window we accept the old names and
|
||||
// silently rewrite them to the new names. The runtime should
|
||||
// only ever see canonical (new) names.
|
||||
|
||||
it("accepts and normalizes deprecated capability names", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: [
|
||||
"read:content",
|
||||
"write:content",
|
||||
"read:media",
|
||||
"write:media",
|
||||
"read:users",
|
||||
"network:fetch",
|
||||
"network:fetch:any",
|
||||
"email:provide",
|
||||
"email:intercept",
|
||||
"page:inject",
|
||||
],
|
||||
});
|
||||
|
||||
// Normalized to current names
|
||||
expect(plugin.capabilities).toContain("content:read");
|
||||
expect(plugin.capabilities).toContain("content:write");
|
||||
expect(plugin.capabilities).toContain("media:read");
|
||||
expect(plugin.capabilities).toContain("media:write");
|
||||
expect(plugin.capabilities).toContain("users:read");
|
||||
expect(plugin.capabilities).toContain("network:request");
|
||||
expect(plugin.capabilities).toContain("network:request:unrestricted");
|
||||
expect(plugin.capabilities).toContain("hooks.email-transport:register");
|
||||
expect(plugin.capabilities).toContain("hooks.email-events:register");
|
||||
expect(plugin.capabilities).toContain("hooks.page-fragments:register");
|
||||
|
||||
// And the deprecated names do NOT appear in the resolved capabilities
|
||||
expect(plugin.capabilities).not.toContain("read:content");
|
||||
expect(plugin.capabilities).not.toContain("write:content");
|
||||
expect(plugin.capabilities).not.toContain("network:fetch");
|
||||
expect(plugin.capabilities).not.toContain("network:fetch:any");
|
||||
expect(plugin.capabilities).not.toContain("email:provide");
|
||||
expect(plugin.capabilities).not.toContain("email:intercept");
|
||||
expect(plugin.capabilities).not.toContain("page:inject");
|
||||
});
|
||||
|
||||
it("deduplicates when both deprecated and current names are passed", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
// Same capability, both spellings
|
||||
capabilities: ["read:content", "content:read"],
|
||||
});
|
||||
|
||||
const readCount = plugin.capabilities.filter((c) => c === "content:read").length;
|
||||
expect(readCount).toBe(1);
|
||||
});
|
||||
|
||||
it("normalizes deprecated names before applying implications", () => {
|
||||
// `write:content` (deprecated) should still imply `content:read`
|
||||
// after rewrite, not `read:content`.
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
capabilities: ["write:content"],
|
||||
});
|
||||
|
||||
expect(plugin.capabilities).toContain("content:write");
|
||||
expect(plugin.capabilities).toContain("content:read");
|
||||
expect(plugin.capabilities).not.toContain("write:content");
|
||||
expect(plugin.capabilities).not.toContain("read:content");
|
||||
});
|
||||
});
|
||||
|
||||
describe("hook resolution", () => {
|
||||
it("resolves function shorthand to full config", () => {
|
||||
const handler = vi.fn();
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
hooks: {
|
||||
"content:beforeSave": handler,
|
||||
},
|
||||
});
|
||||
|
||||
const hook = plugin.hooks["content:beforeSave"];
|
||||
expect(hook).toBeDefined();
|
||||
expect(hook!.handler).toBe(handler);
|
||||
expect(hook!.priority).toBe(100);
|
||||
expect(hook!.timeout).toBe(5000);
|
||||
expect(hook!.dependencies).toEqual([]);
|
||||
expect(hook!.errorPolicy).toBe("abort");
|
||||
expect(hook!.pluginId).toBe("test");
|
||||
});
|
||||
|
||||
it("resolves full config object", () => {
|
||||
const handler = vi.fn();
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
hooks: {
|
||||
"content:beforeSave": {
|
||||
handler,
|
||||
priority: 50,
|
||||
timeout: 10000,
|
||||
dependencies: ["other-plugin"],
|
||||
errorPolicy: "continue",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const hook = plugin.hooks["content:beforeSave"];
|
||||
expect(hook).toBeDefined();
|
||||
expect(hook!.handler).toBe(handler);
|
||||
expect(hook!.priority).toBe(50);
|
||||
expect(hook!.timeout).toBe(10000);
|
||||
expect(hook!.dependencies).toEqual(["other-plugin"]);
|
||||
expect(hook!.errorPolicy).toBe("continue");
|
||||
});
|
||||
|
||||
it("applies defaults to partial config", () => {
|
||||
const handler = vi.fn();
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
hooks: {
|
||||
"content:afterSave": {
|
||||
handler,
|
||||
priority: 200,
|
||||
// timeout, dependencies, errorPolicy use defaults
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const hook = plugin.hooks["content:afterSave"];
|
||||
expect(hook!.priority).toBe(200);
|
||||
expect(hook!.timeout).toBe(5000);
|
||||
expect(hook!.dependencies).toEqual([]);
|
||||
expect(hook!.errorPolicy).toBe("abort");
|
||||
});
|
||||
|
||||
it("resolves multiple hooks", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
hooks: {
|
||||
"content:beforeSave": vi.fn(),
|
||||
"content:afterSave": vi.fn(),
|
||||
"plugin:install": vi.fn(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(plugin.hooks["content:beforeSave"]).toBeDefined();
|
||||
expect(plugin.hooks["content:afterSave"]).toBeDefined();
|
||||
expect(plugin.hooks["plugin:install"]).toBeDefined();
|
||||
});
|
||||
|
||||
it("sets pluginId on all resolved hooks", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "my-plugin",
|
||||
version: "1.0.0",
|
||||
hooks: {
|
||||
"content:beforeSave": vi.fn(),
|
||||
"media:afterUpload": { handler: vi.fn(), priority: 50 },
|
||||
},
|
||||
});
|
||||
|
||||
expect(plugin.hooks["content:beforeSave"]!.pluginId).toBe("my-plugin");
|
||||
expect(plugin.hooks["media:afterUpload"]!.pluginId).toBe("my-plugin");
|
||||
});
|
||||
});
|
||||
|
||||
describe("default values", () => {
|
||||
it("defaults allowedHosts to empty array", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.allowedHosts).toEqual([]);
|
||||
});
|
||||
|
||||
it("defaults storage to empty object", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.storage).toEqual({});
|
||||
});
|
||||
|
||||
it("defaults hooks to empty object", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.hooks).toEqual({});
|
||||
});
|
||||
|
||||
it("defaults routes to empty object", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
});
|
||||
|
||||
expect(plugin.routes).toEqual({});
|
||||
});
|
||||
|
||||
it("preserves provided allowedHosts", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
allowedHosts: ["api.example.com", "*.cdn.com"],
|
||||
});
|
||||
|
||||
expect(plugin.allowedHosts).toEqual(["api.example.com", "*.cdn.com"]);
|
||||
});
|
||||
|
||||
it("preserves provided storage config", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
storage: {
|
||||
items: { indexes: ["type", "status"] },
|
||||
cache: { indexes: ["key"] },
|
||||
},
|
||||
});
|
||||
|
||||
expect(plugin.storage).toEqual({
|
||||
items: { indexes: ["type", "status"] },
|
||||
cache: { indexes: ["key"] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("routes passthrough", () => {
|
||||
it("preserves route definitions", () => {
|
||||
const handler = vi.fn();
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
routes: {
|
||||
sync: { handler },
|
||||
webhook: { handler, input: {} as any },
|
||||
},
|
||||
});
|
||||
|
||||
expect(plugin.routes.sync).toBeDefined();
|
||||
expect(plugin.routes.sync.handler).toBe(handler);
|
||||
expect(plugin.routes.webhook).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("admin passthrough", () => {
|
||||
it("preserves admin config", () => {
|
||||
const plugin = definePlugin({
|
||||
id: "test",
|
||||
version: "1.0.0",
|
||||
admin: {
|
||||
entry: "@test/plugin/admin",
|
||||
pages: [{ id: "settings", title: "Settings" }],
|
||||
widgets: [{ id: "stats", title: "Stats", area: "dashboard" }],
|
||||
},
|
||||
});
|
||||
|
||||
expect(plugin.admin.entry).toBe("@test/plugin/admin");
|
||||
expect(plugin.admin.pages).toHaveLength(1);
|
||||
expect(plugin.admin.widgets).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
1312
packages/core/tests/unit/plugins/email-pipeline.test.ts
Normal file
1312
packages/core/tests/unit/plugins/email-pipeline.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
821
packages/core/tests/unit/plugins/exclusive-hooks.test.ts
Normal file
821
packages/core/tests/unit/plugins/exclusive-hooks.test.ts
Normal file
@@ -0,0 +1,821 @@
|
||||
/**
|
||||
* Exclusive Hooks Tests
|
||||
*
|
||||
* Tests the exclusive hook system:
|
||||
* - HookPipeline: registration/tracking, selection, invokeExclusiveHook
|
||||
* - PluginManager.resolveExclusiveHooks(): single provider auto-select,
|
||||
* multi-provider no auto-select, stale selection clearing, preferred hints,
|
||||
* admin override beats preferred
|
||||
* - Lifecycle: activate → auto-select, deactivate → clears stale selection
|
||||
*/
|
||||
|
||||
import Database from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { extractManifest } from "../../../src/cli/commands/bundle-utils.js";
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import type { Database as DbSchema } from "../../../src/database/types.js";
|
||||
import { HookPipeline, resolveExclusiveHooks } from "../../../src/plugins/hooks.js";
|
||||
import { PluginManager } from "../../../src/plugins/manager.js";
|
||||
import { normalizeManifestHook } from "../../../src/plugins/manifest-schema.js";
|
||||
import type {
|
||||
ResolvedPlugin,
|
||||
ResolvedHook,
|
||||
PluginDefinition,
|
||||
ContentBeforeSaveHandler,
|
||||
ContentAfterSaveHandler,
|
||||
} from "../../../src/plugins/types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers — ResolvedPlugin (for HookPipeline tests)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: ["content:write", "content:read"],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
admin: {
|
||||
pages: [],
|
||||
widgets: [],
|
||||
},
|
||||
hooks: {},
|
||||
routes: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createTestHook<T>(
|
||||
pluginId: string,
|
||||
handler: T,
|
||||
overrides: Partial<ResolvedHook<T>> = {},
|
||||
): ResolvedHook<T> {
|
||||
return {
|
||||
pluginId,
|
||||
handler,
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "continue",
|
||||
exclusive: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers — PluginDefinition (for PluginManager tests)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function createTestDefinition(overrides: Partial<PluginDefinition> = {}): PluginDefinition {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: ["content:write", "content:read"],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// HookPipeline — exclusive behaviour
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Exercises HookPipeline's exclusive-hook machinery: registration tracking,
// provider enumeration, selection set/get/clear, targeted invocation with
// error isolation and timeouts, and interaction with the regular pipeline.
describe("HookPipeline — exclusive hooks", () => {
  it("tracks exclusive hook names during registration", () => {
    const plugin = createTestPlugin({
      id: "email-provider",
      hooks: {
        "content:beforeSave": createTestHook("email-provider", vi.fn(), {
          exclusive: true,
        }),
      },
    });

    const pipeline = new HookPipeline([plugin]);

    expect(pipeline.isExclusiveHook("content:beforeSave")).toBe(true);
    expect(pipeline.isExclusiveHook("content:afterSave")).toBe(false);
    expect(pipeline.getRegisteredExclusiveHooks()).toContain("content:beforeSave");
  });

  it("does not track non-exclusive hooks as exclusive", () => {
    const plugin = createTestPlugin({
      id: "normal-plugin",
      hooks: {
        "content:beforeSave": createTestHook("normal-plugin", vi.fn(), {
          exclusive: false,
        }),
      },
    });

    const pipeline = new HookPipeline([plugin]);

    expect(pipeline.isExclusiveHook("content:beforeSave")).toBe(false);
    expect(pipeline.getRegisteredExclusiveHooks()).not.toContain("content:beforeSave");
  });

  it("returns providers for an exclusive hook", () => {
    const plugin1 = createTestPlugin({
      id: "provider-a",
      hooks: {
        "content:beforeSave": createTestHook("provider-a", vi.fn(), { exclusive: true }),
      },
    });
    const plugin2 = createTestPlugin({
      id: "provider-b",
      hooks: {
        "content:beforeSave": createTestHook("provider-b", vi.fn(), { exclusive: true }),
      },
    });

    const pipeline = new HookPipeline([plugin1, plugin2]);

    const providers = pipeline.getExclusiveHookProviders("content:beforeSave");
    expect(providers).toHaveLength(2);
    // Order is unspecified — assert membership only.
    expect(providers.map((p) => p.pluginId)).toEqual(
      expect.arrayContaining(["provider-a", "provider-b"]),
    );
  });

  it("set/get/clear exclusive selection", () => {
    const plugin = createTestPlugin({
      id: "email-ses",
      hooks: {
        "content:beforeSave": createTestHook("email-ses", vi.fn(), { exclusive: true }),
      },
    });

    const pipeline = new HookPipeline([plugin]);

    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBeUndefined();

    pipeline.setExclusiveSelection("content:beforeSave", "email-ses");
    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBe("email-ses");

    pipeline.clearExclusiveSelection("content:beforeSave");
    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBeUndefined();
  });

  it("invokeExclusiveHook returns null when no selection", async () => {
    const handler = vi.fn().mockResolvedValue("result");
    const plugin = createTestPlugin({
      id: "provider-a",
      hooks: {
        "content:beforeSave": createTestHook("provider-a", handler, { exclusive: true }),
      },
    });

    const pipeline = new HookPipeline([plugin]);

    // With no selection set, invocation is a no-op: null result, handler untouched.
    const result = await pipeline.invokeExclusiveHook("content:beforeSave", { some: "event" });
    expect(result).toBeNull();
    expect(handler).not.toHaveBeenCalled();
  });

  it("invokeExclusiveHook dispatches only to selected provider", async () => {
    const handlerA = vi.fn().mockResolvedValue("result-a");
    const handlerB = vi.fn().mockResolvedValue("result-b");

    const pluginA = createTestPlugin({
      id: "provider-a",
      hooks: {
        "content:afterSave": createTestHook("provider-a", handlerA, { exclusive: true }),
      },
    });
    const pluginB = createTestPlugin({
      id: "provider-b",
      hooks: {
        "content:afterSave": createTestHook("provider-b", handlerB, { exclusive: true }),
      },
    });

    // Context factory needs a db for PluginContextFactory
    const sqlite = new Database(":memory:");
    const db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqlite }),
    });

    const pipeline = new HookPipeline([pluginA, pluginB], { db });

    pipeline.setExclusiveSelection("content:afterSave", "provider-b");

    const result = await pipeline.invokeExclusiveHook("content:afterSave", { some: "event" });

    expect(result).not.toBeNull();
    expect(result!.pluginId).toBe("provider-b");
    expect(result!.result).toBe("result-b");

    // Only the selected provider runs; the other is never invoked.
    expect(handlerB).toHaveBeenCalledTimes(1);
    expect(handlerA).not.toHaveBeenCalled();

    await db.destroy();
    sqlite.close();
  });

  it("invokeExclusiveHook isolates errors — returns error result instead of throwing", async () => {
    const handler = vi
      .fn()
      .mockRejectedValue(new Error("provider crashed")) as unknown as ContentAfterSaveHandler;

    const plugin = createTestPlugin({
      id: "broken-provider",
      hooks: {
        "content:afterSave": createTestHook("broken-provider", handler, {
          exclusive: true,
        }),
      },
    });

    const sqlite = new Database(":memory:");
    const db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqlite }),
    });

    const pipeline = new HookPipeline([plugin], { db });
    pipeline.setExclusiveSelection("content:afterSave", "broken-provider");

    // Should NOT throw — error is isolated
    const result = await pipeline.invokeExclusiveHook("content:afterSave", {});

    expect(result).not.toBeNull();
    expect(result!.pluginId).toBe("broken-provider");
    expect(result!.error).toBeInstanceOf(Error);
    expect(result!.error!.message).toBe("provider crashed");
    expect(result!.result).toBeUndefined();
    expect(result!.duration).toBeGreaterThanOrEqual(0);

    await db.destroy();
    sqlite.close();
  });

  it("invokeExclusiveHook respects timeout", async () => {
    // Handler that never resolves within the test window (10s sleep).
    const handler = vi.fn(
      () =>
        new Promise((resolve) => {
          setTimeout(resolve, 10_000);
        }),
    ) as unknown as ContentAfterSaveHandler;

    const plugin = createTestPlugin({
      id: "slow-provider",
      hooks: {
        "content:afterSave": createTestHook("slow-provider", handler, {
          exclusive: true,
          timeout: 50,
        }),
      },
    });

    const sqlite = new Database(":memory:");
    const db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqlite }),
    });

    const pipeline = new HookPipeline([plugin], { db });
    pipeline.setExclusiveSelection("content:afterSave", "slow-provider");

    const result = await pipeline.invokeExclusiveHook("content:afterSave", {});

    // The 50ms hook timeout fires first and is surfaced as an error result.
    expect(result).not.toBeNull();
    expect(result!.error).toBeInstanceOf(Error);
    expect(result!.error!.message.toLowerCase()).toContain("timeout");

    await db.destroy();
    sqlite.close();
  });

  it("exclusive hooks with a selection are skipped in regular pipeline", async () => {
    const exclusiveHandler = vi.fn().mockResolvedValue(undefined);
    const normalHandler = vi.fn().mockResolvedValue(undefined);

    const exclusivePlugin = createTestPlugin({
      id: "exclusive-plugin",
      hooks: {
        "content:afterSave": createTestHook("exclusive-plugin", exclusiveHandler, {
          exclusive: true,
        }),
      },
    });
    const normalPlugin = createTestPlugin({
      id: "normal-plugin",
      hooks: {
        "content:afterSave": createTestHook("normal-plugin", normalHandler, {
          exclusive: false,
        }),
      },
    });

    const sqlite = new Database(":memory:");
    const db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqlite }),
    });

    const pipeline = new HookPipeline([exclusivePlugin, normalPlugin], { db });

    // Set a selection — this means the exclusive hook should NOT run in the regular pipeline
    pipeline.setExclusiveSelection("content:afterSave", "exclusive-plugin");

    await pipeline.runContentAfterSave({ title: "test" }, "posts", true);

    // Normal hook should run
    expect(normalHandler).toHaveBeenCalledTimes(1);
    // Exclusive hook should NOT have run in the regular pipeline
    expect(exclusiveHandler).not.toHaveBeenCalled();

    await db.destroy();
    sqlite.close();
  });

  it("exclusive hooks without a selection DO run in regular pipeline", async () => {
    const exclusiveHandler = vi.fn().mockResolvedValue(undefined);

    const plugin = createTestPlugin({
      id: "unselected-provider",
      hooks: {
        "content:afterSave": createTestHook("unselected-provider", exclusiveHandler, {
          exclusive: true,
        }),
      },
    });

    const sqlite = new Database(":memory:");
    const db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqlite }),
    });

    const pipeline = new HookPipeline([plugin], { db });

    // No selection set — exclusive hooks should still run in regular pipeline
    await pipeline.runContentAfterSave({ title: "test" }, "posts", true);

    expect(exclusiveHandler).toHaveBeenCalledTimes(1);

    await db.destroy();
    sqlite.close();
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// HookPipeline — non-exclusive provider enumeration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Verifies that getHookProviders() enumerates only non-exclusive
// registrations and that exclusive registrations are partitioned into
// getExclusiveHookProviders().
describe("HookPipeline — getHookProviders", () => {
  it("returns non-exclusive providers registered for a hook", () => {
    const plugin1 = createTestPlugin({
      id: "middleware-a",
      capabilities: ["hooks.email-events:register"],
      hooks: {
        "email:beforeSend": createTestHook("middleware-a", vi.fn()),
      },
    });
    const plugin2 = createTestPlugin({
      id: "middleware-b",
      capabilities: ["hooks.email-events:register"],
      hooks: {
        "email:beforeSend": createTestHook("middleware-b", vi.fn()),
      },
    });

    const pipeline = new HookPipeline([plugin1, plugin2]);

    const providers = pipeline.getHookProviders("email:beforeSend");
    // Order is unspecified — assert membership and count only.
    expect(providers.map((p) => p.pluginId)).toEqual(
      expect.arrayContaining(["middleware-a", "middleware-b"]),
    );
    expect(providers).toHaveLength(2);
  });

  it("partitions with getExclusiveHookProviders — excludes exclusive registrations", () => {
    const exclusivePlugin = createTestPlugin({
      id: "exclusive-provider",
      hooks: {
        "content:beforeSave": createTestHook("exclusive-provider", vi.fn(), { exclusive: true }),
      },
    });
    const nonExclusivePlugin = createTestPlugin({
      id: "non-exclusive-provider",
      hooks: {
        "content:beforeSave": createTestHook("non-exclusive-provider", vi.fn()),
      },
    });

    const pipeline = new HookPipeline([exclusivePlugin, nonExclusivePlugin]);

    // Each accessor sees exactly its own partition of the registrations.
    expect(pipeline.getHookProviders("content:beforeSave").map((p) => p.pluginId)).toEqual([
      "non-exclusive-provider",
    ]);
    expect(pipeline.getExclusiveHookProviders("content:beforeSave").map((p) => p.pluginId)).toEqual(
      ["exclusive-provider"],
    );
  });

  it("returns empty array for an unregistered hook", () => {
    const pipeline = new HookPipeline([]);
    expect(pipeline.getHookProviders("email:beforeSend")).toEqual([]);
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// normalizeManifestHook
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// normalizeManifestHook accepts either the shorthand string form or the
// structured object form of a manifest hook entry and always yields the
// object form.
describe("normalizeManifestHook", () => {
  it("converts a plain string to an object", () => {
    const result = normalizeManifestHook("content:beforeSave");
    expect(result).toEqual({ name: "content:beforeSave" });
  });

  it("passes through an object unchanged", () => {
    const entry = { name: "content:beforeSave", exclusive: true, priority: 50 };
    const result = normalizeManifestHook(entry);
    expect(result).toEqual(entry);
  });

  it("handles object with only name", () => {
    const result = normalizeManifestHook({ name: "media:afterUpload" });
    expect(result).toEqual({ name: "media:afterUpload" });
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// extractManifest — exclusive hook metadata
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// extractManifest emits the compact string form for hooks that use all
// defaults, and a structured object when exclusive/priority/timeout deviate.
describe("extractManifest — exclusive hooks", () => {
  it("emits plain hook names for non-exclusive hooks with default settings", () => {
    const plugin = createTestPlugin({
      id: "simple-plugin",
      hooks: {
        "content:beforeSave": createTestHook("simple-plugin", vi.fn()),
      },
    });

    const manifest = extractManifest(plugin);
    expect(manifest.hooks).toEqual(["content:beforeSave"]);
  });

  it("emits structured entries for exclusive hooks", () => {
    const plugin = createTestPlugin({
      id: "email-provider",
      hooks: {
        "content:beforeSave": createTestHook("email-provider", vi.fn(), {
          exclusive: true,
        }),
      },
    });

    const manifest = extractManifest(plugin);
    expect(manifest.hooks).toEqual([{ name: "content:beforeSave", exclusive: true }]);
  });

  it("emits structured entries for hooks with custom priority or timeout", () => {
    const plugin = createTestPlugin({
      id: "custom-plugin",
      hooks: {
        "content:afterSave": createTestHook("custom-plugin", vi.fn(), {
          priority: 50,
          timeout: 10000,
        }),
      },
    });

    const manifest = extractManifest(plugin);
    expect(manifest.hooks).toEqual([{ name: "content:afterSave", priority: 50, timeout: 10000 }]);
  });

  it("handles mixed exclusive and non-exclusive hooks", () => {
    const plugin = createTestPlugin({
      id: "mixed-plugin",
      hooks: {
        "content:beforeSave": createTestHook("mixed-plugin", vi.fn(), { exclusive: true }),
        "content:afterSave": createTestHook("mixed-plugin", vi.fn()),
      },
    });

    const manifest = extractManifest(plugin);
    expect(manifest.hooks).toHaveLength(2);

    // One should be structured (exclusive), one should be a plain string
    const structured = manifest.hooks.filter((h) => typeof h === "object");
    const plain = manifest.hooks.filter((h) => typeof h === "string");
    expect(structured).toHaveLength(1);
    expect(plain).toHaveLength(1);
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// resolveExclusiveHooks (shared function)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Exercises the shared resolveExclusiveHooks() helper with an in-memory
// Map standing in for the options store: auto-selection of a sole active
// provider, filtering of inactive providers, and clearing stale selections.
describe("resolveExclusiveHooks — shared function", () => {
  it("auto-selects single active provider", async () => {
    const plugin = createTestPlugin({
      id: "only-provider",
      hooks: {
        "content:beforeSave": createTestHook("only-provider", vi.fn(), { exclusive: true }),
      },
    });
    const pipeline = new HookPipeline([plugin]);

    // Map-backed stand-in for the persistent options store.
    const store = new Map<string, string>();

    await resolveExclusiveHooks({
      pipeline,
      isActive: () => true,
      getOption: async (key) => store.get(key) ?? null,
      setOption: async (key, value) => {
        store.set(key, value);
      },
      deleteOption: async (key) => {
        store.delete(key);
      },
    });

    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBe("only-provider");
  });

  it("filters out inactive providers", async () => {
    const pluginA = createTestPlugin({
      id: "active-provider",
      hooks: {
        "content:beforeSave": createTestHook("active-provider", vi.fn(), { exclusive: true }),
      },
    });
    const pluginB = createTestPlugin({
      id: "inactive-provider",
      hooks: {
        "content:beforeSave": createTestHook("inactive-provider", vi.fn(), { exclusive: true }),
      },
    });
    const pipeline = new HookPipeline([pluginA, pluginB]);

    const store = new Map<string, string>();

    await resolveExclusiveHooks({
      pipeline,
      isActive: (id) => id === "active-provider",
      getOption: async (key) => store.get(key) ?? null,
      setOption: async (key, value) => {
        store.set(key, value);
      },
      deleteOption: async (key) => {
        store.delete(key);
      },
    });

    // Only active-provider is active, so it should be auto-selected
    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBe("active-provider");
  });

  it("clears stale selection when selected provider is inactive", async () => {
    const pluginA = createTestPlugin({
      id: "provider-a",
      hooks: {
        "content:beforeSave": createTestHook("provider-a", vi.fn(), { exclusive: true }),
      },
    });
    const pluginB = createTestPlugin({
      id: "provider-b",
      hooks: {
        "content:beforeSave": createTestHook("provider-b", vi.fn(), { exclusive: true }),
      },
    });
    const pipeline = new HookPipeline([pluginA, pluginB]);

    // Simulate existing selection for provider-a which is now inactive
    const store = new Map<string, string>([
      ["emdash:exclusive_hook:content:beforeSave", "provider-a"],
    ]);

    await resolveExclusiveHooks({
      pipeline,
      isActive: (id) => id === "provider-b", // provider-a is inactive
      getOption: async (key) => store.get(key) ?? null,
      setOption: async (key, value) => {
        store.set(key, value);
      },
      deleteOption: async (key) => {
        store.delete(key);
      },
    });

    // provider-a was stale, cleared. provider-b is the only active one → auto-selected
    expect(pipeline.getExclusiveSelection("content:beforeSave")).toBe("provider-b");
  });
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginManager — resolveExclusiveHooks
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// End-to-end tests of PluginManager's exclusive-hook resolution against a
// real migrated in-memory SQLite database: auto-selection, preservation of
// an existing valid selection, preferred hints, admin override precedence,
// and the getExclusiveHooksInfo summary.
describe("PluginManager — resolveExclusiveHooks", () => {
  let db: Kysely<DbSchema>;
  let sqliteDb: Database.Database;

  beforeEach(async () => {
    // Fresh in-memory DB per test, with the full migration set applied.
    sqliteDb = new Database(":memory:");
    db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqliteDb }),
    });
    await runMigrations(db);
  });

  afterEach(async () => {
    await db.destroy();
    sqliteDb.close();
  });

  it("auto-selects when only one provider for an exclusive hook", async () => {
    const handler = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "email-ses",
        hooks: {
          "content:beforeSave": { handler, exclusive: true },
        },
      }),
    );
    await manager.activate("email-ses");

    const selection = await manager.getExclusiveHookSelection("content:beforeSave");
    expect(selection).toBe("email-ses");
  });

  it("keeps auto-selected provider when a second provider activates", async () => {
    const handlerA = vi.fn() as unknown as ContentBeforeSaveHandler;
    const handlerB = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler: handlerA, exclusive: true } },
      }),
    );
    manager.register(
      createTestDefinition({
        id: "provider-b",
        hooks: { "content:beforeSave": { handler: handlerB, exclusive: true } },
      }),
    );

    // provider-a is the only one — gets auto-selected
    await manager.activate("provider-a");
    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBe("provider-a");

    // provider-b activates — existing valid selection is preserved
    await manager.activate("provider-b");
    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBe("provider-a");
  });

  it("leaves unselected when multiple providers activate simultaneously", async () => {
    // If no one was auto-selected before the second provider, there's no
    // selection to keep. Test this by registering both before activating.
    const handlerA = vi.fn() as unknown as ContentBeforeSaveHandler;
    const handlerB = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler: handlerA, exclusive: true } },
      }),
    );
    manager.register(
      createTestDefinition({
        id: "provider-b",
        hooks: { "content:beforeSave": { handler: handlerB, exclusive: true } },
      }),
    );

    // Activate provider-a (auto-selects as sole provider)
    await manager.activate("provider-a");
    // Clear the auto-selection to simulate "no prior selection"
    await manager.setExclusiveHookSelection("content:beforeSave", null);

    // Now activate provider-b — both active, no existing selection
    await manager.activate("provider-b");
    const selection = await manager.getExclusiveHookSelection("content:beforeSave");
    expect(selection).toBeNull();
  });

  it("clears stale selection when selected plugin is deactivated", async () => {
    const handlerA = vi.fn() as unknown as ContentBeforeSaveHandler;
    const handlerB = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler: handlerA, exclusive: true } },
      }),
    );
    manager.register(
      createTestDefinition({
        id: "provider-b",
        hooks: { "content:beforeSave": { handler: handlerB, exclusive: true } },
      }),
    );

    await manager.activate("provider-a");
    await manager.activate("provider-b");

    // Manually set a selection
    await manager.setExclusiveHookSelection("content:beforeSave", "provider-a");
    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBe("provider-a");

    // Deactivate the selected plugin
    await manager.deactivate("provider-a");

    // After deactivation, provider-b is the only one left → auto-selects
    const selection = await manager.getExclusiveHookSelection("content:beforeSave");
    expect(selection).toBe("provider-b");
  });

  it("uses preferred hints when no selection exists", async () => {
    const handlerA = vi.fn() as unknown as ContentBeforeSaveHandler;
    const handlerB = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler: handlerA, exclusive: true } },
      }),
    );
    manager.register(
      createTestDefinition({
        id: "provider-b",
        hooks: { "content:beforeSave": { handler: handlerB, exclusive: true } },
      }),
    );

    await manager.activate("provider-a");
    await manager.activate("provider-b");

    // Clear any auto-selection from the first activate
    await manager.setExclusiveHookSelection("content:beforeSave", null);
    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBeNull();

    // Resolve with preferred hint
    const hints = new Map([["provider-b", ["content:beforeSave"]]]);
    await manager.resolveExclusiveHooks(hints);

    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBe("provider-b");
  });

  it("admin override (DB selection) beats preferred hints", async () => {
    const handlerA = vi.fn() as unknown as ContentBeforeSaveHandler;
    const handlerB = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler: handlerA, exclusive: true } },
      }),
    );
    manager.register(
      createTestDefinition({
        id: "provider-b",
        hooks: { "content:beforeSave": { handler: handlerB, exclusive: true } },
      }),
    );

    await manager.activate("provider-a");
    await manager.activate("provider-b");

    // Admin explicitly sets provider-a
    await manager.setExclusiveHookSelection("content:beforeSave", "provider-a");

    // Resolve with preferred hint for provider-b — admin choice should win
    const hints = new Map([["provider-b", ["content:beforeSave"]]]);
    await manager.resolveExclusiveHooks(hints);

    expect(await manager.getExclusiveHookSelection("content:beforeSave")).toBe("provider-a");
  });

  it("getExclusiveHooksInfo returns complete info", async () => {
    const handler = vi.fn() as unknown as ContentBeforeSaveHandler;

    const manager = new PluginManager({ db });
    manager.register(
      createTestDefinition({
        id: "provider-a",
        hooks: { "content:beforeSave": { handler, exclusive: true } },
      }),
    );
    await manager.activate("provider-a");

    const info = await manager.getExclusiveHooksInfo();
    expect(info).toHaveLength(1);
    expect(info[0]!.hookName).toBe("content:beforeSave");
    expect(info[0]!.providers).toHaveLength(1);
    expect(info[0]!.providers[0]!.pluginId).toBe("provider-a");
    expect(info[0]!.selectedPluginId).toBe("provider-a");
  });
});
|
||||
187
packages/core/tests/unit/plugins/field-widgets.test.ts
Normal file
187
packages/core/tests/unit/plugins/field-widgets.test.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
/**
|
||||
* Tests for the field widget plugin pipeline.
|
||||
*
|
||||
* Covers:
|
||||
* - Manifest schema validation for fieldWidgets
|
||||
* - definePlugin() with fieldWidgets
|
||||
* - FieldWidgetConfig type correctness
|
||||
*/
|
||||
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { pluginManifestSchema } from "../../../src/plugins/manifest-schema.js";
|
||||
|
||||
/** Minimal valid manifest */
|
||||
function makeManifest(admin: Record<string, unknown> = {}) {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
hooks: [],
|
||||
routes: [],
|
||||
admin,
|
||||
};
|
||||
}
|
||||
|
||||
// Validates the fieldWidgets section of the plugin manifest schema:
// optional presence, per-widget required fields (name, label, fieldTypes),
// and acceptance of Block Kit element definitions.
describe("pluginManifestSchema — fieldWidgets", () => {
  it("should accept manifest without fieldWidgets", () => {
    const result = pluginManifestSchema.safeParse(makeManifest());
    expect(result.success).toBe(true);
  });

  it("should accept manifest with empty fieldWidgets array", () => {
    const result = pluginManifestSchema.safeParse(makeManifest({ fieldWidgets: [] }));
    expect(result.success).toBe(true);
  });

  it("should accept a valid field widget definition", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "picker",
            label: "Color Picker",
            fieldTypes: ["string"],
          },
        ],
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should accept multiple field widget definitions", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "picker",
            label: "Color Picker",
            fieldTypes: ["string"],
          },
          {
            name: "pricing",
            label: "Pricing Editor",
            fieldTypes: ["json"],
            elements: [{ type: "toggle", action_id: "enabled", label: "Enable" }],
          },
        ],
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should accept field widget with Block Kit elements", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "pricing",
            label: "Pricing",
            fieldTypes: ["json"],
            elements: [
              { type: "toggle", action_id: "enabled", label: "Enable" },
              { type: "text_input", action_id: "price", label: "Price" },
              {
                type: "select",
                action_id: "mode",
                label: "Mode",
                options: [{ value: "a", label: "A" }],
              },
            ],
          },
        ],
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should accept field widget with multiple field types", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "hex",
            label: "Hex Input",
            fieldTypes: ["string", "json"],
          },
        ],
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should reject field widget with empty name", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "",
            label: "Test",
            fieldTypes: ["string"],
          },
        ],
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject field widget with empty label", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "test",
            label: "",
            fieldTypes: ["string"],
          },
        ],
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject field widget without name", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            label: "Test",
            fieldTypes: ["string"],
          },
        ],
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject field widget without fieldTypes", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "test",
            label: "Test",
          },
        ],
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should accept field widget with empty fieldTypes array", () => {
    // An empty fieldTypes list is valid per the schema (widget applies nowhere).
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        fieldWidgets: [
          {
            name: "test",
            label: "Test",
            fieldTypes: [],
          },
        ],
      }),
    );
    expect(result.success).toBe(true);
  });
});
|
||||
719
packages/core/tests/unit/plugins/hooks.test.ts
Normal file
719
packages/core/tests/unit/plugins/hooks.test.ts
Normal file
@@ -0,0 +1,719 @@
|
||||
/**
|
||||
* HookPipeline Tests
|
||||
*
|
||||
* Tests the v2 hook pipeline for:
|
||||
* - Hook registration and sorting
|
||||
* - Hook execution with timeout
|
||||
* - Content hooks (beforeSave, afterSave, beforeDelete, afterDelete)
|
||||
* - Lifecycle hooks (install, activate, deactivate, uninstall)
|
||||
* - Error handling and error policies
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi } from "vitest";
|
||||
|
||||
import { HookPipeline, createHookPipeline } from "../../../src/plugins/hooks.js";
|
||||
import type { ResolvedPlugin, ResolvedHook } from "../../../src/plugins/types.js";
|
||||
|
||||
/**
|
||||
* Create a minimal resolved plugin for testing
|
||||
*/
|
||||
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
admin: {
|
||||
pages: [],
|
||||
widgets: [],
|
||||
},
|
||||
hooks: {},
|
||||
routes: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a resolved hook with defaults
|
||||
*/
|
||||
function createTestHook<T>(
|
||||
pluginId: string,
|
||||
handler: T,
|
||||
overrides: Partial<ResolvedHook<T>> = {},
|
||||
): ResolvedHook<T> {
|
||||
return {
|
||||
pluginId,
|
||||
handler,
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "continue",
|
||||
exclusive: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("HookPipeline", () => {
|
||||
describe("construction and registration", () => {
|
||||
it("creates empty pipeline with no plugins", () => {
|
||||
const pipeline = new HookPipeline([]);
|
||||
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(false);
|
||||
expect(pipeline.getHookCount("content:beforeSave")).toBe(0);
|
||||
});
|
||||
|
||||
it("registers hooks from plugins", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["content:write", "content:read"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("test", vi.fn()),
|
||||
"content:afterSave": createTestHook("test", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:afterSave")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:beforeDelete")).toBe(false);
|
||||
});
|
||||
|
||||
it("tracks registered hook names", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["content:write", "media:read"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("test", vi.fn()),
|
||||
"media:afterUpload": createTestHook("test", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
const registered = pipeline.getRegisteredHooks();
|
||||
|
||||
expect(registered).toContain("content:beforeSave");
|
||||
expect(registered).toContain("media:afterUpload");
|
||||
expect(registered).not.toContain("content:afterSave");
|
||||
});
|
||||
});
|
||||
|
||||
describe("hook sorting", () => {
|
||||
it("sorts hooks by priority (lower first)", () => {
|
||||
const handler1 = vi.fn();
|
||||
const handler2 = vi.fn();
|
||||
const handler3 = vi.fn();
|
||||
|
||||
const plugin1 = createTestPlugin({
|
||||
id: "plugin-1",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-1", handler1, {
|
||||
priority: 200,
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const plugin2 = createTestPlugin({
|
||||
id: "plugin-2",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-2", handler2, {
|
||||
priority: 50,
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const plugin3 = createTestPlugin({
|
||||
id: "plugin-3",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-3", handler3, {
|
||||
priority: 100,
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
// Create pipeline and manually verify order through execution
|
||||
const pipeline = new HookPipeline([plugin1, plugin2, plugin3]);
|
||||
|
||||
expect(pipeline.getHookCount("content:beforeSave")).toBe(3);
|
||||
});
|
||||
|
||||
it("respects dependencies when sorting", () => {
|
||||
const handler1 = vi.fn();
|
||||
const handler2 = vi.fn();
|
||||
|
||||
const plugin1 = createTestPlugin({
|
||||
id: "plugin-1",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-1", handler1, {
|
||||
priority: 50, // Lower priority but...
|
||||
dependencies: ["plugin-2"], // depends on plugin-2
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const plugin2 = createTestPlugin({
|
||||
id: "plugin-2",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-2", handler2, {
|
||||
priority: 100, // Higher priority
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin1, plugin2]);
|
||||
|
||||
// plugin-2 should run before plugin-1 despite priority
|
||||
// because plugin-1 depends on plugin-2
|
||||
expect(pipeline.getHookCount("content:beforeSave")).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("content:beforeSave", () => {
|
||||
it("runs hooks and returns modified content", async () => {
|
||||
const handler = vi.fn(async (event) => ({
|
||||
...event.content,
|
||||
modified: true,
|
||||
}));
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
// Need context factory for actual execution
|
||||
// Without it, getContext will throw
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
|
||||
// For unit test without DB, we can verify the hook count
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(true);
|
||||
});
|
||||
|
||||
it("chains content through multiple hooks", async () => {
|
||||
const handler1 = vi.fn(async (event) => ({
|
||||
...event.content,
|
||||
step1: true,
|
||||
}));
|
||||
|
||||
const handler2 = vi.fn(async (event) => ({
|
||||
...event.content,
|
||||
step2: true,
|
||||
}));
|
||||
|
||||
const plugin1 = createTestPlugin({
|
||||
id: "plugin-1",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-1", handler1, {
|
||||
priority: 1,
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const plugin2 = createTestPlugin({
|
||||
id: "plugin-2",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-2", handler2, {
|
||||
priority: 2,
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin1, plugin2]);
|
||||
expect(pipeline.getHookCount("content:beforeSave")).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("content:beforeDelete", () => {
|
||||
it("registers beforeDelete hooks", () => {
|
||||
const handler = vi.fn(async () => true);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["content:read"],
|
||||
hooks: {
|
||||
"content:beforeDelete": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeDelete")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("lifecycle hooks", () => {
|
||||
it("registers plugin:install hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
hooks: {
|
||||
"plugin:install": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("plugin:install")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers plugin:activate hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
hooks: {
|
||||
"plugin:activate": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("plugin:activate")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers plugin:deactivate hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
hooks: {
|
||||
"plugin:deactivate": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("plugin:deactivate")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers plugin:uninstall hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
hooks: {
|
||||
"plugin:uninstall": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("plugin:uninstall")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("media hooks", () => {
|
||||
it("registers media:beforeUpload hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["media:write"],
|
||||
hooks: {
|
||||
"media:beforeUpload": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:beforeUpload")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers media:afterUpload hook", () => {
|
||||
const handler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test",
|
||||
capabilities: ["media:read"],
|
||||
hooks: {
|
||||
"media:afterUpload": createTestHook("test", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:afterUpload")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createHookPipeline helper", () => {
|
||||
it("creates a HookPipeline instance", () => {
|
||||
const plugins = [createTestPlugin({ id: "test" })];
|
||||
const pipeline = createHookPipeline(plugins);
|
||||
|
||||
expect(pipeline).toBeInstanceOf(HookPipeline);
|
||||
});
|
||||
});
|
||||
|
||||
// =========================================================================
|
||||
// Capability enforcement for non-email hooks
|
||||
// =========================================================================
|
||||
|
||||
describe("capability enforcement — content hooks", () => {
|
||||
it("skips content:beforeSave without content:write capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(false);
|
||||
});
|
||||
|
||||
it("skips content:beforeSave with only content:read (requires content:write)", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "read-only",
|
||||
capabilities: ["content:read"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("read-only", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers content:beforeSave with content:write capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips content:afterSave without content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:afterSave": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterSave")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers content:afterSave with content:read capability (read-only notification)", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["content:read"],
|
||||
hooks: {
|
||||
"content:afterSave": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterSave")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips content:beforeDelete without content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:beforeDelete": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeDelete")).toBe(false);
|
||||
});
|
||||
|
||||
it("skips content:afterDelete without content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:afterDelete": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterDelete")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers all content hooks with content:write + content:read", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "writer",
|
||||
capabilities: ["content:write", "content:read"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("writer", vi.fn()),
|
||||
"content:afterSave": createTestHook("writer", vi.fn()),
|
||||
"content:beforeDelete": createTestHook("writer", vi.fn()),
|
||||
"content:afterDelete": createTestHook("writer", vi.fn()),
|
||||
"content:afterPublish": createTestHook("writer", vi.fn()),
|
||||
"content:afterUnpublish": createTestHook("writer", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:beforeSave")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:afterSave")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:beforeDelete")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:afterDelete")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:afterPublish")).toBe(true);
|
||||
expect(pipeline.hasHooks("content:afterUnpublish")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips content:afterPublish without content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:afterPublish": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterPublish")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers content:afterPublish with content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["content:read"],
|
||||
hooks: {
|
||||
"content:afterPublish": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterPublish")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips content:afterUnpublish without content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"content:afterUnpublish": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterUnpublish")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers content:afterUnpublish with content:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["content:read"],
|
||||
hooks: {
|
||||
"content:afterUnpublish": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("content:afterUnpublish")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability enforcement — media hooks", () => {
|
||||
it("skips media:beforeUpload without media:write capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"media:beforeUpload": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:beforeUpload")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers media:beforeUpload with media:write capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["media:write"],
|
||||
hooks: {
|
||||
"media:beforeUpload": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:beforeUpload")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips media:afterUpload without media:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"media:afterUpload": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:afterUpload")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers media:afterUpload with media:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["media:read"],
|
||||
hooks: {
|
||||
"media:afterUpload": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("media:afterUpload")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability enforcement — comment hooks", () => {
|
||||
it("skips comment:beforeCreate without users:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"comment:beforeCreate": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("comment:beforeCreate")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers comment:beforeCreate with users:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["users:read"],
|
||||
hooks: {
|
||||
"comment:beforeCreate": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("comment:beforeCreate")).toBe(true);
|
||||
});
|
||||
|
||||
it("skips comment:moderate without users:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"comment:moderate": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("comment:moderate")).toBe(false);
|
||||
});
|
||||
|
||||
it("skips comment:afterCreate without users:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"comment:afterCreate": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("comment:afterCreate")).toBe(false);
|
||||
});
|
||||
|
||||
it("skips comment:afterModerate without users:read capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"comment:afterModerate": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("comment:afterModerate")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability enforcement — page:fragments", () => {
|
||||
it("skips page:fragments without hooks.page-fragments:register capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"page:fragments": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("page:fragments")).toBe(false);
|
||||
});
|
||||
|
||||
it("registers page:fragments with hooks.page-fragments:register capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "has-cap",
|
||||
capabilities: ["hooks.page-fragments:register"],
|
||||
hooks: {
|
||||
"page:fragments": createTestHook("has-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("page:fragments")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("capability enforcement — hooks without requirements", () => {
|
||||
it("registers lifecycle hooks without any capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"plugin:install": createTestHook("no-cap", vi.fn()),
|
||||
"plugin:activate": createTestHook("no-cap", vi.fn()),
|
||||
"plugin:deactivate": createTestHook("no-cap", vi.fn()),
|
||||
"plugin:uninstall": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("plugin:install")).toBe(true);
|
||||
expect(pipeline.hasHooks("plugin:activate")).toBe(true);
|
||||
expect(pipeline.hasHooks("plugin:deactivate")).toBe(true);
|
||||
expect(pipeline.hasHooks("plugin:uninstall")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers cron hook without any capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
cron: createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("cron")).toBe(true);
|
||||
});
|
||||
|
||||
it("registers page:metadata without any capability", () => {
|
||||
const plugin = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("no-cap", vi.fn()),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin]);
|
||||
expect(pipeline.hasHooks("page:metadata")).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,204 @@
|
||||
/**
|
||||
* Tests that plugin HTTP functions strip credential headers on cross-origin redirects.
|
||||
*
|
||||
* Both createHttpAccess and createUnrestrictedHttpAccess manually follow redirects.
|
||||
* When a redirect crosses origins, Authorization/Cookie/Proxy-Authorization headers
|
||||
* must be stripped to prevent credential leakage to untrusted hosts.
|
||||
*/
|
||||
|
||||
import { afterAll, afterEach, beforeAll, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { setDefaultDnsResolver } from "../../../src/import/ssrf.js";
|
||||
import { createHttpAccess, createUnrestrictedHttpAccess } from "../../../src/plugins/context.js";
|
||||
|
||||
// Intercept globalThis.fetch so we can simulate redirect chains
|
||||
const mockFetch = vi.fn<typeof globalThis.fetch>();
|
||||
vi.stubGlobal("fetch", mockFetch);
|
||||
|
||||
// Bypass DoH so the fetch mock only sees the calls these tests model.
|
||||
// Returns a fixed public IP so resolveAndValidateExternalUrl passes.
|
||||
const STUB_RESOLVER = async () => ["93.184.216.34"];
|
||||
let previousResolver: ReturnType<typeof setDefaultDnsResolver> | undefined;
|
||||
|
||||
beforeAll(() => {
|
||||
previousResolver = setDefaultDnsResolver(STUB_RESOLVER);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
setDefaultDnsResolver(previousResolver ?? null);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFetch.mockReset();
|
||||
});
|
||||
|
||||
/** Build a minimal redirect response */
|
||||
function redirectResponse(location: string, status = 302): Response {
|
||||
return new Response(null, {
|
||||
status,
|
||||
headers: { Location: location },
|
||||
});
|
||||
}
|
||||
|
||||
/** Build a 200 response */
|
||||
function okResponse(body = "ok"): Response {
|
||||
return new Response(body, { status: 200 });
|
||||
}
|
||||
|
||||
/** Extract the headers passed to the Nth fetch call */
|
||||
function headersOfCall(callIndex: number): Headers {
|
||||
const init = mockFetch.mock.calls[callIndex]?.[1] as RequestInit | undefined;
|
||||
return new Headers(init?.headers);
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// createHttpAccess – host-restricted
|
||||
// =============================================================================
|
||||
|
||||
describe("createHttpAccess host allowlist matching", () => {
|
||||
const pluginId = "test-plugin";
|
||||
|
||||
it('allows any hostname when allowedHosts contains standalone "*"', async () => {
|
||||
mockFetch.mockResolvedValue(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, ["*"]);
|
||||
await expect(http.fetch("https://api.example.com/v1")).resolves.toBeInstanceOf(Response);
|
||||
await expect(http.fetch("https://random.host.io/path")).resolves.toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
it('allows requests when "*" is mixed with explicit hosts', async () => {
|
||||
mockFetch.mockResolvedValue(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, ["*", "api.example.com"]);
|
||||
await expect(http.fetch("https://another.example.net/ok")).resolves.toBeInstanceOf(Response);
|
||||
});
|
||||
|
||||
it('still supports "*.domain" wildcard matching', async () => {
|
||||
mockFetch.mockResolvedValue(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, ["*.example.com"]);
|
||||
await expect(http.fetch("https://api.example.com/v1")).resolves.toBeInstanceOf(Response);
|
||||
await expect(http.fetch("https://evil.com")).rejects.toThrow(
|
||||
'is not allowed to fetch from host "evil.com"',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("createHttpAccess credential stripping", () => {
|
||||
const pluginId = "test-plugin";
|
||||
const allowedHosts = ["a.example.com", "b.example.com"];
|
||||
|
||||
it("preserves credentials on same-origin redirect", async () => {
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://a.example.com/page2"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, allowedHosts);
|
||||
await http.fetch("https://a.example.com/page1", {
|
||||
headers: { Authorization: "Bearer secret", Cookie: "session=abc" },
|
||||
});
|
||||
|
||||
// Second call should still have credentials (same origin)
|
||||
const h = headersOfCall(1);
|
||||
expect(h.get("authorization")).toBe("Bearer secret");
|
||||
expect(h.get("cookie")).toBe("session=abc");
|
||||
});
|
||||
|
||||
it("strips credentials on cross-origin redirect", async () => {
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://b.example.com/landing"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, allowedHosts);
|
||||
await http.fetch("https://a.example.com/start", {
|
||||
headers: {
|
||||
Authorization: "Bearer secret",
|
||||
Cookie: "session=abc",
|
||||
"Proxy-Authorization": "Basic creds",
|
||||
"X-Custom": "keep-me",
|
||||
},
|
||||
});
|
||||
|
||||
const h = headersOfCall(1);
|
||||
expect(h.get("authorization")).toBeNull();
|
||||
expect(h.get("cookie")).toBeNull();
|
||||
expect(h.get("proxy-authorization")).toBeNull();
|
||||
// Non-credential headers survive
|
||||
expect(h.get("x-custom")).toBe("keep-me");
|
||||
});
|
||||
|
||||
it("strips credentials only once even with multiple same-origin hops after cross-origin", async () => {
|
||||
// a.example.com -> b.example.com -> b.example.com/final
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://b.example.com/step1"))
|
||||
.mockResolvedValueOnce(redirectResponse("https://b.example.com/step2"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createHttpAccess(pluginId, allowedHosts);
|
||||
await http.fetch("https://a.example.com/start", {
|
||||
headers: { Authorization: "Bearer secret" },
|
||||
});
|
||||
|
||||
// Call 0: original (has auth)
|
||||
expect(headersOfCall(0).get("authorization")).toBe("Bearer secret");
|
||||
// Call 1: after cross-origin hop (stripped)
|
||||
expect(headersOfCall(1).get("authorization")).toBeNull();
|
||||
// Call 2: same-origin hop on b (still stripped -- not re-added)
|
||||
expect(headersOfCall(2).get("authorization")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// createUnrestrictedHttpAccess – SSRF-protected but no host list
|
||||
// =============================================================================
|
||||
|
||||
describe("createUnrestrictedHttpAccess credential stripping", () => {
|
||||
const pluginId = "unrestricted-plugin";
|
||||
|
||||
it("preserves credentials on same-origin redirect", async () => {
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://api.example.com/v2"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createUnrestrictedHttpAccess(pluginId);
|
||||
await http.fetch("https://api.example.com/v1", {
|
||||
headers: { Authorization: "Bearer token" },
|
||||
});
|
||||
|
||||
expect(headersOfCall(1).get("authorization")).toBe("Bearer token");
|
||||
});
|
||||
|
||||
it("strips credentials on cross-origin redirect", async () => {
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://evil.example.com/steal"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createUnrestrictedHttpAccess(pluginId);
|
||||
await http.fetch("https://api.example.com/start", {
|
||||
headers: {
|
||||
Authorization: "Bearer token",
|
||||
Cookie: "session=xyz",
|
||||
"Proxy-Authorization": "Basic pw",
|
||||
Accept: "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
const h = headersOfCall(1);
|
||||
expect(h.get("authorization")).toBeNull();
|
||||
expect(h.get("cookie")).toBeNull();
|
||||
expect(h.get("proxy-authorization")).toBeNull();
|
||||
expect(h.get("accept")).toBe("application/json");
|
||||
});
|
||||
|
||||
it("handles redirect with no init gracefully", async () => {
|
||||
mockFetch
|
||||
.mockResolvedValueOnce(redirectResponse("https://other.example.com/"))
|
||||
.mockResolvedValueOnce(okResponse());
|
||||
|
||||
const http = createUnrestrictedHttpAccess(pluginId);
|
||||
// No init at all -- should not throw
|
||||
await http.fetch("https://api.example.com/bare");
|
||||
|
||||
expect(headersOfCall(1).get("authorization")).toBeNull();
|
||||
});
|
||||
});
|
||||
426
packages/core/tests/unit/plugins/manager.test.ts
Normal file
426
packages/core/tests/unit/plugins/manager.test.ts
Normal file
@@ -0,0 +1,426 @@
|
||||
/**
|
||||
* PluginManager Tests
|
||||
*
|
||||
* Tests the central plugin orchestrator for:
|
||||
* - Plugin registration
|
||||
* - Lifecycle management (install, activate, deactivate, uninstall)
|
||||
* - Query methods
|
||||
* - Hook and route delegation
|
||||
*/
|
||||
|
||||
import Database from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import type { Database as DbSchema } from "../../../src/database/types.js";
|
||||
import { PluginManager, createPluginManager } from "../../../src/plugins/manager.js";
|
||||
import type { PluginDefinition } from "../../../src/plugins/types.js";
|
||||
|
||||
// Test error message regex patterns
// Matched via toThrow() against the messages PluginManager raises, so the
// assertions below stay valid if the exact wording around these phrases changes.
const ALREADY_REGISTERED_REGEX = /already registered/;
const DEACTIVATE_FIRST_REGEX = /Deactivate it first/;
const NOT_FOUND_REGEX = /not found/;
const ALREADY_INSTALLED_REGEX = /already installed/;
|
||||
|
||||
/**
|
||||
* Create a minimal plugin definition for testing
|
||||
*/
|
||||
function createTestDefinition(overrides: Partial<PluginDefinition> = {}): PluginDefinition {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// Drives PluginManager through its full lifecycle (register → install →
// activate → deactivate → uninstall) against a real in-memory SQLite
// database — persistence is not mocked.
describe("PluginManager", () => {
  let db: Kysely<DbSchema>;
  let sqliteDb: Database.Database;
  let manager: PluginManager;

  beforeEach(async () => {
    // Create in-memory SQLite database (fresh per test; nothing leaks between cases)
    sqliteDb = new Database(":memory:");

    db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({
        database: sqliteDb,
      }),
    });

    // Run migrations
    await runMigrations(db);

    manager = new PluginManager({ db });
  });

  afterEach(async () => {
    // Destroy the Kysely wrapper first, then close the raw handle.
    await db.destroy();
    sqliteDb.close();
  });

  describe("register", () => {
    it("registers a plugin definition", () => {
      const resolved = manager.register(createTestDefinition({ id: "my-plugin" }));

      expect(resolved.id).toBe("my-plugin");
      expect(manager.hasPlugin("my-plugin")).toBe(true);
    });

    it("returns the resolved plugin", () => {
      const resolved = manager.register(
        createTestDefinition({
          id: "test",
          capabilities: ["content:write"],
        }),
      );

      // content:write should add content:read
      expect(resolved.capabilities).toContain("content:write");
      expect(resolved.capabilities).toContain("content:read");
    });

    it("throws on duplicate registration", () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      expect(() => manager.register(createTestDefinition({ id: "my-plugin" }))).toThrow(
        ALREADY_REGISTERED_REGEX,
      );
    });

    it("sets initial state to registered", () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      expect(manager.getPluginState("my-plugin")).toBe("registered");
    });
  });

  describe("registerAll", () => {
    it("registers multiple plugins", () => {
      manager.registerAll([
        createTestDefinition({ id: "plugin-a" }),
        createTestDefinition({ id: "plugin-b" }),
        createTestDefinition({ id: "plugin-c" }),
      ]);

      expect(manager.hasPlugin("plugin-a")).toBe(true);
      expect(manager.hasPlugin("plugin-b")).toBe(true);
      expect(manager.hasPlugin("plugin-c")).toBe(true);
    });
  });

  describe("unregister", () => {
    it("returns false for non-existent plugin", () => {
      const result = manager.unregister("non-existent");
      expect(result).toBe(false);
    });

    it("unregisters a registered plugin", () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      const result = manager.unregister("my-plugin");

      expect(result).toBe(true);
      expect(manager.hasPlugin("my-plugin")).toBe(false);
    });

    it("throws when trying to unregister active plugin", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      // Active plugins must be deactivated before they can be removed.
      expect(() => manager.unregister("my-plugin")).toThrow(DEACTIVATE_FIRST_REGEX);
    });
  });

  describe("install", () => {
    it("throws for non-existent plugin", async () => {
      await expect(manager.install("non-existent")).rejects.toThrow(NOT_FOUND_REGEX);
    });

    it("installs a registered plugin", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      await manager.install("my-plugin");

      expect(manager.getPluginState("my-plugin")).toBe("installed");
    });

    it("throws if plugin is already installed", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.install("my-plugin");

      await expect(manager.install("my-plugin")).rejects.toThrow(ALREADY_INSTALLED_REGEX);
    });

    it("runs plugin:install hook", async () => {
      const installHandler = vi.fn();
      manager.register(
        createTestDefinition({
          id: "my-plugin",
          hooks: {
            "plugin:install": installHandler,
          },
        }),
      );

      await manager.install("my-plugin");

      // Hook should be registered but not called without context factory
      // In real usage, the hook would be called
      expect(manager.getPluginState("my-plugin")).toBe("installed");
    });
  });

  describe("activate", () => {
    it("throws for non-existent plugin", async () => {
      await expect(manager.activate("non-existent")).rejects.toThrow(NOT_FOUND_REGEX);
    });

    it("auto-installs if not installed", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      // Activating directly from "registered" implicitly installs first.
      await manager.activate("my-plugin");

      expect(manager.getPluginState("my-plugin")).toBe("active");
    });

    it("activates an installed plugin", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.install("my-plugin");

      await manager.activate("my-plugin");

      expect(manager.getPluginState("my-plugin")).toBe("active");
    });

    it("returns empty array if already active", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      // Second activation is a no-op rather than an error.
      const results = await manager.activate("my-plugin");

      expect(results).toEqual([]);
    });
  });

  describe("deactivate", () => {
    it("throws for non-existent plugin", async () => {
      await expect(manager.deactivate("non-existent")).rejects.toThrow(NOT_FOUND_REGEX);
    });

    it("returns empty array if not active", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));

      const results = await manager.deactivate("my-plugin");

      expect(results).toEqual([]);
    });

    it("deactivates an active plugin", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      await manager.deactivate("my-plugin");

      expect(manager.getPluginState("my-plugin")).toBe("inactive");
    });
  });

  describe("uninstall", () => {
    it("throws for non-existent plugin", async () => {
      await expect(manager.uninstall("non-existent")).rejects.toThrow(NOT_FOUND_REGEX);
    });

    it("deactivates before uninstalling if active", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      await manager.uninstall("my-plugin");

      expect(manager.hasPlugin("my-plugin")).toBe(false);
    });

    it("removes plugin from manager", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.install("my-plugin");

      await manager.uninstall("my-plugin");

      expect(manager.hasPlugin("my-plugin")).toBe(false);
    });
  });

  describe("getPlugin", () => {
    it("returns undefined for non-existent plugin", () => {
      expect(manager.getPlugin("non-existent")).toBeUndefined();
    });

    it("returns the resolved plugin", () => {
      manager.register(createTestDefinition({ id: "my-plugin", version: "2.0.0" }));

      const plugin = manager.getPlugin("my-plugin");

      expect(plugin).toBeDefined();
      expect(plugin!.id).toBe("my-plugin");
      expect(plugin!.version).toBe("2.0.0");
    });
  });

  describe("getPluginState", () => {
    it("returns undefined for non-existent plugin", () => {
      expect(manager.getPluginState("non-existent")).toBeUndefined();
    });

    it("returns current state", async () => {
      // Walk one plugin through every lifecycle transition and verify the
      // reported state at each step.
      manager.register(createTestDefinition({ id: "my-plugin" }));
      expect(manager.getPluginState("my-plugin")).toBe("registered");

      await manager.install("my-plugin");
      expect(manager.getPluginState("my-plugin")).toBe("installed");

      await manager.activate("my-plugin");
      expect(manager.getPluginState("my-plugin")).toBe("active");

      await manager.deactivate("my-plugin");
      expect(manager.getPluginState("my-plugin")).toBe("inactive");
    });
  });

  describe("getAllPlugins", () => {
    it("returns empty array initially", () => {
      expect(manager.getAllPlugins()).toEqual([]);
    });

    it("returns all plugins with state", async () => {
      manager.register(createTestDefinition({ id: "plugin-a" }));
      manager.register(createTestDefinition({ id: "plugin-b" }));
      await manager.activate("plugin-b");

      const all = manager.getAllPlugins();

      expect(all).toHaveLength(2);

      // Entries are { plugin, state } pairs.
      const pluginA = all.find((p) => p.plugin.id === "plugin-a");
      const pluginB = all.find((p) => p.plugin.id === "plugin-b");

      expect(pluginA!.state).toBe("registered");
      expect(pluginB!.state).toBe("active");
    });
  });

  describe("getActivePlugins", () => {
    it("returns empty array when no active plugins", () => {
      manager.register(createTestDefinition({ id: "plugin-a" }));

      expect(manager.getActivePlugins()).toEqual([]);
    });

    it("returns only active plugins", async () => {
      manager.register(createTestDefinition({ id: "plugin-a" }));
      manager.register(createTestDefinition({ id: "plugin-b" }));
      manager.register(createTestDefinition({ id: "plugin-c" }));

      await manager.activate("plugin-a");
      await manager.activate("plugin-c");

      const active = manager.getActivePlugins();

      expect(active).toHaveLength(2);
      // toSorted() avoids depending on activation order.
      expect(active.map((p) => p.id).toSorted()).toEqual(["plugin-a", "plugin-c"]);
    });
  });

  describe("hasPlugin", () => {
    it("returns false for non-existent plugin", () => {
      expect(manager.hasPlugin("non-existent")).toBe(false);
    });

    it("returns true for registered plugin", () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      expect(manager.hasPlugin("my-plugin")).toBe(true);
    });
  });

  describe("isActive", () => {
    it("returns false for non-existent plugin", () => {
      expect(manager.isActive("non-existent")).toBe(false);
    });

    it("returns false for registered but not active plugin", () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      expect(manager.isActive("my-plugin")).toBe(false);
    });

    it("returns true for active plugin", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      expect(manager.isActive("my-plugin")).toBe(true);
    });

    it("returns false after deactivation", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");
      await manager.deactivate("my-plugin");

      expect(manager.isActive("my-plugin")).toBe(false);
    });
  });

  describe("getPluginRoutes", () => {
    it("returns routes for active plugin", async () => {
      manager.register(
        createTestDefinition({
          id: "my-plugin",
          routes: {
            sync: { handler: vi.fn() },
            import: { handler: vi.fn() },
          },
        }),
      );
      await manager.activate("my-plugin");

      const routes = manager.getPluginRoutes("my-plugin");

      expect(routes).toContain("sync");
      expect(routes).toContain("import");
    });
  });

  describe("reinitialize", () => {
    it("can be called to force reinitialization", async () => {
      manager.register(createTestDefinition({ id: "my-plugin" }));
      await manager.activate("my-plugin");

      // Should not throw
      manager.reinitialize();

      // Active state survives reinitialization.
      expect(manager.isActive("my-plugin")).toBe(true);
    });
  });
});
|
||||
|
||||
describe("createPluginManager helper", () => {
|
||||
let db: Kysely<DbSchema>;
|
||||
let sqliteDb: Database.Database;
|
||||
|
||||
beforeEach(async () => {
|
||||
sqliteDb = new Database(":memory:");
|
||||
db = new Kysely<DbSchema>({
|
||||
dialect: new SqliteDialect({ database: sqliteDb }),
|
||||
});
|
||||
await runMigrations(db);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
sqliteDb.close();
|
||||
});
|
||||
|
||||
it("creates a PluginManager instance", () => {
|
||||
const manager = createPluginManager({ db });
|
||||
expect(manager).toBeInstanceOf(PluginManager);
|
||||
});
|
||||
});
|
||||
361
packages/core/tests/unit/plugins/manifest-schema.test.ts
Normal file
361
packages/core/tests/unit/plugins/manifest-schema.test.ts
Normal file
@@ -0,0 +1,361 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
pluginManifestSchema,
|
||||
normalizeManifestRoute,
|
||||
} from "../../../src/plugins/manifest-schema.js";
|
||||
|
||||
/** Minimal valid manifest for testing — only storage fields vary */
|
||||
function makeManifest(storage: Record<string, { indexes: Array<string | string[]> }>) {
|
||||
return {
|
||||
id: "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage,
|
||||
hooks: [],
|
||||
routes: [],
|
||||
admin: {},
|
||||
};
|
||||
}
|
||||
|
||||
// Routes may be declared either as bare strings (private by default) or as
// { name, public? } objects; names must be safe path segments.
describe("pluginManifestSchema — route entries", () => {
  it("should accept plain string routes", () => {
    const result = pluginManifestSchema.safeParse(makeManifest({}));
    // Baseline with empty routes is valid
    expect(result.success).toBe(true);

    const withRoutes = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: ["webhook", "callback"],
    });
    expect(withRoutes.success).toBe(true);
  });

  it("should accept structured route objects", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: [{ name: "webhook", public: true }],
    });
    expect(result.success).toBe(true);
  });

  it("should accept a mix of strings and objects", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: ["callback", { name: "webhook", public: true }],
    });
    expect(result.success).toBe(true);
  });

  it("should reject route objects with empty name", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: [{ name: "", public: true }],
    });
    expect(result.success).toBe(false);
  });

  it("should reject route objects with missing name", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: [{ public: true }],
    });
    expect(result.success).toBe(false);
  });

  it("should accept route objects without public (defaults to private)", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: [{ name: "internal" }],
    });
    expect(result.success).toBe(true);
  });

  it("should accept route names with slashes and hyphens", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: ["auth/callback", "web-hook", { name: "api/v2/data", public: true }],
    });
    expect(result.success).toBe(true);
  });

  // Security: route names become URL path segments, so traversal sequences
  // and absolute paths must be rejected at schema level.
  it("should reject route names with path traversal", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: ["../../admin/settings"],
    });
    expect(result.success).toBe(false);
  });

  it("should reject route names starting with special characters", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: ["/leading-slash"],
    });
    expect(result.success).toBe(false);
  });

  it("should reject route object names with path traversal", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      routes: [{ name: "../escape", public: true }],
    });
    expect(result.success).toBe(false);
  });
});
|
||||
|
||||
// normalizeManifestRoute canonicalizes the string-or-object route union
// into the object form; objects pass through unchanged.
describe("normalizeManifestRoute", () => {
  it("should convert a plain string to { name } object", () => {
    expect(normalizeManifestRoute("webhook")).toEqual({ name: "webhook" });
  });

  it("should pass through a structured object unchanged", () => {
    expect(normalizeManifestRoute({ name: "webhook", public: true })).toEqual({
      name: "webhook",
      public: true,
    });
  });

  it("should pass through an object without public", () => {
    // No `public` key is added during normalization.
    expect(normalizeManifestRoute({ name: "internal" })).toEqual({ name: "internal" });
  });
});
|
||||
|
||||
// Index field names end up in generated SQL, so the schema must only
// accept identifier-like names (no dots, hyphens, quotes, or leading digits).
describe("pluginManifestSchema — storage index field names", () => {
  it("should accept valid simple index field names", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: ["status", "createdAt", "count"] },
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should accept valid composite index field names", () => {
    // A nested array declares a composite (multi-column) index.
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: [["status", "createdAt"]] },
      }),
    );
    expect(result.success).toBe(true);
  });

  it("should reject index field names containing SQL injection payloads", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: ["'); DROP TABLE users--"] },
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject index field names with dots (JSON path traversal)", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: ["nested.field"] },
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject index field names with hyphens", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: ["my-field"] },
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject index field names starting with a number", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: ["1field"] },
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject empty index field names", () => {
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: [""] },
      }),
    );
    expect(result.success).toBe(false);
  });

  it("should reject malicious field names in composite indexes", () => {
    // Every element of a composite index is validated individually.
    const result = pluginManifestSchema.safeParse(
      makeManifest({
        items: { indexes: [["status", "'); DROP TABLE--"]] },
      }),
    );
    expect(result.success).toBe(false);
  });
});
|
||||
// Verifies the "url" and "email" settings-field types parse with and
// without their optional default/placeholder attributes, and that parsed
// data round-trips every attribute unchanged.
describe("pluginManifestSchema - admin.settingsSchema url/email field types", () => {
  it("should accept url setting field with label and description", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          website: {
            type: "url",
            label: "Website URL",
            description: "The plugin website",
          },
        },
      },
    });
    expect(result.success).toBe(true);
  });

  it("should accept url setting field with default and placeholder", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          website: {
            type: "url",
            label: "Website URL",
            description: "The plugin website",
            default: "https://example.com",
            placeholder: "https://your-site.com",
          },
        },
      },
    });
    expect(result.success).toBe(true);
    if (result.success) {
      // Parsing must not strip or rewrite any of the optional attributes.
      const parsed = result.data;
      expect(parsed.admin?.settingsSchema?.website).toEqual({
        type: "url",
        label: "Website URL",
        description: "The plugin website",
        default: "https://example.com",
        placeholder: "https://your-site.com",
      });
    }
  });

  it("should accept email setting field with label and description", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          supportEmail: {
            type: "email",
            label: "Support Email",
            description: "Email for support",
          },
        },
      },
    });
    expect(result.success).toBe(true);
  });

  it("should accept email setting field with default and placeholder", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          supportEmail: {
            type: "email",
            label: "Support Email",
            description: "Email for support",
            default: "support@example.com",
            placeholder: "your@email.com",
          },
        },
      },
    });
    expect(result.success).toBe(true);
    if (result.success) {
      const parsed = result.data;
      expect(parsed.admin?.settingsSchema?.supportEmail).toEqual({
        type: "email",
        label: "Support Email",
        description: "Email for support",
        default: "support@example.com",
        placeholder: "your@email.com",
      });
    }
  });

  it("should accept both url and email in the same settingsSchema", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          website: {
            type: "url",
            label: "Website",
            default: "https://example.com",
            placeholder: "https://",
          },
          contactEmail: {
            type: "email",
            label: "Contact Email",
            default: "contact@example.com",
            placeholder: "email@domain.com",
          },
        },
      },
    });
    expect(result.success).toBe(true);
    if (result.success) {
      const parsed = result.data;
      expect(parsed.admin?.settingsSchema?.website.type).toBe("url");
      expect(parsed.admin?.settingsSchema?.contactEmail.type).toBe("email");
      expect(parsed.admin?.settingsSchema?.website.default).toBe("https://example.com");
      expect(parsed.admin?.settingsSchema?.contactEmail.default).toBe("contact@example.com");
    }
  });

  it("should accept url field without optional fields", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          docs: {
            type: "url",
            label: "Documentation",
          },
        },
      },
    });
    expect(result.success).toBe(true);
  });

  it("should accept email field without optional fields", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          notifications: {
            type: "email",
            label: "Notification Email",
          },
        },
      },
    });
    expect(result.success).toBe(true);
  });

  it("should accept number field without optional fields", () => {
    const result = pluginManifestSchema.safeParse({
      ...makeManifest({}),
      admin: {
        settingsSchema: {
          port: {
            type: "number",
            label: "Server Port",
          },
        },
      },
    });
    expect(result.success).toBe(true);
  });
});
|
||||
528
packages/core/tests/unit/plugins/marketplace-client.test.ts
Normal file
528
packages/core/tests/unit/plugins/marketplace-client.test.ts
Normal file
@@ -0,0 +1,528 @@
|
||||
/**
|
||||
* MarketplaceClient + tar parser tests
|
||||
*
|
||||
* Tests:
|
||||
* - createMarketplaceClient factory
|
||||
* - MarketplaceClient.search/getPlugin/getVersions
|
||||
* - Bundle download and extraction (tar + gzip)
|
||||
* - Error handling (unavailable, HTTP errors)
|
||||
* - reportInstall (fire-and-forget)
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import {
|
||||
createMarketplaceClient,
|
||||
MarketplaceError,
|
||||
MarketplaceUnavailableError,
|
||||
type MarketplaceClient,
|
||||
type MarketplacePluginDetail,
|
||||
type MarketplaceSearchResult,
|
||||
} from "../../../src/plugins/marketplace.js";
|
||||
|
||||
// Lowercase-hex shapes — presumably used to validate checksums/IDs in the
// assertions further down this file (usage not visible here; verify).
const HEX_64_PATTERN = /^[a-f0-9]{64}$/;
const HEX_16_PATTERN = /^[a-f0-9]{16}$/;
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Create a minimal tar archive from a map of filename → content.
|
||||
* Returns an uncompressed tar buffer.
|
||||
*/
|
||||
function createTar(files: Record<string, string>): Uint8Array {
|
||||
const blocks: Uint8Array[] = [];
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
for (const [name, content] of Object.entries(files)) {
|
||||
const contentBytes = encoder.encode(content);
|
||||
const size = contentBytes.length;
|
||||
|
||||
// Create 512-byte header
|
||||
const header = new Uint8Array(512);
|
||||
// Name (bytes 0-99)
|
||||
const nameBytes = encoder.encode(name);
|
||||
header.set(nameBytes.subarray(0, 100), 0);
|
||||
|
||||
// File mode (bytes 100-107): "0000644\0"
|
||||
header.set(encoder.encode("0000644\0"), 100);
|
||||
|
||||
// UID (bytes 108-115): "0000000\0"
|
||||
header.set(encoder.encode("0000000\0"), 108);
|
||||
|
||||
// GID (bytes 116-123): "0000000\0"
|
||||
header.set(encoder.encode("0000000\0"), 116);
|
||||
|
||||
// Size in octal (bytes 124-135)
|
||||
const sizeOctal = size.toString(8).padStart(11, "0") + "\0";
|
||||
header.set(encoder.encode(sizeOctal), 124);
|
||||
|
||||
// Mtime (bytes 136-147): "00000000000\0"
|
||||
header.set(encoder.encode("00000000000\0"), 136);
|
||||
|
||||
// Type flag (byte 156): '0' for regular file
|
||||
header[156] = 0x30;
|
||||
|
||||
// Checksum (bytes 148-155): compute after setting spaces
|
||||
// Initially fill with spaces
|
||||
header.set(encoder.encode(" "), 148);
|
||||
|
||||
// Compute checksum (sum of all unsigned bytes in header)
|
||||
let checksum = 0;
|
||||
for (let i = 0; i < 512; i++) {
|
||||
checksum += header[i]!;
|
||||
}
|
||||
const checksumOctal = checksum.toString(8).padStart(6, "0") + "\0 ";
|
||||
header.set(encoder.encode(checksumOctal), 148);
|
||||
|
||||
blocks.push(header);
|
||||
|
||||
// File data (padded to 512-byte boundary)
|
||||
const paddedSize = Math.ceil(size / 512) * 512;
|
||||
const dataBlock = new Uint8Array(paddedSize);
|
||||
dataBlock.set(contentBytes, 0);
|
||||
blocks.push(dataBlock);
|
||||
}
|
||||
|
||||
// Two 512-byte zero blocks = end of archive
|
||||
blocks.push(new Uint8Array(1024));
|
||||
|
||||
// Concatenate all blocks
|
||||
const totalSize = blocks.reduce((sum, b) => sum + b.length, 0);
|
||||
const tar = new Uint8Array(totalSize);
|
||||
let offset = 0;
|
||||
for (const block of blocks) {
|
||||
tar.set(block, offset);
|
||||
offset += block.length;
|
||||
}
|
||||
|
||||
return tar;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gzip compress data using CompressionStream
|
||||
*/
|
||||
async function gzip(data: Uint8Array): Promise<Uint8Array> {
|
||||
const cs = new CompressionStream("gzip");
|
||||
const writer = cs.writable.getWriter();
|
||||
const reader = cs.readable.getReader();
|
||||
|
||||
const writePromise = writer.write(data).then(() => writer.close());
|
||||
const chunks: Uint8Array[] = [];
|
||||
let totalLength = 0;
|
||||
|
||||
for (;;) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
totalLength += value.length;
|
||||
}
|
||||
await writePromise;
|
||||
|
||||
const result = new Uint8Array(totalLength);
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const BASE_URL = "https://marketplace.example.com";
|
||||
|
||||
function mockPlugin(): MarketplacePluginDetail {
|
||||
return {
|
||||
id: "test-seo",
|
||||
name: "Test SEO",
|
||||
description: "SEO plugin",
|
||||
author: { name: "Test Author", verified: true, avatarUrl: null },
|
||||
capabilities: ["hooks"],
|
||||
keywords: ["seo"],
|
||||
installCount: 42,
|
||||
hasIcon: false,
|
||||
iconUrl: `${BASE_URL}/api/v1/plugins/test-seo/icon`,
|
||||
createdAt: "2026-01-01T00:00:00Z",
|
||||
updatedAt: "2026-02-01T00:00:00Z",
|
||||
repositoryUrl: "https://github.com/test/test-seo",
|
||||
homepageUrl: null,
|
||||
license: "MIT",
|
||||
latestVersion: {
|
||||
version: "1.0.0",
|
||||
minEmDashVersion: null,
|
||||
bundleSize: 1234,
|
||||
checksum: "abc123",
|
||||
changelog: "Initial release",
|
||||
readme: "# Test SEO",
|
||||
hasIcon: false,
|
||||
screenshotCount: 0,
|
||||
screenshotUrls: [],
|
||||
capabilities: ["hooks"],
|
||||
auditVerdict: "pass",
|
||||
imageAuditVerdict: "pass",
|
||||
publishedAt: "2026-01-01T00:00:00Z",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
describe("MarketplaceClient", () => {
  let client: MarketplaceClient;
  let fetchSpy: ReturnType<typeof vi.fn>;

  // Each test gets a fresh client and a stubbed global fetch; individual
  // tests queue responses with mockResolvedValueOnce / mockRejectedValueOnce.
  beforeEach(() => {
    client = createMarketplaceClient(BASE_URL);
    fetchSpy = vi.fn();
    vi.stubGlobal("fetch", fetchSpy);
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  describe("search", () => {
    it("fetches plugins from marketplace", async () => {
      const searchResult: MarketplaceSearchResult = {
        items: [
          {
            id: "test-seo",
            name: "Test SEO",
            description: "SEO plugin",
            author: { name: "Test", verified: true, avatarUrl: null },
            capabilities: ["hooks"],
            keywords: ["seo"],
            installCount: 10,
            hasIcon: false,
            iconUrl: `${BASE_URL}/api/v1/plugins/test-seo/icon`,
            createdAt: "2026-01-01T00:00:00Z",
            updatedAt: "2026-02-01T00:00:00Z",
          },
        ],
      };

      fetchSpy.mockResolvedValueOnce(
        new Response(JSON.stringify(searchResult), {
          status: 200,
          headers: { "Content-Type": "application/json" },
        }),
      );

      const result = await client.search("seo");
      expect(result.items).toHaveLength(1);
      expect(result.items[0]!.id).toBe("test-seo");

      // The query term goes into the `q` param and JSON is explicitly requested.
      expect(fetchSpy).toHaveBeenCalledWith(
        `${BASE_URL}/api/v1/plugins?q=seo`,
        expect.objectContaining({ headers: { Accept: "application/json" } }),
      );
    });

    it("passes category and limit as query params", async () => {
      fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify({ items: [] }), { status: 200 }));

      await client.search(undefined, { category: "analytics", limit: 10 });

      const [url] = fetchSpy.mock.calls[0]!;
      expect(url).toContain("category=analytics");
      expect(url).toContain("limit=10");
    });

    it("throws MarketplaceUnavailableError on network failure", async () => {
      fetchSpy.mockRejectedValueOnce(new Error("Network error"));

      await expect(client.search("test")).rejects.toThrow(MarketplaceUnavailableError);
    });

    it("throws MarketplaceError on HTTP error", async () => {
      fetchSpy.mockResolvedValueOnce(
        new Response(JSON.stringify({ error: "Rate limited" }), { status: 429 }),
      );

      await expect(client.search("test")).rejects.toThrow(MarketplaceError);
    });
  });

  describe("getPlugin", () => {
    it("fetches plugin detail", async () => {
      const plugin = mockPlugin();
      fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(plugin), { status: 200 }));

      const result = await client.getPlugin("test-seo");
      expect(result.id).toBe("test-seo");
      expect(result.latestVersion?.version).toBe("1.0.0");
    });

    it("encodes plugin ID in URL", async () => {
      fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify(mockPlugin()), { status: 200 }));

      await client.getPlugin("@scope/plugin");

      // "@" -> %40 and "/" -> %2F: scoped ids must be URL-encoded as one segment.
      const [url] = fetchSpy.mock.calls[0]!;
      expect(url).toContain("%40scope%2Fplugin");
    });
  });

  describe("getVersions", () => {
    it("fetches version list", async () => {
      fetchSpy.mockResolvedValueOnce(
        new Response(
          JSON.stringify({
            items: [
              {
                version: "1.0.0",
                minEmDashVersion: null,
                bundleSize: 1234,
                checksum: "abc",
                changelog: "First",
                capabilities: ["hooks"],
                auditVerdict: "pass",
                imageAuditVerdict: "pass",
                publishedAt: "2026-01-01T00:00:00Z",
              },
            ],
          }),
          { status: 200 },
        ),
      );

      const versions = await client.getVersions("test-seo");
      expect(versions).toHaveLength(1);
      expect(versions[0]!.version).toBe("1.0.0");
    });
  });

  describe("downloadBundle", () => {
    it("downloads, decompresses, and extracts a bundle tarball", async () => {
      const manifest = {
        id: "test-seo",
        version: "1.0.0",
        capabilities: ["content:read"],
        allowedHosts: [],
        storage: {},
        hooks: [],
        routes: [],
        admin: {},
      };

      // Build a real gzipped tar in-memory so the full decode path is exercised.
      const tarData = createTar({
        "manifest.json": JSON.stringify(manifest),
        "backend.js": 'export default function() { return "hello"; }',
      });
      const gzipped = await gzip(tarData);

      fetchSpy.mockResolvedValueOnce(
        new Response(gzipped, {
          status: 200,
          headers: { "Content-Type": "application/gzip" },
        }),
      );

      const bundle = await client.downloadBundle("test-seo", "1.0.0");

      expect(bundle.manifest.id).toBe("test-seo");
      expect(bundle.manifest.version).toBe("1.0.0");
      expect(bundle.backendCode).toContain("hello");
      // Checksum is a SHA-256 hex digest of the downloaded bytes.
      expect(bundle.checksum).toMatch(HEX_64_PATTERN);
    });

    it("extracts optional admin.js", async () => {
      const manifest = {
        id: "test-seo",
        version: "1.0.0",
        capabilities: [],
        allowedHosts: [],
        storage: {},
        hooks: [],
        routes: [],
        admin: {},
      };

      const tarData = createTar({
        "manifest.json": JSON.stringify(manifest),
        "backend.js": "export default {};",
        "admin.js": "export const Admin = {};",
      });
      const gzipped = await gzip(tarData);

      fetchSpy.mockResolvedValueOnce(new Response(gzipped, { status: 200 }));

      const bundle = await client.downloadBundle("test-seo", "1.0.0");
      expect(bundle.adminCode).toContain("Admin");
    });

    it("throws on missing manifest.json", async () => {
      const tarData = createTar({
        "backend.js": "export default {};",
      });
      const gzipped = await gzip(tarData);

      fetchSpy.mockResolvedValueOnce(new Response(gzipped, { status: 200 }));

      await expect(client.downloadBundle("test-seo", "1.0.0")).rejects.toThrow(
        "missing manifest.json",
      );
    });

    it("throws on missing backend.js", async () => {
      const tarData = createTar({
        "manifest.json": JSON.stringify({
          id: "test",
          version: "1.0.0",
          capabilities: [],
          allowedHosts: [],
          storage: {},
          hooks: [],
          routes: [],
          admin: {},
        }),
      });
      const gzipped = await gzip(tarData);

      fetchSpy.mockResolvedValueOnce(new Response(gzipped, { status: 200 }));

      await expect(client.downloadBundle("test-seo", "1.0.0")).rejects.toThrow(
        "missing backend.js",
      );
    });

    it("throws on malformed manifest.json", async () => {
      const tarData = createTar({
        "manifest.json": "not-json{{{",
        "backend.js": "export default {};",
      });
      const gzipped = await gzip(tarData);

      fetchSpy.mockResolvedValueOnce(new Response(gzipped, { status: 200 }));

      await expect(client.downloadBundle("test-seo", "1.0.0")).rejects.toThrow(
        "malformed manifest.json",
      );
    });

    it("throws MarketplaceUnavailableError on network failure", async () => {
      fetchSpy.mockRejectedValueOnce(new Error("Connection refused"));

      await expect(client.downloadBundle("test-seo", "1.0.0")).rejects.toThrow(
        MarketplaceUnavailableError,
      );
    });

    it("throws on HTTP error from bundle download", async () => {
      fetchSpy.mockResolvedValueOnce(new Response("Not Found", { status: 404 }));

      await expect(client.downloadBundle("test-seo", "1.0.0")).rejects.toThrow(MarketplaceError);
    });
  });

  describe("reportInstall", () => {
    it("sends install stat without throwing", async () => {
      fetchSpy.mockResolvedValueOnce(new Response("OK", { status: 200 }));

      // Should not throw even if we await it
      await client.reportInstall("test-seo", "1.0.0");

      expect(fetchSpy).toHaveBeenCalledWith(
        `${BASE_URL}/api/v1/plugins/test-seo/installs`,
        expect.objectContaining({
          method: "POST",
          headers: { "Content-Type": "application/json" },
        }),
      );
    });

    it("does not throw on network failure", async () => {
      fetchSpy.mockRejectedValueOnce(new Error("Network error"));

      // Should not throw — install stats are best-effort.
      await client.reportInstall("test-seo", "1.0.0");
    });

    it("sends a stable site hash across multiple calls", async () => {
      const clientWithOrigin = createMarketplaceClient(BASE_URL, "https://myblog.example.com");

      fetchSpy.mockResolvedValue(new Response("OK", { status: 200 }));

      await clientWithOrigin.reportInstall("test-seo", "1.0.0");
      await clientWithOrigin.reportInstall("test-seo", "1.0.0");

      const calls = fetchSpy.mock.calls;
      expect(calls.length).toBe(2);

      const body1 = JSON.parse(calls[0]![1]!.body as string);
      const body2 = JSON.parse(calls[1]![1]!.body as string);

      // Same origin produces the same hash every time
      expect(body1.siteHash).toBe(body2.siteHash);
      expect(body1.siteHash).toMatch(HEX_16_PATTERN);
    });

    it("produces different hashes for different site origins", async () => {
      const client1 = createMarketplaceClient(BASE_URL, "https://site-a.example.com");
      const client2 = createMarketplaceClient(BASE_URL, "https://site-b.example.com");

      fetchSpy.mockResolvedValue(new Response("OK", { status: 200 }));

      await client1.reportInstall("test-seo", "1.0.0");
      await client2.reportInstall("test-seo", "1.0.0");

      const body1 = JSON.parse(fetchSpy.mock.calls[0]![1]!.body as string);
      const body2 = JSON.parse(fetchSpy.mock.calls[1]![1]!.body as string);

      expect(body1.siteHash).not.toBe(body2.siteHash);
    });
  });

  describe("trailing slash handling", () => {
    it("strips trailing slashes from base URL", async () => {
      const clientWithSlash = createMarketplaceClient("https://example.com/");
      fetchSpy.mockResolvedValueOnce(new Response(JSON.stringify({ items: [] }), { status: 200 }));

      await clientWithSlash.search("test");

      // Without stripping, the URL would contain "//api".
      const [url] = fetchSpy.mock.calls[0]!;
      expect(url).toContain("https://example.com/api/v1/plugins");
      expect(url).not.toContain("//api");
    });
  });
});
|
||||
|
||||
describe("tar parser", () => {
  it("handles files with ./ prefix in paths", async () => {
    // Create tar with ./ prefixed paths (common from tar tools)
    const manifest = {
      id: "test",
      version: "1.0.0",
      capabilities: [],
      allowedHosts: [],
      storage: {},
      hooks: [],
      routes: [],
      admin: {},
    };
    const files: Record<string, string> = {};
    files["./manifest.json"] = JSON.stringify(manifest);
    files["./backend.js"] = "export default {};";

    const tarData = createTar(files);
    const gzipped = await gzip(tarData);

    // This suite manages its own fetch stub (no shared beforeEach here).
    const fetchSpy = vi.fn().mockResolvedValueOnce(new Response(gzipped, { status: 200 }));
    vi.stubGlobal("fetch", fetchSpy);

    const client = createMarketplaceClient("https://example.com");
    const bundle = await client.downloadBundle("test", "1.0.0");

    // The "./" prefix must be normalized away during extraction.
    expect(bundle.manifest.id).toBe("test");
    vi.restoreAllMocks();
  });

  it("handles empty tar archive gracefully", async () => {
    // Just two zero blocks (empty archive)
    const emptyTar = new Uint8Array(1024);
    const gzipped = await gzip(emptyTar);

    const fetchSpy = vi.fn().mockResolvedValueOnce(new Response(gzipped, { status: 200 }));
    vi.stubGlobal("fetch", fetchSpy);

    const client = createMarketplaceClient("https://example.com");
    // Empty archive surfaces as "missing manifest.json", not a parser crash.
    await expect(client.downloadBundle("test", "1.0.0")).rejects.toThrow("missing manifest.json");
    vi.restoreAllMocks();
  });
});
|
||||
108
packages/core/tests/unit/plugins/marketplace-state.test.ts
Normal file
108
packages/core/tests/unit/plugins/marketplace-state.test.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Marketplace plugin state tests
|
||||
*
|
||||
* Tests the PluginStateRepository marketplace extensions:
|
||||
* - source/marketplaceVersion fields in upsert
|
||||
* - getMarketplacePlugins filter
|
||||
* - Migration 022 columns
|
||||
*/
|
||||
|
||||
import BetterSqlite3 from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { runMigrations } from "../../../src/database/migrations/runner.js";
|
||||
import type { Database as DbSchema } from "../../../src/database/types.js";
|
||||
import { PluginStateRepository } from "../../../src/plugins/state.js";
|
||||
|
||||
describe("PluginStateRepository – marketplace extensions", () => {
  let db: Kysely<DbSchema>;
  let sqliteDb: BetterSqlite3.Database;
  let repo: PluginStateRepository;

  // Fresh in-memory SQLite per test, migrated to the latest schema so the
  // migration-022 marketplace columns (source, marketplace_version) exist.
  beforeEach(async () => {
    sqliteDb = new BetterSqlite3(":memory:");
    db = new Kysely<DbSchema>({
      dialect: new SqliteDialect({ database: sqliteDb }),
    });
    await runMigrations(db);
    repo = new PluginStateRepository(db);
  });

  afterEach(async () => {
    // Destroy the Kysely wrapper first, then close the underlying handle.
    await db.destroy();
    sqliteDb.close();
  });

  describe("upsert with marketplace source", () => {
    it("defaults source to 'config' when not specified", async () => {
      const state = await repo.upsert("test-plugin", "1.0.0", "active");
      expect(state.source).toBe("config");
      expect(state.marketplaceVersion).toBeNull();
    });

    it("stores source='marketplace' and marketplaceVersion", async () => {
      const state = await repo.upsert("mp-plugin", "1.0.0", "active", {
        source: "marketplace",
        marketplaceVersion: "1.0.0",
      });
      expect(state.source).toBe("marketplace");
      expect(state.marketplaceVersion).toBe("1.0.0");
    });

    it("updates marketplaceVersion on subsequent upsert", async () => {
      await repo.upsert("mp-plugin", "1.0.0", "active", {
        source: "marketplace",
        marketplaceVersion: "1.0.0",
      });

      const updated = await repo.upsert("mp-plugin", "2.0.0", "active", {
        source: "marketplace",
        marketplaceVersion: "2.0.0",
      });

      expect(updated.version).toBe("2.0.0");
      expect(updated.marketplaceVersion).toBe("2.0.0");
    });
  });

  describe("getMarketplacePlugins", () => {
    it("returns empty array when no marketplace plugins", async () => {
      await repo.upsert("config-plugin", "1.0.0", "active");
      const result = await repo.getMarketplacePlugins();
      expect(result).toEqual([]);
    });

    it("returns only marketplace-sourced plugins", async () => {
      await repo.upsert("config-plugin", "1.0.0", "active");
      await repo.upsert("mp-plugin-a", "1.0.0", "active", {
        source: "marketplace",
        marketplaceVersion: "1.0.0",
      });
      // Inactive marketplace plugins are still returned — the filter is on
      // source, not status.
      await repo.upsert("mp-plugin-b", "2.0.0", "inactive", {
        source: "marketplace",
        marketplaceVersion: "2.0.0",
      });

      const result = await repo.getMarketplacePlugins();
      expect(result).toHaveLength(2);
      expect(result.map((p) => p.pluginId).toSorted()).toEqual(["mp-plugin-a", "mp-plugin-b"]);
      expect(result.every((p) => p.source === "marketplace")).toBe(true);
    });
  });

  describe("delete marketplace plugin", () => {
    it("deletes marketplace plugin state", async () => {
      await repo.upsert("mp-plugin", "1.0.0", "active", {
        source: "marketplace",
        marketplaceVersion: "1.0.0",
      });

      const deleted = await repo.delete("mp-plugin");
      expect(deleted).toBe(true);

      const state = await repo.get("mp-plugin");
      expect(state).toBeNull();
    });
  });
});
|
||||
156
packages/core/tests/unit/plugins/page-context.test.ts
Normal file
156
packages/core/tests/unit/plugins/page-context.test.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
/**
|
||||
* Page Context Tests
|
||||
*
|
||||
* Tests the public page context builder for:
|
||||
* - Astro-like input handling
|
||||
* - URL string and object input
|
||||
* - Default pageType resolution
|
||||
* - Null normalization for optional fields
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { createPublicPageContext } from "../../../src/page/context.js";
|
||||
|
||||
describe("createPublicPageContext", () => {
  it("accepts Astro-like input and extracts url/path/locale", () => {
    const result = createPublicPageContext({
      Astro: {
        url: new URL("https://example.com/blog/hello"),
        currentLocale: "en",
      },
      kind: "content",
      title: "Hello",
      pageTitle: "Hello",
    });

    expect(result.url).toBe("https://example.com/blog/hello");
    expect(result.path).toBe("/blog/hello");
    expect(result.locale).toBe("en");
    expect(result.title).toBe("Hello");
    expect(result.pageTitle).toBe("Hello");
  });

  it("accepts URL string input", () => {
    const result = createPublicPageContext({
      url: "https://example.com/about",
      kind: "custom",
      locale: "fr",
    });

    expect(result.url).toBe("https://example.com/about");
    expect(result.path).toBe("/about");
    expect(result.locale).toBe("fr");
  });

  it("accepts URL object input", () => {
    const urlObj = new URL("https://example.com/products?page=2");

    const result = createPublicPageContext({
      url: urlObj,
      kind: "custom",
    });

    // url keeps the query string; path does not.
    expect(result.url).toBe("https://example.com/products?page=2");
    expect(result.path).toBe("/products");
  });

  it('defaults pageType to "article" for content kind', () => {
    const result = createPublicPageContext({
      url: "https://example.com/post/1",
      kind: "content",
    });

    expect(result.pageType).toBe("article");
  });

  it('defaults pageType to "website" for custom kind', () => {
    const result = createPublicPageContext({
      url: "https://example.com/",
      kind: "custom",
    });

    expect(result.pageType).toBe("website");
  });

  it("normalizes undefined locale to null", () => {
    const result = createPublicPageContext({
      Astro: {
        url: new URL("https://example.com/"),
        // currentLocale not set
      },
      kind: "custom",
    });

    expect(result.locale).toBeNull();
  });

  it("normalizes undefined pageTitle to null", () => {
    const result = createPublicPageContext({
      url: "https://example.com/about",
      kind: "custom",
      title: "About | My Site",
    });

    // title is NOT used as a pageTitle fallback.
    expect(result.pageTitle).toBeNull();
  });

  it("normalizes content slug undefined to null", () => {
    const result = createPublicPageContext({
      url: "https://example.com/post/1",
      kind: "content",
      content: { collection: "posts", id: "abc123" },
    });

    expect(result.content).toBeDefined();
    expect(result.content!.slug).toBeNull();
    expect(result.content!.collection).toBe("posts");
    expect(result.content!.id).toBe("abc123");
  });

  it("sets content to undefined for custom kind", () => {
    const result = createPublicPageContext({
      url: "https://example.com/about",
      kind: "custom",
    });

    expect(result.content).toBeUndefined();
  });

  it("passes breadcrumbs through verbatim when provided", () => {
    const result = createPublicPageContext({
      url: "https://example.com/blog/hello",
      kind: "content",
      breadcrumbs: [
        { name: "Home", url: "/" },
        { name: "Blog", url: "/blog/" },
        { name: "Hello", url: "/blog/hello" },
      ],
    });

    expect(result.breadcrumbs).toEqual([
      { name: "Home", url: "/" },
      { name: "Blog", url: "/blog/" },
      { name: "Hello", url: "/blog/hello" },
    ]);
  });

  it("leaves breadcrumbs undefined when not provided", () => {
    const result = createPublicPageContext({
      url: "https://example.com/about",
      kind: "custom",
    });

    expect(result.breadcrumbs).toBeUndefined();
  });

  it("preserves explicit empty breadcrumbs array (opt-out signal)", () => {
    // [] must survive: it means "no breadcrumbs on purpose", which is
    // distinct from undefined ("caller didn't say").
    const result = createPublicPageContext({
      url: "https://example.com/",
      kind: "custom",
      breadcrumbs: [],
    });

    expect(result.breadcrumbs).toEqual([]);
  });
});
|
||||
@@ -0,0 +1,71 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
/**
|
||||
* Tests for the sandbox boundary enforcement of page contribution hooks.
|
||||
*
|
||||
* page:metadata is sandbox-safe.
|
||||
* page:fragments is trusted-only but valid in manifests (enforcement happens
|
||||
* at runtime via capability checks and at bundle time via CLI warnings).
|
||||
*
|
||||
* The enforcement happens at multiple layers:
|
||||
* 1. Manifest schema: HOOK_NAMES includes both page:metadata and page:fragments
|
||||
* 2. Capability enforcement: page:fragments requires page:inject capability
|
||||
* 3. Bundle CLI: warns when page:fragments is declared in a sandbox-targeted plugin
|
||||
* 4. Fragment collector: never invokes sandboxed plugins for page:fragments
|
||||
*/
|
||||
|
||||
describe("page contribution sandbox boundary", () => {
  describe("manifest schema validation", () => {
    it("should accept page:metadata in manifests", async () => {
      // Dynamic import keeps the schema module out of the file's top-level
      // dependency graph until this test actually runs.
      const { pluginManifestSchema } = await import("../../../src/plugins/manifest-schema.js");

      const manifest = {
        id: "test-plugin",
        version: "1.0.0",
        capabilities: [],
        allowedHosts: [],
        storage: {},
        hooks: [{ name: "page:metadata" }],
        routes: [],
        admin: { pages: [], widgets: [] },
      };

      const result = pluginManifestSchema.safeParse(manifest);
      expect(result.success).toBe(true);
    });

    it("should accept page:fragments in manifests (enforcement is at runtime)", async () => {
      const { pluginManifestSchema } = await import("../../../src/plugins/manifest-schema.js");

      const manifest = {
        id: "test-plugin",
        version: "1.0.0",
        capabilities: [],
        allowedHosts: [],
        storage: {},
        hooks: [{ name: "page:fragments" }],
        routes: [],
        admin: { pages: [], widgets: [] },
      };

      // Manifest validation accepts page:fragments — trusted-only enforcement
      // happens via capability checks (requires page:inject) and the bundle CLI
      // warns when this hook is used in a sandbox-targeted plugin.
      const result = pluginManifestSchema.safeParse(manifest);
      expect(result.success).toBe(true);
    });
  });

  describe("fragment collector defense-in-depth", () => {
    it("resolveFragments only processes contributions it receives", async () => {
      // The fragment collector in page/fragments.ts is a pure function that
      // processes whatever contributions are passed to it. The defense-in-depth
      // is that the runtime never passes sandboxed plugin contributions to it.
      // This test verifies the pure function works correctly.
      const { resolveFragments } = await import("../../../src/page/fragments.js");

      const result = resolveFragments([], "head");
      expect(result).toEqual([]);
    });
  });
});
|
||||
222
packages/core/tests/unit/plugins/page-fragments.test.ts
Normal file
222
packages/core/tests/unit/plugins/page-fragments.test.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
/**
|
||||
* Page Fragments Tests
|
||||
*
|
||||
* Tests the fragment collector for:
|
||||
* - Filtering contributions by placement
|
||||
* - Deduplication by key and src
|
||||
* - HTML rendering of script and raw HTML fragments
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import { resolveFragments, renderFragments } from "../../../src/page/fragments.js";
|
||||
import type { PageFragmentContribution } from "../../../src/plugins/types.js";
|
||||
|
||||
describe("resolveFragments", () => {
  it("filters by placement", () => {
    const contributions: PageFragmentContribution[] = [
      { kind: "html", placement: "head", html: "<link>" },
      { kind: "html", placement: "body:end", html: "<div>footer</div>" },
      { kind: "html", placement: "head", html: "<style></style>" },
    ];

    const result = resolveFragments(contributions, "head");

    expect(result).toHaveLength(2);
    expect(result[0]!.kind).toBe("html");
    expect((result[0] as { html: string }).html).toBe("<link>");
    expect((result[1] as { html: string }).html).toBe("<style></style>");
  });

  it("dedupes by key + placement", () => {
    const contributions: PageFragmentContribution[] = [
      { kind: "html", placement: "head", html: "<link first>", key: "my-styles" },
      { kind: "html", placement: "head", html: "<link second>", key: "my-styles" },
    ];

    const result = resolveFragments(contributions, "head");

    // First contribution with a given key wins; later duplicates are dropped.
    expect(result).toHaveLength(1);
    expect((result[0] as { html: string }).html).toBe("<link first>");
  });

  it("dedupes external scripts by src", () => {
    const contributions: PageFragmentContribution[] = [
      {
        kind: "external-script",
        placement: "body:end",
        src: "https://cdn.example.com/lib.js",
        async: true,
      },
      {
        kind: "external-script",
        placement: "body:end",
        src: "https://cdn.example.com/lib.js",
        defer: true,
      },
    ];

    const result = resolveFragments(contributions, "body:end");

    // Same src → single entry; the first occurrence's attributes are kept.
    expect(result).toHaveLength(1);
    expect((result[0] as { async?: boolean }).async).toBe(true);
  });

  it("allows different placements of same key", () => {
    const contributions: PageFragmentContribution[] = [
      { kind: "html", placement: "head", html: "<meta>", key: "seo" },
      { kind: "html", placement: "body:end", html: "<noscript>", key: "seo" },
    ];

    const headResult = resolveFragments(contributions, "head");
    const bodyResult = resolveFragments(contributions, "body:end");

    expect(headResult).toHaveLength(1);
    expect(bodyResult).toHaveLength(1);
  });

  it("preserves order", () => {
    const contributions: PageFragmentContribution[] = [
      { kind: "html", placement: "head", html: "<first>" },
      { kind: "html", placement: "head", html: "<second>" },
      { kind: "html", placement: "head", html: "<third>" },
    ];

    const result = resolveFragments(contributions, "head");

    expect(result).toHaveLength(3);
    expect((result[0] as { html: string }).html).toBe("<first>");
    expect((result[1] as { html: string }).html).toBe("<second>");
    expect((result[2] as { html: string }).html).toBe("<third>");
  });
});
|
||||
|
||||
describe("renderFragments", () => {
|
||||
it("renders external script with async/defer", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "external-script",
|
||||
placement: "head",
|
||||
src: "https://cdn.example.com/analytics.js",
|
||||
async: true,
|
||||
defer: true,
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "head");
|
||||
|
||||
expect(html).toBe('<script src="https://cdn.example.com/analytics.js" async defer></script>');
|
||||
});
|
||||
|
||||
it("renders external script with attributes", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "external-script",
|
||||
placement: "head",
|
||||
src: "https://cdn.example.com/widget.js",
|
||||
attributes: { "data-site-id": "abc123", crossorigin: "anonymous" },
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "head");
|
||||
|
||||
expect(html).toContain('src="https://cdn.example.com/widget.js"');
|
||||
expect(html).toContain('data-site-id="abc123"');
|
||||
expect(html).toContain('crossorigin="anonymous"');
|
||||
expect(html).toContain("</script>");
|
||||
});
|
||||
|
||||
it("renders inline script", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "inline-script",
|
||||
placement: "body:end",
|
||||
code: "console.log('hello');",
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "body:end");
|
||||
|
||||
expect(html).toBe("<script>console.log('hello');</script>");
|
||||
});
|
||||
|
||||
it("escapes </script> in inline script code", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "inline-script",
|
||||
placement: "head",
|
||||
code: 'var x = "</script><script>alert(1)</script>";',
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "head");
|
||||
|
||||
// The </ sequence should be escaped to <\/ to prevent tag breakout
|
||||
expect(html).not.toContain("</script><script>");
|
||||
expect(html).toContain("<\\/script>");
|
||||
});
|
||||
|
||||
it("renders raw HTML", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "html",
|
||||
placement: "body:start",
|
||||
html: '<div id="overlay"></div>',
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "body:start");
|
||||
|
||||
expect(html).toBe('<div id="overlay"></div>');
|
||||
});
|
||||
|
||||
it("escapes attribute names and values", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "external-script",
|
||||
placement: "head",
|
||||
src: "https://example.com/x.js",
|
||||
attributes: { 'data-"key': 'val<ue&"more' },
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "head");
|
||||
|
||||
expect(html).toContain("data-"key");
|
||||
expect(html).toContain("val<ue&"more");
|
||||
expect(html).not.toContain('data-"key');
|
||||
});
|
||||
|
||||
it("strips event handler attributes", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{
|
||||
kind: "external-script",
|
||||
placement: "head",
|
||||
src: "https://example.com/x.js",
|
||||
attributes: {
|
||||
onload: "alert(1)",
|
||||
onerror: "alert(2)",
|
||||
"data-id": "safe",
|
||||
crossorigin: "anonymous",
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "head");
|
||||
|
||||
expect(html).not.toContain("onload");
|
||||
expect(html).not.toContain("onerror");
|
||||
expect(html).toContain('data-id="safe"');
|
||||
expect(html).toContain('crossorigin="anonymous"');
|
||||
});
|
||||
|
||||
it("returns empty string for no matching placement", () => {
|
||||
const contributions: PageFragmentContribution[] = [
|
||||
{ kind: "html", placement: "head", html: "<link>" },
|
||||
];
|
||||
|
||||
const html = renderFragments(contributions, "body:end");
|
||||
|
||||
expect(html).toBe("");
|
||||
});
|
||||
});
|
||||
318
packages/core/tests/unit/plugins/page-hooks-execution.test.ts
Normal file
318
packages/core/tests/unit/plugins/page-hooks-execution.test.ts
Normal file
@@ -0,0 +1,318 @@
|
||||
/**
|
||||
* Page Hooks Execution Tests
|
||||
*
|
||||
* Tests that page:metadata and page:fragments hooks fire correctly through
|
||||
* the HookPipeline, returning plugin contributions that EmDashHead,
|
||||
* EmDashBodyStart, and EmDashBodyEnd render into HTML.
|
||||
*
|
||||
* Bug context: The middleware's anonymous fast-path skipped runtime init,
|
||||
* so collectPageMetadata/collectPageFragments were never available to
|
||||
* anonymous visitors. These tests verify the hook pipeline actually runs
|
||||
* plugin handlers and collects their contributions — the path that was
|
||||
* broken before the fix.
|
||||
*/
|
||||
|
||||
import Database from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { HookPipeline } from "../../../src/plugins/hooks.js";
|
||||
import type {
|
||||
ResolvedPlugin,
|
||||
ResolvedHook,
|
||||
PageMetadataHandler,
|
||||
PageFragmentHandler,
|
||||
PublicPageContext,
|
||||
} from "../../../src/plugins/types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
admin: { pages: [], widgets: [] },
|
||||
hooks: {},
|
||||
routes: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createTestHook<T>(
|
||||
pluginId: string,
|
||||
handler: T,
|
||||
overrides: Partial<ResolvedHook<T>> = {},
|
||||
): ResolvedHook<T> {
|
||||
return {
|
||||
pluginId,
|
||||
handler,
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "continue",
|
||||
exclusive: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createPageContext(overrides: Partial<PublicPageContext> = {}): PublicPageContext {
|
||||
return {
|
||||
url: "https://example.com/blog/hello",
|
||||
path: "/blog/hello",
|
||||
locale: null,
|
||||
kind: "content",
|
||||
pageType: "post",
|
||||
title: "Hello World",
|
||||
description: null,
|
||||
canonical: null,
|
||||
image: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// DB setup (required for PluginContextFactory)
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
let db: Kysely<any>;
|
||||
let sqlite: InstanceType<typeof Database>;
|
||||
|
||||
beforeEach(() => {
|
||||
sqlite = new Database(":memory:");
|
||||
db = new Kysely({ dialect: new SqliteDialect({ database: sqlite }) });
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
sqlite.close();
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("page:metadata hook execution", () => {
|
||||
it("runs page:metadata handler and collects contributions", async () => {
|
||||
const metaHandler: PageMetadataHandler = vi.fn(async () => ({
|
||||
kind: "meta" as const,
|
||||
name: "x-page-hook-test",
|
||||
content: "present",
|
||||
}));
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test-meta",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("test-meta", metaHandler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
const results = await pipeline.runPageMetadata({ page });
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.pluginId).toBe("test-meta");
|
||||
expect(results[0]!.contributions).toEqual([
|
||||
{ kind: "meta", name: "x-page-hook-test", content: "present" },
|
||||
]);
|
||||
expect(metaHandler).toHaveBeenCalledOnce();
|
||||
});
|
||||
|
||||
it("collects contributions from multiple plugins", async () => {
|
||||
const handler1: PageMetadataHandler = vi.fn(async () => ({
|
||||
kind: "meta" as const,
|
||||
name: "plugin-1",
|
||||
content: "first",
|
||||
}));
|
||||
|
||||
const handler2: PageMetadataHandler = vi.fn(async () => [
|
||||
{ kind: "meta" as const, name: "plugin-2a", content: "second-a" },
|
||||
{ kind: "link" as const, rel: "alternate" as const, href: "/fr/blog/hello", hreflang: "fr" },
|
||||
]);
|
||||
|
||||
const plugin1 = createTestPlugin({
|
||||
id: "plugin-1",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("plugin-1", handler1, { priority: 1 }),
|
||||
},
|
||||
});
|
||||
|
||||
const plugin2 = createTestPlugin({
|
||||
id: "plugin-2",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("plugin-2", handler2, { priority: 2 }),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin1, plugin2], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
const results = await pipeline.runPageMetadata({ page });
|
||||
|
||||
expect(results).toHaveLength(2);
|
||||
expect(results[0]!.pluginId).toBe("plugin-1");
|
||||
expect(results[1]!.pluginId).toBe("plugin-2");
|
||||
expect(results[1]!.contributions).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("passes page context to the handler", async () => {
|
||||
const metaHandler: PageMetadataHandler = vi.fn(async () => null);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "ctx-test",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("ctx-test", metaHandler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin], { db });
|
||||
const page = createPageContext({ title: "Test Page", path: "/test" });
|
||||
|
||||
await pipeline.runPageMetadata({ page });
|
||||
|
||||
expect(metaHandler).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
page: expect.objectContaining({ title: "Test Page", path: "/test" }),
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it("handles null return from handler (no contributions)", async () => {
|
||||
const metaHandler: PageMetadataHandler = vi.fn(async () => null);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "null-return",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("null-return", metaHandler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
const results = await pipeline.runPageMetadata({ page });
|
||||
|
||||
expect(results).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("isolates errors from individual plugin handlers", async () => {
|
||||
const badHandler: PageMetadataHandler = vi.fn(async () => {
|
||||
throw new Error("Plugin crashed");
|
||||
});
|
||||
|
||||
const goodHandler: PageMetadataHandler = vi.fn(async () => ({
|
||||
kind: "meta" as const,
|
||||
name: "still-works",
|
||||
content: "yes",
|
||||
}));
|
||||
|
||||
const badPlugin = createTestPlugin({
|
||||
id: "bad-plugin",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("bad-plugin", badHandler, { priority: 1 }),
|
||||
},
|
||||
});
|
||||
|
||||
const goodPlugin = createTestPlugin({
|
||||
id: "good-plugin",
|
||||
hooks: {
|
||||
"page:metadata": createTestHook("good-plugin", goodHandler, { priority: 2 }),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([badPlugin, goodPlugin], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
// Should not throw — errors are logged, not propagated
|
||||
const results = await pipeline.runPageMetadata({ page });
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.pluginId).toBe("good-plugin");
|
||||
});
|
||||
});
|
||||
|
||||
describe("page:fragments hook execution", () => {
|
||||
it("runs page:fragments handler and collects contributions", async () => {
|
||||
const fragmentHandler: PageFragmentHandler = vi.fn(async () => ({
|
||||
kind: "html" as const,
|
||||
placement: "head" as const,
|
||||
html: '<link rel="webmention" href="https://example.com/webmention">',
|
||||
}));
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "test-fragment",
|
||||
capabilities: ["hooks.page-fragments:register"],
|
||||
hooks: {
|
||||
"page:fragments": createTestHook("test-fragment", fragmentHandler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
const results = await pipeline.runPageFragments({ page });
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.pluginId).toBe("test-fragment");
|
||||
expect(results[0]!.contributions).toEqual([
|
||||
{
|
||||
kind: "html",
|
||||
placement: "head",
|
||||
html: '<link rel="webmention" href="https://example.com/webmention">',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it("requires hooks.page-fragments:register capability for page:fragments", () => {
|
||||
const handler: PageFragmentHandler = vi.fn(async () => null);
|
||||
|
||||
const pluginWithoutCap = createTestPlugin({
|
||||
id: "no-cap",
|
||||
capabilities: [],
|
||||
hooks: {
|
||||
"page:fragments": createTestHook("no-cap", handler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([pluginWithoutCap], { db });
|
||||
|
||||
expect(pipeline.hasHooks("page:fragments")).toBe(false);
|
||||
});
|
||||
|
||||
it("collects external script contributions", async () => {
|
||||
const fragmentHandler: PageFragmentHandler = vi.fn(async () => ({
|
||||
kind: "external-script" as const,
|
||||
placement: "body:end" as const,
|
||||
src: "https://cdn.example.com/analytics.js",
|
||||
async: true,
|
||||
}));
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "analytics",
|
||||
capabilities: ["hooks.page-fragments:register"],
|
||||
hooks: {
|
||||
"page:fragments": createTestHook("analytics", fragmentHandler),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = new HookPipeline([plugin], { db });
|
||||
const page = createPageContext();
|
||||
|
||||
const results = await pipeline.runPageFragments({ page });
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]!.contributions[0]).toEqual({
|
||||
kind: "external-script",
|
||||
placement: "body:end",
|
||||
src: "https://cdn.example.com/analytics.js",
|
||||
async: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
314
packages/core/tests/unit/plugins/page-metadata.test.ts
Normal file
314
packages/core/tests/unit/plugins/page-metadata.test.ts
Normal file
@@ -0,0 +1,314 @@
|
||||
/**
|
||||
* Page Metadata Tests
|
||||
*
|
||||
* Tests the metadata collector for:
|
||||
* - Resolving contributions into deduplicated metadata
|
||||
* - HTML rendering with proper escaping
|
||||
* - Safe JSON-LD serialization
|
||||
* - HTML attribute escaping
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from "vitest";
|
||||
|
||||
import {
|
||||
resolvePageMetadata,
|
||||
renderPageMetadata,
|
||||
safeJsonLdSerialize,
|
||||
escapeHtmlAttr,
|
||||
} from "../../../src/page/metadata.js";
|
||||
import type { PageMetadataContribution } from "../../../src/plugins/types.js";
|
||||
|
||||
describe("resolvePageMetadata", () => {
|
||||
it("resolves meta tags correctly", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "meta", name: "description", content: "A test page" },
|
||||
{ kind: "meta", name: "robots", content: "index, follow" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.meta).toEqual([
|
||||
{ name: "description", content: "A test page" },
|
||||
{ name: "robots", content: "index, follow" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("resolves property tags correctly", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "property", property: "og:title", content: "My Page" },
|
||||
{ kind: "property", property: "og:type", content: "article" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.properties).toEqual([
|
||||
{ property: "og:title", content: "My Page" },
|
||||
{ property: "og:type", content: "article" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("resolves canonical link", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "canonical", href: "https://example.com/page" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toEqual([{ rel: "canonical", href: "https://example.com/page" }]);
|
||||
});
|
||||
|
||||
it("resolves alternate links with hreflang", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "alternate", href: "https://example.com/en/page", hreflang: "en" },
|
||||
{ kind: "link", rel: "alternate", href: "https://example.com/fr/page", hreflang: "fr" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toEqual([
|
||||
{ rel: "alternate", href: "https://example.com/en/page", hreflang: "en" },
|
||||
{ rel: "alternate", href: "https://example.com/fr/page", hreflang: "fr" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("resolves nlweb link for agent discovery", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "nlweb", href: "https://example.com/nlweb" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toEqual([{ rel: "nlweb", href: "https://example.com/nlweb" }]);
|
||||
});
|
||||
|
||||
it("resolves JSON-LD", () => {
|
||||
const graph = { "@type": "Article", name: "Test" };
|
||||
const contributions: PageMetadataContribution[] = [{ kind: "jsonld", id: "article", graph }];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.jsonld).toHaveLength(1);
|
||||
expect(result.jsonld[0]!.id).toBe("article");
|
||||
expect(JSON.parse(result.jsonld[0]!.json)).toEqual(graph);
|
||||
});
|
||||
|
||||
it("first-wins dedupe for meta by name", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "meta", name: "description", content: "First" },
|
||||
{ kind: "meta", name: "description", content: "Second" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.meta).toHaveLength(1);
|
||||
expect(result.meta[0]!.content).toBe("First");
|
||||
});
|
||||
|
||||
it("first-wins dedupe for meta by explicit key", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "meta", name: "description", content: "First", key: "seo-desc" },
|
||||
{ kind: "meta", name: "og-description", content: "Second", key: "seo-desc" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.meta).toHaveLength(1);
|
||||
expect(result.meta[0]!.content).toBe("First");
|
||||
});
|
||||
|
||||
it("first-wins dedupe for property", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "property", property: "og:title", content: "First" },
|
||||
{ kind: "property", property: "og:title", content: "Second" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.properties).toHaveLength(1);
|
||||
expect(result.properties[0]!.content).toBe("First");
|
||||
});
|
||||
|
||||
it("canonical is singleton (second canonical ignored)", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "canonical", href: "https://example.com/first" },
|
||||
{ kind: "link", rel: "canonical", href: "https://example.com/second" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toHaveLength(1);
|
||||
expect(result.links[0]!.href).toBe("https://example.com/first");
|
||||
});
|
||||
|
||||
it("alternate links deduped by hreflang", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "alternate", href: "https://example.com/en/v1", hreflang: "en" },
|
||||
{ kind: "link", rel: "alternate", href: "https://example.com/en/v2", hreflang: "en" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toHaveLength(1);
|
||||
expect(result.links[0]!.href).toBe("https://example.com/en/v1");
|
||||
});
|
||||
|
||||
it("JSON-LD deduped by id", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "jsonld", id: "article", graph: { "@type": "Article", name: "First" } },
|
||||
{ kind: "jsonld", id: "article", graph: { "@type": "Article", name: "Second" } },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.jsonld).toHaveLength(1);
|
||||
expect(JSON.parse(result.jsonld[0]!.json)).toEqual({
|
||||
"@type": "Article",
|
||||
name: "First",
|
||||
});
|
||||
});
|
||||
|
||||
it("JSON-LD without id is always appended", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "jsonld", graph: { "@type": "Article", name: "First" } },
|
||||
{ kind: "jsonld", graph: { "@type": "BreadcrumbList", name: "Second" } },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.jsonld).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("rejects non-HTTP link href (javascript:, data:, blob:)", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "canonical", href: "javascript:alert(1)" },
|
||||
{ kind: "link", rel: "alternate", href: "data:text/html,<h1>hi</h1>", hreflang: "en" },
|
||||
{ kind: "link", rel: "alternate", href: "blob:https://example.com/abc", hreflang: "fr" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("accepts valid HTTP and HTTPS hrefs", () => {
|
||||
const contributions: PageMetadataContribution[] = [
|
||||
{ kind: "link", rel: "canonical", href: "https://example.com/page" },
|
||||
{ kind: "link", rel: "alternate", href: "http://example.com/en", hreflang: "en" },
|
||||
];
|
||||
|
||||
const result = resolvePageMetadata(contributions);
|
||||
|
||||
expect(result.links).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe("renderPageMetadata", () => {
|
||||
it("renders meta tags with escaped attributes", () => {
|
||||
const html = renderPageMetadata({
|
||||
meta: [{ name: 'desc"ription', content: "A <test> & page" }],
|
||||
properties: [],
|
||||
links: [],
|
||||
jsonld: [],
|
||||
});
|
||||
|
||||
expect(html).toBe('<meta name="desc"ription" content="A <test> & page">');
|
||||
});
|
||||
|
||||
it("renders property tags", () => {
|
||||
const html = renderPageMetadata({
|
||||
meta: [],
|
||||
properties: [{ property: "og:title", content: "My Page" }],
|
||||
links: [],
|
||||
jsonld: [],
|
||||
});
|
||||
|
||||
expect(html).toBe('<meta property="og:title" content="My Page">');
|
||||
});
|
||||
|
||||
it("renders link tags with hreflang", () => {
|
||||
const html = renderPageMetadata({
|
||||
meta: [],
|
||||
properties: [],
|
||||
links: [{ rel: "alternate", href: "https://example.com/fr", hreflang: "fr" }],
|
||||
jsonld: [],
|
||||
});
|
||||
|
||||
expect(html).toBe('<link rel="alternate" href="https://example.com/fr" hreflang="fr">');
|
||||
});
|
||||
|
||||
it("renders JSON-LD script tags", () => {
|
||||
const json = JSON.stringify({ "@type": "Article" });
|
||||
const html = renderPageMetadata({
|
||||
meta: [],
|
||||
properties: [],
|
||||
links: [],
|
||||
jsonld: [{ id: "article", json }],
|
||||
});
|
||||
|
||||
expect(html).toBe(`<script type="application/ld+json">${json}</script>`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("safeJsonLdSerialize", () => {
|
||||
it("escapes </script> in nested values", () => {
|
||||
const result = safeJsonLdSerialize({ text: "</script><script>alert(1)</script>" });
|
||||
|
||||
expect(result).not.toContain("</script>");
|
||||
expect(result).toContain("\\u003c");
|
||||
expect(result).toContain("\\u003e");
|
||||
});
|
||||
|
||||
it("escapes <!-- sequences", () => {
|
||||
const result = safeJsonLdSerialize({ text: "<!-- comment -->" });
|
||||
|
||||
expect(result).not.toContain("<!--");
|
||||
expect(result).toContain("\\u003c");
|
||||
});
|
||||
|
||||
it("escapes U+2028 line separator", () => {
|
||||
const result = safeJsonLdSerialize({ text: "before\u2028after" });
|
||||
|
||||
expect(result).not.toContain("\u2028");
|
||||
expect(result).toContain("\\u2028");
|
||||
});
|
||||
|
||||
it("escapes U+2029 paragraph separator", () => {
|
||||
const result = safeJsonLdSerialize({ text: "before\u2029after" });
|
||||
|
||||
expect(result).not.toContain("\u2029");
|
||||
expect(result).toContain("\\u2029");
|
||||
});
|
||||
|
||||
it("handles normal objects correctly", () => {
|
||||
const obj = { "@type": "Article", name: "Hello World", count: 42 };
|
||||
const result = safeJsonLdSerialize(obj);
|
||||
|
||||
// The result should be parseable back to the same object
|
||||
// (angle brackets are escaped but that's fine for JSON-LD consumers)
|
||||
expect(result).toContain('"@type"');
|
||||
expect(result).toContain('"Hello World"');
|
||||
expect(result).toContain("42");
|
||||
});
|
||||
});
|
||||
|
||||
describe("escapeHtmlAttr", () => {
|
||||
it("escapes double quotes", () => {
|
||||
expect(escapeHtmlAttr('say "hello"')).toBe("say "hello"");
|
||||
});
|
||||
|
||||
it("escapes angle brackets", () => {
|
||||
expect(escapeHtmlAttr("<script>")).toBe("<script>");
|
||||
});
|
||||
|
||||
it("escapes ampersands", () => {
|
||||
expect(escapeHtmlAttr("foo & bar")).toBe("foo & bar");
|
||||
});
|
||||
|
||||
it("escapes single quotes", () => {
|
||||
expect(escapeHtmlAttr("it's here")).toBe("it's here");
|
||||
});
|
||||
|
||||
it("passes through safe strings unchanged", () => {
|
||||
expect(escapeHtmlAttr("hello world")).toBe("hello world");
|
||||
});
|
||||
});
|
||||
85
packages/core/tests/unit/plugins/page-seo.test.ts
Normal file
85
packages/core/tests/unit/plugins/page-seo.test.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { buildBlogPostingJsonLd } from "../../../src/page/jsonld.js";
|
||||
import { generateBaseSeoContributions } from "../../../src/page/seo-contributions.js";
|
||||
import type { PublicPageContext } from "../../../src/plugins/types.js";
|
||||
|
||||
function createPage(overrides: Partial<PublicPageContext> = {}): PublicPageContext {
|
||||
return {
|
||||
url: "https://example.com/posts/hello",
|
||||
path: "/posts/hello",
|
||||
locale: null,
|
||||
kind: "content",
|
||||
pageType: "article",
|
||||
title: "Hello World | My Site",
|
||||
description: "Test description",
|
||||
canonical: "https://example.com/posts/hello",
|
||||
image: "https://example.com/og.png",
|
||||
siteName: "My Site",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("page SEO metadata", () => {
|
||||
it("uses pageTitle for og:title and twitter:title", () => {
|
||||
const page = createPage({ pageTitle: "Hello World" });
|
||||
|
||||
const contributions = generateBaseSeoContributions(page);
|
||||
|
||||
expect(contributions).toContainEqual({
|
||||
kind: "property",
|
||||
property: "og:title",
|
||||
content: "Hello World",
|
||||
});
|
||||
expect(contributions).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "twitter:title",
|
||||
content: "Hello World",
|
||||
});
|
||||
});
|
||||
|
||||
it("prefers explicit seo.ogTitle over pageTitle", () => {
|
||||
const page = createPage({
|
||||
seo: { ogTitle: "Custom OG Title" },
|
||||
pageTitle: "Hello World",
|
||||
});
|
||||
|
||||
const contributions = generateBaseSeoContributions(page);
|
||||
|
||||
expect(contributions).toContainEqual({
|
||||
kind: "property",
|
||||
property: "og:title",
|
||||
content: "Custom OG Title",
|
||||
});
|
||||
expect(contributions).toContainEqual({
|
||||
kind: "meta",
|
||||
name: "twitter:title",
|
||||
content: "Custom OG Title",
|
||||
});
|
||||
});
|
||||
|
||||
it("falls back to title when pageTitle is absent", () => {
|
||||
const page = createPage();
|
||||
|
||||
const contributions = generateBaseSeoContributions(page);
|
||||
|
||||
expect(contributions).toContainEqual({
|
||||
kind: "property",
|
||||
property: "og:title",
|
||||
content: "Hello World | My Site",
|
||||
});
|
||||
});
|
||||
|
||||
it("uses pageTitle for article JSON-LD headline", () => {
|
||||
const page = createPage({
|
||||
articleMeta: { publishedTime: "2026-04-03T12:00:00.000Z" },
|
||||
pageTitle: "Hello World",
|
||||
});
|
||||
|
||||
const graph = buildBlogPostingJsonLd(page);
|
||||
|
||||
expect(graph).toMatchObject({
|
||||
headline: "Hello World",
|
||||
});
|
||||
});
|
||||
});
|
||||
332
packages/core/tests/unit/plugins/pipeline-rebuild.test.ts
Normal file
332
packages/core/tests/unit/plugins/pipeline-rebuild.test.ts
Normal file
@@ -0,0 +1,332 @@
|
||||
/**
|
||||
* Pipeline Rebuild Tests
|
||||
*
|
||||
* Verifies that rebuilding the HookPipeline after plugin enable/disable
|
||||
* correctly includes/excludes hooks from the affected plugins.
|
||||
*
|
||||
* This tests the fix for #105: disabled plugins' hooks kept firing because
|
||||
* the pipeline was constructed once at startup and never rebuilt.
|
||||
*/
|
||||
|
||||
import Database from "better-sqlite3";
|
||||
import { Kysely, SqliteDialect } from "kysely";
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
|
||||
|
||||
import { createHookPipeline, resolveExclusiveHooks } from "../../../src/plugins/hooks.js";
|
||||
import type { ResolvedPlugin, ResolvedHook, ContentHookEvent } from "../../../src/plugins/types.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
|
||||
return {
|
||||
id: overrides.id ?? "test-plugin",
|
||||
version: "1.0.0",
|
||||
capabilities: [],
|
||||
allowedHosts: [],
|
||||
storage: {},
|
||||
admin: {
|
||||
pages: [],
|
||||
widgets: [],
|
||||
},
|
||||
hooks: {},
|
||||
routes: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createTestHook<T>(
|
||||
pluginId: string,
|
||||
handler: T,
|
||||
overrides: Partial<ResolvedHook<T>> = {},
|
||||
): ResolvedHook<T> {
|
||||
return {
|
||||
pluginId,
|
||||
handler,
|
||||
priority: 100,
|
||||
timeout: 5000,
|
||||
dependencies: [],
|
||||
errorPolicy: "continue",
|
||||
exclusive: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("HookPipeline rebuild on plugin disable/enable (#105)", () => {
|
||||
let sqlite: InstanceType<typeof Database>;
|
||||
let db: Kysely<Record<string, unknown>>;
|
||||
|
||||
beforeEach(() => {
|
||||
sqlite = new Database(":memory:");
|
||||
db = new Kysely<Record<string, unknown>>({
|
||||
dialect: new SqliteDialect({ database: sqlite }),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await db.destroy();
|
||||
sqlite.close();
|
||||
});
|
||||
|
||||
it("hooks from disabled plugin do not fire after pipeline rebuild", async () => {
|
||||
const handlerA = vi.fn(async (event: ContentHookEvent) => ({
|
||||
...event.content,
|
||||
pluginA: true,
|
||||
}));
|
||||
const handlerB = vi.fn(async (event: ContentHookEvent) => ({
|
||||
...event.content,
|
||||
pluginB: true,
|
||||
}));
|
||||
|
||||
const pluginA = createTestPlugin({
|
||||
id: "plugin-a",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-a", handlerA),
|
||||
},
|
||||
});
|
||||
|
||||
const pluginB = createTestPlugin({
|
||||
id: "plugin-b",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-b", handlerB),
|
||||
},
|
||||
});
|
||||
|
||||
const allPlugins = [pluginA, pluginB];
|
||||
|
||||
// Initial pipeline with both plugins enabled
|
||||
const pipeline1 = createHookPipeline(allPlugins, { db });
|
||||
expect(pipeline1.hasHooks("content:beforeSave")).toBe(true);
|
||||
expect(pipeline1.getHookCount("content:beforeSave")).toBe(2);
|
||||
|
||||
// Run hooks — both should fire
|
||||
const result1 = await pipeline1.runContentBeforeSave({ title: "test" }, "posts", true);
|
||||
expect(handlerA).toHaveBeenCalledTimes(1);
|
||||
expect(handlerB).toHaveBeenCalledTimes(1);
|
||||
expect(result1.content).toEqual({ title: "test", pluginA: true, pluginB: true });
|
||||
|
||||
handlerA.mockClear();
|
||||
handlerB.mockClear();
|
||||
|
||||
// Simulate disabling plugin-b: rebuild pipeline with only plugin-a
|
||||
const enabledPlugins = allPlugins.filter((p) => p.id !== "plugin-b");
|
||||
const pipeline2 = createHookPipeline(enabledPlugins, { db });
|
||||
expect(pipeline2.hasHooks("content:beforeSave")).toBe(true);
|
||||
expect(pipeline2.getHookCount("content:beforeSave")).toBe(1);
|
||||
|
||||
// Run hooks — only plugin-a should fire
|
||||
const result2 = await pipeline2.runContentBeforeSave({ title: "test" }, "posts", true);
|
||||
expect(handlerA).toHaveBeenCalledTimes(1);
|
||||
expect(handlerB).not.toHaveBeenCalled();
|
||||
expect(result2.content).toEqual({ title: "test", pluginA: true });
|
||||
});
|
||||
|
||||
it("hooks from re-enabled plugin fire after pipeline rebuild", async () => {
|
||||
const handlerA = vi.fn(async (event: ContentHookEvent) => ({
|
||||
...event.content,
|
||||
pluginA: true,
|
||||
}));
|
||||
const handlerB = vi.fn(async (event: ContentHookEvent) => ({
|
||||
...event.content,
|
||||
pluginB: true,
|
||||
}));
|
||||
|
||||
const pluginA = createTestPlugin({
|
||||
id: "plugin-a",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-a", handlerA),
|
||||
},
|
||||
});
|
||||
|
||||
const pluginB = createTestPlugin({
|
||||
id: "plugin-b",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("plugin-b", handlerB),
|
||||
},
|
||||
});
|
||||
|
||||
const allPlugins = [pluginA, pluginB];
|
||||
|
||||
// Start with only plugin-a (plugin-b is disabled)
|
||||
const pipeline1 = createHookPipeline([pluginA], { db });
|
||||
const result1 = await pipeline1.runContentBeforeSave({ title: "test" }, "posts", true);
|
||||
expect(handlerA).toHaveBeenCalledTimes(1);
|
||||
expect(handlerB).not.toHaveBeenCalled();
|
||||
expect(result1.content).toEqual({ title: "test", pluginA: true });
|
||||
|
||||
handlerA.mockClear();
|
||||
|
||||
// Re-enable plugin-b: rebuild pipeline with both
|
||||
const pipeline2 = createHookPipeline(allPlugins, { db });
|
||||
const result2 = await pipeline2.runContentBeforeSave({ title: "test" }, "posts", true);
|
||||
expect(handlerA).toHaveBeenCalledTimes(1);
|
||||
expect(handlerB).toHaveBeenCalledTimes(1);
|
||||
expect(result2.content).toEqual({ title: "test", pluginA: true, pluginB: true });
|
||||
});
|
||||
|
||||
it("exclusive hook selections are re-resolved after rebuild", async () => {
|
||||
const handlerA = vi.fn().mockResolvedValue(undefined);
|
||||
const handlerB = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
const pluginA = createTestPlugin({
|
||||
id: "provider-a",
|
||||
capabilities: ["hooks.email-transport:register"],
|
||||
hooks: {
|
||||
"email:deliver": createTestHook("provider-a", handlerA, { exclusive: true }),
|
||||
},
|
||||
});
|
||||
|
||||
const pluginB = createTestPlugin({
|
||||
id: "provider-b",
|
||||
capabilities: ["hooks.email-transport:register"],
|
||||
hooks: {
|
||||
"email:deliver": createTestHook("provider-b", handlerB, { exclusive: true }),
|
||||
},
|
||||
});
|
||||
|
||||
// Both enabled — two providers, no auto-select
|
||||
const pipeline1 = createHookPipeline([pluginA, pluginB], { db });
|
||||
expect(pipeline1.getExclusiveHookProviders("email:deliver")).toHaveLength(2);
|
||||
|
||||
// Manually select provider-b (simulating admin selection)
|
||||
pipeline1.setExclusiveSelection("email:deliver", "provider-b");
|
||||
expect(pipeline1.getExclusiveSelection("email:deliver")).toBe("provider-b");
|
||||
|
||||
// Disable provider-b: rebuild with only provider-a
|
||||
const pipeline2 = createHookPipeline([pluginA], { db });
|
||||
expect(pipeline2.getExclusiveHookProviders("email:deliver")).toHaveLength(1);
|
||||
|
||||
// Run exclusive hook resolution — should auto-select the sole provider
|
||||
const options = new Map<string, string>();
|
||||
await resolveExclusiveHooks({
|
||||
pipeline: pipeline2,
|
||||
isActive: () => true,
|
||||
getOption: async (key) => options.get(key) ?? null,
|
||||
setOption: async (key, value) => {
|
||||
options.set(key, value);
|
||||
},
|
||||
deleteOption: async (key) => {
|
||||
options.delete(key);
|
||||
},
|
||||
});
|
||||
|
||||
expect(pipeline2.getExclusiveSelection("email:deliver")).toBe("provider-a");
|
||||
});
|
||||
|
||||
it("disabling all plugins with a hook removes that hook entirely", async () => {
|
||||
const handler = vi.fn(async () => undefined);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "only-plugin",
|
||||
capabilities: ["content:write"],
|
||||
hooks: {
|
||||
"content:beforeSave": createTestHook("only-plugin", handler),
|
||||
},
|
||||
});
|
||||
|
||||
// Pipeline with the plugin
|
||||
const pipeline1 = createHookPipeline([plugin], { db });
|
||||
expect(pipeline1.hasHooks("content:beforeSave")).toBe(true);
|
||||
|
||||
// Disable it: rebuild with empty list
|
||||
const pipeline2 = createHookPipeline([], { db });
|
||||
expect(pipeline2.hasHooks("content:beforeSave")).toBe(false);
|
||||
expect(pipeline2.getHookCount("content:beforeSave")).toBe(0);
|
||||
});
|
||||
|
||||
it("lifecycle hooks for disabled plugin are excluded from pipeline", async () => {
|
||||
const installHandler = vi.fn();
|
||||
const activateHandler = vi.fn();
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "lifecycle-plugin",
|
||||
hooks: {
|
||||
"plugin:install": createTestHook("lifecycle-plugin", installHandler),
|
||||
"plugin:activate": createTestHook("lifecycle-plugin", activateHandler),
|
||||
},
|
||||
});
|
||||
|
||||
// Pipeline with plugin
|
||||
const pipeline1 = createHookPipeline([plugin], { db });
|
||||
expect(pipeline1.hasHooks("plugin:install")).toBe(true);
|
||||
expect(pipeline1.hasHooks("plugin:activate")).toBe(true);
|
||||
|
||||
// Pipeline without plugin (disabled)
|
||||
const pipeline2 = createHookPipeline([], { db });
|
||||
expect(pipeline2.hasHooks("plugin:install")).toBe(false);
|
||||
expect(pipeline2.hasHooks("plugin:activate")).toBe(false);
|
||||
});
|
||||
|
||||
it("plugin:activate fires when runPluginActivate is called after pipeline rebuild", async () => {
|
||||
const activateHandler = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "my-plugin",
|
||||
hooks: {
|
||||
"plugin:activate": createTestHook("my-plugin", activateHandler),
|
||||
},
|
||||
});
|
||||
|
||||
// Simulate enabling: rebuild pipeline with plugin included, then invoke activate
|
||||
const pipeline = createHookPipeline([plugin], { db });
|
||||
await pipeline.runPluginActivate("my-plugin");
|
||||
|
||||
expect(activateHandler).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("plugin:deactivate fires when runPluginDeactivate is called before pipeline rebuild", async () => {
|
||||
const deactivateHandler = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
const plugin = createTestPlugin({
|
||||
id: "my-plugin",
|
||||
hooks: {
|
||||
"plugin:deactivate": createTestHook("my-plugin", deactivateHandler),
|
||||
},
|
||||
});
|
||||
|
||||
// Simulate disabling: invoke deactivate on the current pipeline, then rebuild without the plugin
|
||||
const pipeline = createHookPipeline([plugin], { db });
|
||||
await pipeline.runPluginDeactivate("my-plugin");
|
||||
|
||||
expect(deactivateHandler).toHaveBeenCalledTimes(1);
|
||||
|
||||
// Rebuild without the plugin — hook should no longer be registered
|
||||
const disabledPipeline = createHookPipeline([], { db });
|
||||
expect(disabledPipeline.hasHooks("plugin:deactivate")).toBe(false);
|
||||
});
|
||||
|
||||
it("plugin:activate only fires for the targeted plugin, not others", async () => {
|
||||
const activateA = vi.fn().mockResolvedValue(undefined);
|
||||
const activateB = vi.fn().mockResolvedValue(undefined);
|
||||
|
||||
const pluginA = createTestPlugin({
|
||||
id: "plugin-a",
|
||||
hooks: {
|
||||
"plugin:activate": createTestHook("plugin-a", activateA),
|
||||
},
|
||||
});
|
||||
const pluginB = createTestPlugin({
|
||||
id: "plugin-b",
|
||||
hooks: {
|
||||
"plugin:activate": createTestHook("plugin-b", activateB),
|
||||
},
|
||||
});
|
||||
|
||||
const pipeline = createHookPipeline([pluginA, pluginB], { db });
|
||||
|
||||
// Enabling only plugin-a should not fire plugin-b's activate
|
||||
await pipeline.runPluginActivate("plugin-a");
|
||||
|
||||
expect(activateA).toHaveBeenCalledTimes(1);
|
||||
expect(activateB).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user