first commit

This commit is contained in:
Matt Kane
2026-04-01 10:44:22 +01:00
commit 43fcb9a131
1789 changed files with 395041 additions and 0 deletions

View File

@@ -0,0 +1,309 @@
/**
* Integration tests for API token handlers.
*
* Tests token CRUD and resolution against a real in-memory SQLite database.
*/
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
handleApiTokenCreate,
handleApiTokenList,
handleApiTokenRevoke,
resolveApiToken,
resolveOAuthToken,
} from "../../../src/api/handlers/api-tokens.js";
import { generatePrefixedToken, TOKEN_PREFIXES } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
/** Expected prefix on raw personal access tokens. */
const PAT_PREFIX_REGEX = /^ec_pat_/;

let db: Kysely<Database>;

beforeEach(async () => {
  // Fresh in-memory database per test so state never leaks between cases.
  db = await setupTestDatabase();
  // Seed the single admin user that every test in this file operates on.
  const adminRow = {
    id: "user_1",
    email: "admin@test.com",
    name: "Admin",
    role: 50, // ADMIN
    email_verified: 1,
  };
  await db.insertInto("users").values(adminRow).execute();
});

afterEach(async () => {
  // Dispose of the database connection after each test.
  await db.destroy();
});
describe("handleApiTokenCreate", () => {
it("creates a token and returns the raw value", async () => {
const result = await handleApiTokenCreate(db, "user_1", {
name: "Test Token",
scopes: ["content:read", "content:write"],
});
expect(result.success).toBe(true);
expect(result.data).toBeDefined();
expect(result.data!.token).toMatch(PAT_PREFIX_REGEX);
expect(result.data!.info.name).toBe("Test Token");
expect(result.data!.info.scopes).toEqual(["content:read", "content:write"]);
expect(result.data!.info.userId).toBe("user_1");
expect(result.data!.info.prefix).toMatch(PAT_PREFIX_REGEX);
});
it("creates tokens with different hashes", async () => {
const result1 = await handleApiTokenCreate(db, "user_1", {
name: "Token 1",
scopes: ["content:read"],
});
const result2 = await handleApiTokenCreate(db, "user_1", {
name: "Token 2",
scopes: ["content:read"],
});
expect(result1.data!.token).not.toBe(result2.data!.token);
});
it("stores expiry date when provided", async () => {
const expiresAt = new Date(Date.now() + 86400000).toISOString();
const result = await handleApiTokenCreate(db, "user_1", {
name: "Expiring Token",
scopes: ["content:read"],
expiresAt,
});
expect(result.data!.info.expiresAt).toBe(expiresAt);
});
});
describe("handleApiTokenList", () => {
  it("lists tokens for a user", async () => {
    await handleApiTokenCreate(db, "user_1", {
      name: "Token A",
      scopes: ["content:read"],
    });
    await handleApiTokenCreate(db, "user_1", {
      name: "Token B",
      scopes: ["admin"],
    });
    const result = await handleApiTokenList(db, "user_1");
    expect(result.success).toBe(true);
    expect(result.data!.items).toHaveLength(2);
    // Sort before comparing: the handler makes no ordering guarantee here.
    const names = result.data!.items.map((t) => t.name).toSorted();
    expect(names).toEqual(["Token A", "Token B"]);
  });
  it("does not return tokens for other users", async () => {
    // Second user to verify per-user isolation of the listing.
    await db
      .insertInto("users")
      .values({
        id: "user_2",
        email: "other@test.com",
        name: "Other",
        role: 50,
        email_verified: 1,
      })
      .execute();
    await handleApiTokenCreate(db, "user_1", {
      name: "User 1 Token",
      scopes: ["content:read"],
    });
    await handleApiTokenCreate(db, "user_2", {
      name: "User 2 Token",
      scopes: ["content:read"],
    });
    const result = await handleApiTokenList(db, "user_1");
    expect(result.data!.items).toHaveLength(1);
    expect(result.data!.items[0].name).toBe("User 1 Token");
  });
  it("never returns the token hash", async () => {
    await handleApiTokenCreate(db, "user_1", {
      name: "Test",
      scopes: ["content:read"],
    });
    const result = await handleApiTokenList(db, "user_1");
    const item = result.data!.items[0];
    // Ensure no hash or raw token is exposed
    // (checks both snake_case column name and camelCase serialized name).
    expect(item).not.toHaveProperty("token_hash");
    expect(item).not.toHaveProperty("tokenHash");
    expect(item).not.toHaveProperty("token");
  });
});
describe("handleApiTokenRevoke", () => {
  it("revokes a token", async () => {
    const createResult = await handleApiTokenCreate(db, "user_1", {
      name: "To Revoke",
      scopes: ["content:read"],
    });
    const tokenId = createResult.data!.info.id;
    const result = await handleApiTokenRevoke(db, tokenId, "user_1");
    expect(result.success).toBe(true);
    // Should be gone from the list
    const list = await handleApiTokenList(db, "user_1");
    expect(list.data!.items).toHaveLength(0);
  });
  it("returns error for non-existent token", async () => {
    const result = await handleApiTokenRevoke(db, "nonexistent", "user_1");
    expect(result.success).toBe(false);
    expect(result.error!.code).toBe("NOT_FOUND");
  });
  it("cannot revoke another user's token", async () => {
    await db
      .insertInto("users")
      .values({
        id: "user_2",
        email: "other@test.com",
        name: "Other",
        role: 50,
        email_verified: 1,
      })
      .execute();
    const createResult = await handleApiTokenCreate(db, "user_1", {
      name: "User 1 Token",
      scopes: ["content:read"],
    });
    const tokenId = createResult.data!.info.id;
    // User 2 tries to revoke user 1's token.
    // NOTE(review): NOT_FOUND (not FORBIDDEN) is asserted here — presumably
    // to avoid leaking whether a token id exists to other users.
    const result = await handleApiTokenRevoke(db, tokenId, "user_2");
    expect(result.success).toBe(false);
    expect(result.error!.code).toBe("NOT_FOUND");
    // Token should still exist
    const list = await handleApiTokenList(db, "user_1");
    expect(list.data!.items).toHaveLength(1);
  });
});
describe("resolveApiToken", () => {
  it("resolves a valid token to user and scopes", async () => {
    const createResult = await handleApiTokenCreate(db, "user_1", {
      name: "Test",
      scopes: ["content:read", "media:write"],
    });
    const rawToken = createResult.data!.token;
    // Resolution takes the raw token (not the id) and looks it up by hash.
    const resolved = await resolveApiToken(db, rawToken);
    expect(resolved).not.toBeNull();
    expect(resolved!.userId).toBe("user_1");
    expect(resolved!.scopes).toEqual(["content:read", "media:write"]);
  });
  it("returns null for invalid token", async () => {
    // Well-formed prefix but no matching row in the database.
    const resolved = await resolveApiToken(db, "ec_pat_invalidtoken123");
    expect(resolved).toBeNull();
  });
  it("returns null for expired token", async () => {
    const pastDate = new Date(Date.now() - 86400000).toISOString(); // Yesterday
    const createResult = await handleApiTokenCreate(db, "user_1", {
      name: "Expired",
      scopes: ["content:read"],
      expiresAt: pastDate,
    });
    const rawToken = createResult.data!.token;
    // An expired token must resolve to null, same as an unknown token.
    const resolved = await resolveApiToken(db, rawToken);
    expect(resolved).toBeNull();
  });
  it("resolves non-expired token", async () => {
    const futureDate = new Date(Date.now() + 86400000).toISOString(); // Tomorrow
    const createResult = await handleApiTokenCreate(db, "user_1", {
      name: "Future",
      scopes: ["admin"],
      expiresAt: futureDate,
    });
    const rawToken = createResult.data!.token;
    const resolved = await resolveApiToken(db, rawToken);
    expect(resolved).not.toBeNull();
    expect(resolved!.scopes).toEqual(["admin"]);
  });
});
describe("resolveOAuthToken", () => {
  it("resolves a valid OAuth access token", async () => {
    // Insert directly since we don't have a Device Flow handler yet
    const { raw, hash } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const futureDate = new Date(Date.now() + 3600000).toISOString();
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: hash,
        token_type: "access",
        user_id: "user_1",
        scopes: JSON.stringify(["content:read"]),
        client_type: "cli",
        expires_at: futureDate,
      })
      .execute();
    const resolved = await resolveOAuthToken(db, raw);
    expect(resolved).not.toBeNull();
    expect(resolved!.userId).toBe("user_1");
    expect(resolved!.scopes).toEqual(["content:read"]);
  });
  it("returns null for expired OAuth token", async () => {
    const { raw, hash } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const pastDate = new Date(Date.now() - 3600000).toISOString();
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: hash,
        token_type: "access",
        user_id: "user_1",
        scopes: JSON.stringify(["content:read"]),
        client_type: "cli",
        expires_at: pastDate,
      })
      .execute();
    // Expired rows must not resolve.
    const resolved = await resolveOAuthToken(db, raw);
    expect(resolved).toBeNull();
  });
  it("does not resolve refresh tokens", async () => {
    // A refresh token is stored in the same table but must never grant
    // access directly — only the "access" token_type resolves.
    const { raw, hash } = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const futureDate = new Date(Date.now() + 3600000).toISOString();
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: hash,
        token_type: "refresh",
        user_id: "user_1",
        scopes: JSON.stringify(["content:read"]),
        client_type: "cli",
        expires_at: futureDate,
      })
      .execute();
    const resolved = await resolveOAuthToken(db, raw);
    expect(resolved).toBeNull();
  });
});

View File

@@ -0,0 +1,475 @@
/**
* Integration tests for OAuth 2.1 Authorization Code + PKCE handlers.
*
* Tests the full authorization code flow lifecycle against a real
* in-memory SQLite database.
*/
import { computeS256Challenge, Role } from "@emdashcms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
buildDeniedRedirect,
cleanupExpiredAuthorizationCodes,
handleAuthorizationApproval,
handleAuthorizationCodeExchange,
} from "../../../src/api/handlers/oauth-authorization.js";
import { handleOAuthClientCreate } from "../../../src/api/handlers/oauth-clients.js";
import { hashApiToken } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Expected prefixes on issued OAuth access / refresh tokens.
const ACCESS_TOKEN_PREFIX_REGEX = /^ec_oat_/;
const REFRESH_TOKEN_PREFIX_REGEX = /^ec_ort_/;
let db: Kysely<Database>;
beforeEach(async () => {
  // Fresh in-memory database per test.
  db = await setupTestDatabase();
  // Create a test user
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();
  // Register OAuth clients used by tests
  await handleOAuthClientCreate(db, {
    id: "test-client",
    name: "Test Client",
    redirectUris: ["http://127.0.0.1:8080/callback", "https://myapp.example.com/callback"],
  });
  // Minimal second client, referenced by the cleanup tests.
  await handleOAuthClientCreate(db, {
    id: "test",
    name: "Test",
    redirectUris: ["http://127.0.0.1:8080/callback"],
  });
});
afterEach(async () => {
  await db.destroy();
});
describe("Authorization Approval", () => {
  it("should create an authorization code with valid params", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write",
      state: "random-state-value",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
    if (!result.success) return;
    // The redirect must go back to the registered URI with code + state.
    const redirectUrl = new URL(result.data.redirect_url);
    expect(redirectUrl.origin).toBe("http://127.0.0.1:8080");
    expect(redirectUrl.pathname).toBe("/callback");
    expect(redirectUrl.searchParams.get("code")).toBeTruthy();
    expect(redirectUrl.searchParams.get("state")).toBe("random-state-value");
  });
  it("should reject unsupported response_type", async () => {
    // The implicit grant ("token") is not allowed under OAuth 2.1.
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "token",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_RESPONSE_TYPE");
  });
  it("should reject plain HTTP redirect to non-localhost", async () => {
    // http:// is only acceptable for loopback redirects.
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://evil.com/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REDIRECT_URI");
  });
  it("should allow HTTPS redirects", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://myapp.example.com/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
  });
  it("should reject plain code challenge method", async () => {
    // Only S256 is accepted; the "plain" PKCE method is rejected.
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: "test",
      code_challenge_method: "plain",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REQUEST");
  });
  it("should reject invalid scopes", async () => {
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "invalid:scope",
      code_challenge: "test",
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_SCOPE");
  });
});
describe("Authorization Code Exchange: Full Flow", () => {
  it("should exchange code for tokens with valid PKCE", async () => {
    // Step 1: Generate PKCE pair
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    // Step 2: Get authorization code
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write media:read",
      state: "state123",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // Step 3: Exchange code for tokens
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(true);
    if (!exchangeResult.success) return;
    expect(exchangeResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(exchangeResult.data.refresh_token).toMatch(REFRESH_TOKEN_PREFIX_REGEX);
    expect(exchangeResult.data.token_type).toBe("Bearer");
    expect(exchangeResult.data.expires_in).toBe(3600);
    expect(exchangeResult.data.scope).toBe("content:read content:write media:read");
    // Step 4: Verify tokens are stored (hashed — never the raw value)
    const accessHash = hashApiToken(exchangeResult.data.access_token);
    const accessRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", accessHash)
      .executeTakeFirst();
    expect(accessRow).toBeTruthy();
    expect(accessRow!.token_type).toBe("access");
    expect(accessRow!.user_id).toBe("user-1");
    expect(accessRow!.client_id).toBe("test-client");
    // Step 5: Authorization code is consumed (single-use)
    const codeHash = hashApiToken(code);
    const codeRow = await db
      .selectFrom("_emdash_authorization_codes")
      .selectAll()
      .where("code_hash", "=", codeHash)
      .executeTakeFirst();
    expect(codeRow).toBeUndefined();
  });
  it("should reject wrong code verifier (PKCE failure)", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // Use a DIFFERENT code verifier
    const wrongVerifier = generateCodeVerifier();
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: wrongVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("PKCE");
    // Code should be deleted after failed PKCE verification
    const codeHash = hashApiToken(code);
    const codeRow = await db
      .selectFrom("_emdash_authorization_codes")
      .selectAll()
      .where("code_hash", "=", codeHash)
      .executeTakeFirst();
    expect(codeRow).toBeUndefined();
  });
  it("should reject mismatched redirect_uri", async () => {
    // redirect_uri at exchange time must equal the one used at approval.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:9999/different",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("redirect_uri");
  });
  it("should reject mismatched client_id", async () => {
    // The code is bound to the client that requested it.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "different-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("client_id");
  });
  it("should reject expired authorization code", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    // Insert an expired code directly
    // (generateCodeVerifier doubles as a random-string generator here).
    const code = generateCodeVerifier();
    const codeHash = hashApiToken(code);
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: "test-client",
        redirect_uri: "http://127.0.0.1:8080/callback",
        user_id: "user-1",
        scopes: JSON.stringify(["content:read"]),
        code_challenge: codeChallenge,
        code_challenge_method: "S256",
        resource: null,
        expires_at: new Date(Date.now() - 1000).toISOString(), // Already expired
      })
      .execute();
    const exchangeResult = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(exchangeResult.success).toBe(false);
    if (exchangeResult.success) return;
    expect(exchangeResult.error.code).toBe("invalid_grant");
    expect(exchangeResult.error.message).toContain("expired");
  });
  it("should reject code reuse (single-use enforcement)", async () => {
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    const approvalResult = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: codeChallenge,
      code_challenge_method: "S256",
    });
    expect(approvalResult.success).toBe(true);
    if (!approvalResult.success) return;
    const redirectUrl = new URL(approvalResult.data.redirect_url);
    const code = redirectUrl.searchParams.get("code")!;
    // First exchange succeeds
    const first = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(first.success).toBe(true);
    // Second exchange with same code fails
    const second = await handleAuthorizationCodeExchange(db, {
      grant_type: "authorization_code",
      code,
      redirect_uri: "http://127.0.0.1:8080/callback",
      client_id: "test-client",
      code_verifier: codeVerifier,
    });
    expect(second.success).toBe(false);
    if (second.success) return;
    expect(second.error.code).toBe("invalid_grant");
  });
});
describe("buildDeniedRedirect", () => {
  it("should include error and state params", () => {
    // Denied approvals redirect back with the standard OAuth error params.
    const redirect = buildDeniedRedirect("http://127.0.0.1:8080/callback", "state123");
    const params = new URL(redirect).searchParams;
    expect(params.get("error")).toBe("access_denied");
    expect(params.get("error_description")).toBeTruthy();
    expect(params.get("state")).toBe("state123");
  });

  it("should omit state when not provided", () => {
    // No state in → no state param out (it must not be sent as empty).
    const redirect = buildDeniedRedirect("http://127.0.0.1:8080/callback");
    const params = new URL(redirect).searchParams;
    expect(params.get("error")).toBe("access_denied");
    expect(params.has("state")).toBe(false);
  });
});
describe("cleanupExpiredAuthorizationCodes", () => {
  it("should delete expired codes", async () => {
    // A real challenge is needed only to satisfy the column; its value
    // is irrelevant to the cleanup logic under test.
    const codeVerifier = generateCodeVerifier();
    const codeChallenge = computeS256Challenge(codeVerifier);
    // Insert an expired code
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: "expired-hash",
        client_id: "test",
        redirect_uri: "http://127.0.0.1:8080/callback",
        user_id: "user-1",
        scopes: JSON.stringify(["content:read"]),
        code_challenge: codeChallenge,
        code_challenge_method: "S256",
        resource: null,
        expires_at: new Date(Date.now() - 1000).toISOString(),
      })
      .execute();
    // Insert a valid code
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: "valid-hash",
        client_id: "test",
        redirect_uri: "http://127.0.0.1:8080/callback",
        user_id: "user-1",
        scopes: JSON.stringify(["content:read"]),
        code_challenge: codeChallenge,
        code_challenge_method: "S256",
        resource: null,
        expires_at: new Date(Date.now() + 600000).toISOString(),
      })
      .execute();
    // Cleanup reports the number of rows it removed.
    const deleted = await cleanupExpiredAuthorizationCodes(db);
    expect(deleted).toBe(1);
    // Valid code should remain
    const remaining = await db.selectFrom("_emdash_authorization_codes").selectAll().execute();
    expect(remaining).toHaveLength(1);
    expect(remaining[0]!.code_hash).toBe("valid-hash");
  });
});

View File

@@ -0,0 +1,584 @@
/**
* Integration tests for OAuth Device Flow handlers.
*
* Tests the full device flow lifecycle against a real in-memory SQLite database.
*/
import { Role } from "@emdashcms/auth";
import type { RoleLevel } from "@emdashcms/auth";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
handleDeviceAuthorize,
handleDeviceCodeRequest,
handleDeviceTokenExchange,
handleTokenRefresh,
handleTokenRevoke,
} from "../../../src/api/handlers/device-flow.js";
import { hashApiToken } from "../../../src/auth/api-tokens.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Display format of user codes: two groups of four alphanumerics, e.g. "ABCD-1234".
const USER_CODE_FORMAT_REGEX = /^[A-Z0-9]{4}-[A-Z0-9]{4}$/;
// Expected prefixes on issued OAuth access / refresh tokens.
const ACCESS_TOKEN_PREFIX_REGEX = /^ec_oat_/;
const REFRESH_TOKEN_PREFIX_REGEX = /^ec_ort_/;
// Used to strip the display hyphen when simulating sloppy user input.
const HYPHEN_REGEX = /-/g;
let db: Kysely<Database>;
beforeEach(async () => {
  // Fresh in-memory database per test.
  db = await setupTestDatabase();
  // Create a test user
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();
});
afterEach(async () => {
  await db.destroy();
});
describe("Device Code Request", () => {
  it("should create a device code with default scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.device_code).toBeTruthy();
    expect(result.data.user_code).toMatch(USER_CODE_FORMAT_REGEX);
    // The verification URI is passed through verbatim from the caller.
    expect(result.data.verification_uri).toBe("https://example.com/_emdash/device");
    expect(result.data.expires_in).toBe(900); // 15 minutes
    expect(result.data.interval).toBe(5);
  });
  it("should create a device code with custom scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { scope: "content:read media:read" },
      "https://example.com/_emdash/device",
    );
    expect(result.success).toBe(true);
    if (!result.success) return;
    // Verify scopes were stored (persisted as a JSON array string).
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", result.data.device_code)
      .executeTakeFirst();
    expect(row).toBeTruthy();
    expect(JSON.parse(row!.scopes)).toEqual(["content:read", "media:read"]);
  });
  it("should reject invalid scopes", async () => {
    const result = await handleDeviceCodeRequest(
      db,
      { scope: "invalid:scope" },
      "https://example.com/_emdash/device",
    );
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_SCOPE");
  });
});
describe("Device Flow: Full Lifecycle", () => {
  it("should complete the full device flow: code → authorize → exchange", async () => {
    // Step 1: Request device code
    const codeResult = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const { device_code, user_code } = codeResult.data;
    // Step 2: Poll before authorization → pending
    const pendingResult = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(pendingResult.success).toBe(false);
    expect(pendingResult.deviceFlowError).toBe("authorization_pending");
    // Step 3: User authorizes (admin role = 50)
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code,
    });
    expect(authResult.success).toBe(true);
    if (!authResult.success) return;
    expect(authResult.data.authorized).toBe(true);
    // Step 4: Exchange for tokens
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    expect(tokenResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(tokenResult.data.refresh_token).toMatch(REFRESH_TOKEN_PREFIX_REGEX);
    expect(tokenResult.data.token_type).toBe("Bearer");
    expect(tokenResult.data.expires_in).toBe(3600);
    expect(tokenResult.data.scope).toBeTruthy();
    // Step 5: Device code should be consumed
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    expect(row).toBeUndefined();
    // Step 6: Tokens should be stored (hashed — never the raw value)
    const accessHash = hashApiToken(tokenResult.data.access_token);
    const accessRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", accessHash)
      .executeTakeFirst();
    expect(accessRow).toBeTruthy();
    expect(accessRow!.token_type).toBe("access");
    expect(accessRow!.user_id).toBe("user-1");
    const refreshHash = hashApiToken(tokenResult.data.refresh_token);
    const refreshRow = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", refreshHash)
      .executeTakeFirst();
    expect(refreshRow).toBeTruthy();
    expect(refreshRow!.token_type).toBe("refresh");
  });
  it("should handle denied authorization", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      {},
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    // User denies
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
      action: "deny",
    });
    // The deny action itself succeeds; only `authorized` reflects the outcome.
    expect(authResult.success).toBe(true);
    if (!authResult.success) return;
    expect(authResult.data.authorized).toBe(false);
    // Exchange should return access_denied
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(false);
    expect(tokenResult.deviceFlowError).toBe("access_denied");
  });
  it("should normalize user codes (strip hyphens, case-insensitive)", async () => {
    const codeResult = await handleDeviceCodeRequest(
      db,
      {},
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    // Submit lowercase without hyphen
    const code = codeResult.data.user_code.replace(HYPHEN_REGEX, "").toLowerCase();
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: code,
    });
    expect(authResult.success).toBe(true);
  });
});
describe("Device Token Exchange: Error Cases", () => {
  it("should reject invalid grant_type", async () => {
    const result = await handleDeviceTokenExchange(db, {
      device_code: "whatever",
      grant_type: "invalid",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_GRANT_TYPE");
  });
  it("should reject unknown device codes", async () => {
    const result = await handleDeviceTokenExchange(db, {
      device_code: "nonexistent",
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
  it("should report expired device codes", async () => {
    // Create a device code that's already expired
    await db
      .insertInto("_emdash_device_codes")
      .values({
        device_code: "expired-code",
        user_code: "AAAA-BBBB",
        scopes: JSON.stringify(["content:read"]),
        status: "pending",
        expires_at: new Date(Date.now() - 1000).toISOString(),
        interval: 5,
      })
      .execute();
    const result = await handleDeviceTokenExchange(db, {
      device_code: "expired-code",
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(result.success).toBe(false);
    // "expired_token" is the RFC 8628 error name surfaced to the polling client.
    expect(result.deviceFlowError).toBe("expired_token");
  });
});
describe("Token Refresh", () => {
  it("should exchange a refresh token for a new access token", async () => {
    // Complete a device flow first to get tokens
    const codeResult = await handleDeviceCodeRequest(
      db,
      {},
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codeResult.data.user_code,
    });
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    expect(tokenResult.success).toBe(true);
    if (!tokenResult.success) return;
    // Refresh
    const refreshResult = await handleTokenRefresh(db, {
      refresh_token: tokenResult.data.refresh_token,
      grant_type: "refresh_token",
    });
    expect(refreshResult.success).toBe(true);
    if (!refreshResult.success) return;
    // A NEW access token is issued, while the refresh token is reused
    // (i.e. no refresh-token rotation in this implementation).
    expect(refreshResult.data.access_token).toMatch(ACCESS_TOKEN_PREFIX_REGEX);
    expect(refreshResult.data.access_token).not.toBe(tokenResult.data.access_token);
    expect(refreshResult.data.refresh_token).toBe(tokenResult.data.refresh_token);
    expect(refreshResult.data.expires_in).toBe(3600);
  });
  it("should reject invalid refresh tokens", async () => {
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_ort_invalid",
      grant_type: "refresh_token",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
  it("should reject wrong grant_type", async () => {
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_ort_whatever",
      grant_type: "authorization_code",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("UNSUPPORTED_GRANT_TYPE");
  });
  it("should reject wrong token prefix", async () => {
    // A personal-access-token prefix must not be accepted as a refresh token.
    const result = await handleTokenRefresh(db, {
      refresh_token: "ec_pat_notarefresh",
      grant_type: "refresh_token",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_GRANT");
  });
});
describe("Token Revoke", () => {
  /** Run the full device flow and return the issued token pair, or null on failure. */
  async function issueTokens(): Promise<{ access_token: string; refresh_token: string } | null> {
    const codes = await handleDeviceCodeRequest(
      db,
      {},
      "https://example.com/_emdash/device",
    );
    if (!codes.success) return null;
    await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: codes.data.user_code,
    });
    const tokens = await handleDeviceTokenExchange(db, {
      device_code: codes.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    return tokens.success ? tokens.data : null;
  }
  it("should revoke an access token", async () => {
    const tokens = await issueTokens();
    if (!tokens) return;
    const revoked = await handleTokenRevoke(db, { token: tokens.access_token });
    expect(revoked.success).toBe(true);
    // The stored hash row for the access token must be deleted.
    const hash = hashApiToken(tokens.access_token);
    const stored = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", hash)
      .executeTakeFirst();
    expect(stored).toBeUndefined();
  });
  it("should revoke a refresh token and its access tokens", async () => {
    const tokens = await issueTokens();
    if (!tokens) return;
    const revoked = await handleTokenRevoke(db, { token: tokens.refresh_token });
    expect(revoked.success).toBe(true);
    // Revoking the refresh token cascades: no oauth token rows remain.
    const remaining = await db
      .selectFrom("_emdash_oauth_tokens")
      .select(db.fn.count("token_hash").as("count"))
      .executeTakeFirst();
    expect(Number(remaining?.count ?? 0)).toBe(0);
  });
  it("should return success for unknown tokens (RFC 7009)", async () => {
    // RFC 7009 mandates 200 even for tokens the server has never seen.
    const res = await handleTokenRevoke(db, {
      token: "ec_oat_nonexistent",
    });
    expect(res.success).toBe(true);
  });
});
describe("Device Authorize: Error Cases", () => {
  it("should reject invalid user codes", async () => {
    const res = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: "INVALID-CODE",
    });
    expect(res.success).toBe(false);
    if (res.success) return;
    expect(res.error.code).toBe("INVALID_CODE");
  });
  it("should reject expired device codes", async () => {
    // Seed a pending device code whose expiry is already in the past.
    await db
      .insertInto("_emdash_device_codes")
      .values({
        device_code: "expired-dc",
        user_code: "CCCC-DDDD",
        scopes: JSON.stringify(["content:read"]),
        status: "pending",
        expires_at: new Date(Date.now() - 1000).toISOString(),
        interval: 5,
      })
      .execute();
    const res = await handleDeviceAuthorize(db, "user-1", Role.ADMIN, {
      user_code: "CCCC-DDDD",
    });
    expect(res.success).toBe(false);
    if (res.success) return;
    expect(res.error.code).toBe("EXPIRED_CODE");
  });
});
// ---------------------------------------------------------------------------
// Scope escalation prevention (SEC: CWE-269)
// ---------------------------------------------------------------------------
describe("Scope Clamping: Role-based scope restriction", () => {
  /**
   * Helper: run a full device flow (code request → authorize → token exchange)
   * with the given requested scopes and user role.
   *
   * Returns the space-separated scopes granted on the final token, or
   * `{ scopes: "", success: false }` if any step of the flow failed.
   */
  async function completeDeviceFlow(
    requestedScopes: string,
    userRole: RoleLevel,
  ): Promise<{ scopes: string; success: boolean }> {
    const codeResult = await handleDeviceCodeRequest(
      db,
      { scope: requestedScopes },
      "https://example.com/_emdash/device",
    );
    if (!codeResult.success) return { scopes: "", success: false };
    const authResult = await handleDeviceAuthorize(db, "user-1", userRole, {
      user_code: codeResult.data.user_code,
    });
    if (!authResult.success) return { scopes: "", success: false };
    const tokenResult = await handleDeviceTokenExchange(db, {
      device_code: codeResult.data.device_code,
      grant_type: "urn:ietf:params:oauth:grant-type:device_code",
    });
    if (!tokenResult.success) return { scopes: "", success: false };
    return { scopes: tokenResult.data.scope, success: true };
  }
  it("should strip admin scope from non-admin user tokens", async () => {
    // CONTRIBUTOR requests admin scope — this is the core attack scenario
    const result = await completeDeviceFlow("content:read content:write admin", Role.CONTRIBUTOR);
    expect(result.success).toBe(true);
    const scopes = result.scopes.split(" ");
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("content:write");
    expect(scopes).not.toContain("admin");
  });
  it("should strip schema:write from non-admin user tokens", async () => {
    // EDITOR requests schema:write — only ADMIN gets schema:write
    const result = await completeDeviceFlow("content:read schema:read schema:write", Role.EDITOR);
    expect(result.success).toBe(true);
    const scopes = result.scopes.split(" ");
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("schema:read");
    expect(scopes).not.toContain("schema:write");
  });
  it("should strip schema:read from contributor tokens", async () => {
    // CONTRIBUTOR requests schema:read — only EDITOR+ gets schema:read
    const result = await completeDeviceFlow("content:read schema:read", Role.CONTRIBUTOR);
    expect(result.success).toBe(true);
    const scopes = result.scopes.split(" ");
    expect(scopes).toContain("content:read");
    expect(scopes).not.toContain("schema:read");
  });
  it("should allow admin user to get all scopes", async () => {
    const result = await completeDeviceFlow(
      "content:read content:write media:read media:write schema:read schema:write admin",
      Role.ADMIN,
    );
    expect(result.success).toBe(true);
    const scopes = result.scopes.split(" ");
    expect(scopes).toContain("admin");
    expect(scopes).toContain("schema:write");
    expect(scopes).toContain("content:write");
  });
  it("should return INSUFFICIENT_ROLE when no scopes survive clamping", async () => {
    // SUBSCRIBER requests only privileged scopes (admin, schema:write) — nothing survives
    const codeResult = await handleDeviceCodeRequest(
      db,
      { scope: "admin schema:write" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    const authResult = await handleDeviceAuthorize(db, "user-1", Role.SUBSCRIBER, {
      user_code: codeResult.data.user_code,
    });
    expect(authResult.success).toBe(false);
    if (authResult.success) return;
    expect(authResult.error.code).toBe("INSUFFICIENT_ROLE");
  });
  it("should clamp scopes in stored device code at authorize time", async () => {
    // Verify that the stored scopes are clamped, not just the response
    const codeResult = await handleDeviceCodeRequest(
      db,
      { scope: "content:read content:write schema:write admin" },
      "https://example.com/_emdash/device",
    );
    expect(codeResult.success).toBe(true);
    if (!codeResult.success) return;
    // Before authorize: scopes include admin and schema:write
    const beforeRow = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", codeResult.data.device_code)
      .executeTakeFirst();
    expect(JSON.parse(beforeRow!.scopes)).toContain("admin");
    expect(JSON.parse(beforeRow!.scopes)).toContain("schema:write");
    // Authorize as CONTRIBUTOR — admin and schema:write must be stripped
    await handleDeviceAuthorize(db, "user-1", Role.CONTRIBUTOR, {
      user_code: codeResult.data.user_code,
    });
    // After authorize: scopes should be clamped in DB
    const afterRow = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", codeResult.data.device_code)
      .executeTakeFirst();
    const storedScopes = JSON.parse(afterRow!.scopes) as string[];
    expect(storedScopes).toContain("content:read");
    expect(storedScopes).toContain("content:write");
    expect(storedScopes).not.toContain("admin");
    expect(storedScopes).not.toContain("schema:write");
  });
  it("should allow editor to get content + media + schema:read scopes", async () => {
    const result = await completeDeviceFlow(
      "content:read content:write media:read media:write schema:read",
      Role.EDITOR,
    );
    expect(result.success).toBe(true);
    const scopes = result.scopes.split(" ");
    expect(scopes).toContain("content:read");
    expect(scopes).toContain("content:write");
    expect(scopes).toContain("media:read");
    expect(scopes).toContain("media:write");
    expect(scopes).toContain("schema:read");
  });
});

View File

@@ -0,0 +1,342 @@
/**
* Integration tests for OAuth client management and redirect URI allowlist.
*
* Tests that the authorization endpoint rejects unregistered clients and
* redirect URIs not in the client's registered set.
*/
import { computeS256Challenge, Role } from "@emdashcms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { handleAuthorizationApproval } from "../../../src/api/handlers/oauth-authorization.js";
import {
handleOAuthClientCreate,
handleOAuthClientDelete,
handleOAuthClientGet,
handleOAuthClientList,
handleOAuthClientUpdate,
lookupOAuthClient,
validateClientRedirectUri,
} from "../../../src/api/handlers/oauth-clients.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Shared database handle; re-created before every test so each test is isolated.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
  // Create a test user
  // Role 50 is ADMIN (matches the "// ADMIN" annotation used elsewhere in this
  // suite); handlers under test reference this seeded user by the id "user-1".
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50,
      email_verified: 1,
    })
    .execute();
});
afterEach(async () => {
  // Dispose the in-memory SQLite connection so tests do not leak handles.
  await db.destroy();
});
// ---------------------------------------------------------------------------
// validateClientRedirectUri (unit-level)
// ---------------------------------------------------------------------------
describe("validateClientRedirectUri", () => {
  it("should return null for a registered redirect URI", () => {
    const registered = [
      "https://myapp.example.com/callback",
      "http://127.0.0.1:8080/callback",
    ];
    // An exact allowlist hit yields no error.
    expect(validateClientRedirectUri("https://myapp.example.com/callback", registered)).toBeNull();
  });
  it("should return error for an unregistered redirect URI", () => {
    const err = validateClientRedirectUri("https://evil.com/callback", [
      "https://myapp.example.com/callback",
    ]);
    expect(err).toBeTruthy();
  });
  it("should require exact match (no prefix matching)", () => {
    // A longer path under a registered URI must still be rejected.
    const err = validateClientRedirectUri("https://myapp.example.com/callback/extra", [
      "https://myapp.example.com/callback",
    ]);
    expect(err).toBeTruthy();
  });
  it("should require exact match (no query string tolerance)", () => {
    // Any appended query string breaks the exact-match requirement.
    const err = validateClientRedirectUri("https://myapp.example.com/callback?foo=bar", [
      "https://myapp.example.com/callback",
    ]);
    expect(err).toBeTruthy();
  });
});
// ---------------------------------------------------------------------------
// OAuth Client CRUD
// ---------------------------------------------------------------------------
describe("OAuth Client CRUD", () => {
  /** Register the canonical "test-client" with a single redirect URI. */
  function createTestClient() {
    return handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
    });
  }
  it("should create a client", async () => {
    const result = await createTestClient();
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.id).toBe("test-client");
    expect(result.data.name).toBe("Test Client");
    expect(result.data.redirectUris).toEqual(["https://myapp.example.com/callback"]);
  });
  it("should reject duplicate client IDs", async () => {
    await createTestClient();
    // Same id again — even with different details — must conflict.
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Duplicate Client",
      redirectUris: ["https://other.example.com/callback"],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("CONFLICT");
  });
  it("should reject clients with empty redirect URIs", async () => {
    const result = await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: [],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
  });
  it("should list clients", async () => {
    await handleOAuthClientCreate(db, {
      id: "client-1",
      name: "Client 1",
      redirectUris: ["https://one.example.com/callback"],
    });
    await handleOAuthClientCreate(db, {
      id: "client-2",
      name: "Client 2",
      redirectUris: ["https://two.example.com/callback"],
    });
    const result = await handleOAuthClientList(db);
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.items).toHaveLength(2);
  });
  it("should get a client by ID", async () => {
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["https://myapp.example.com/callback"],
      scopes: ["content:read"],
    });
    const result = await handleOAuthClientGet(db, "test-client");
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.id).toBe("test-client");
    expect(result.data.scopes).toEqual(["content:read"]);
  });
  it("should return NOT_FOUND for unknown client", async () => {
    const result = await handleOAuthClientGet(db, "unknown");
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("NOT_FOUND");
  });
  it("should update a client", async () => {
    await createTestClient();
    const result = await handleOAuthClientUpdate(db, "test-client", {
      name: "Updated Client",
      redirectUris: ["https://myapp.example.com/callback", "https://myapp.example.com/callback2"],
    });
    expect(result.success).toBe(true);
    if (!result.success) return;
    expect(result.data.name).toBe("Updated Client");
    expect(result.data.redirectUris).toHaveLength(2);
  });
  it("should reject update with empty redirect URIs", async () => {
    await createTestClient();
    const result = await handleOAuthClientUpdate(db, "test-client", {
      redirectUris: [],
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("VALIDATION_ERROR");
  });
  it("should delete a client", async () => {
    await createTestClient();
    const result = await handleOAuthClientDelete(db, "test-client");
    expect(result.success).toBe(true);
    // A subsequent lookup must fail.
    const getResult = await handleOAuthClientGet(db, "test-client");
    expect(getResult.success).toBe(false);
  });
  it("should return NOT_FOUND when deleting unknown client", async () => {
    const result = await handleOAuthClientDelete(db, "unknown");
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("NOT_FOUND");
  });
});
// ---------------------------------------------------------------------------
// lookupOAuthClient
// ---------------------------------------------------------------------------
describe("lookupOAuthClient", () => {
  it("should return redirect URIs for a registered client", async () => {
    const uris = ["https://myapp.example.com/callback", "http://127.0.0.1:8080/callback"];
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: uris,
    });
    const found = await lookupOAuthClient(db, "test-client");
    expect(found).toBeTruthy();
    // The registered URI set comes back exactly as stored.
    expect(found!.redirectUris).toEqual(uris);
  });
  it("should return null for an unregistered client", async () => {
    expect(await lookupOAuthClient(db, "unknown-client")).toBeNull();
  });
});
// ---------------------------------------------------------------------------
// Authorization with client redirect URI validation
// ---------------------------------------------------------------------------
describe("Authorization with redirect URI allowlist", () => {
  /** Generate a fresh PKCE S256 challenge (the verifier itself is not needed here). */
  function freshChallenge(): string {
    return computeS256Challenge(generateCodeVerifier());
  }
  beforeEach(async () => {
    // Register a client with specific redirect URIs
    await handleOAuthClientCreate(db, {
      id: "test-client",
      name: "Test Client",
      redirectUris: ["http://127.0.0.1:8080/callback", "https://myapp.example.com/callback"],
    });
  });
  it("should approve authorization with a registered redirect URI", async () => {
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read content:write",
      state: "random-state-value",
      code_challenge: freshChallenge(),
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
    if (!result.success) return;
    // Redirect goes back to the registered origin with an authorization code.
    const redirectUrl = new URL(result.data.redirect_url);
    expect(redirectUrl.origin).toBe("http://127.0.0.1:8080");
    expect(redirectUrl.searchParams.get("code")).toBeTruthy();
  });
  it("should reject authorization with unregistered redirect URI", async () => {
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://evil.example.com/callback",
      scope: "content:read",
      code_challenge: freshChallenge(),
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_REDIRECT_URI");
    expect(result.error.message).toContain("not registered");
  });
  it("should reject authorization with unknown client_id", async () => {
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "unknown-client",
      redirect_uri: "http://127.0.0.1:8080/callback",
      scope: "content:read",
      code_challenge: freshChallenge(),
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(false);
    if (result.success) return;
    expect(result.error.code).toBe("INVALID_CLIENT");
  });
  it("should accept HTTPS redirect URI in allowlist", async () => {
    const result = await handleAuthorizationApproval(db, "user-1", Role.ADMIN, {
      response_type: "code",
      client_id: "test-client",
      redirect_uri: "https://myapp.example.com/callback",
      scope: "content:read",
      code_challenge: freshChallenge(),
      code_challenge_method: "S256",
    });
    expect(result.success).toBe(true);
  });
});

View File

@@ -0,0 +1,338 @@
/**
* Integration tests for database-backed rate limiting.
*
* Tests the rate limiter utility and slow_down enforcement
* against a real in-memory SQLite database.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
handleDeviceCodeRequest,
handleDeviceTokenExchange,
} from "../../../src/api/handlers/device-flow.js";
import {
checkRateLimit,
cleanupExpiredRateLimits,
getClientIp,
} from "../../../src/auth/rate-limit.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Shared in-memory database, rebuilt before each test for isolation.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
});
afterEach(async () => {
  // Close the connection so tests do not leak database handles.
  await db.destroy();
});
// ---------------------------------------------------------------------------
// Rate Limiter
// ---------------------------------------------------------------------------
describe("checkRateLimit", () => {
  it("should allow requests within the limit", async () => {
    // Three requests against a limit of 3 — all allowed, counts 1..3.
    for (let attempt = 1; attempt <= 3; attempt++) {
      const res = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
      expect(res.allowed).toBe(true);
      expect(res.count).toBe(attempt);
    }
  });
  it("should reject requests exceeding the limit", async () => {
    // Use up the limit
    for (let i = 0; i < 3; i++) {
      await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    }
    // 4th request should be rejected
    const res = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 3, 60);
    expect(res.allowed).toBe(false);
    expect(res.count).toBe(4);
    expect(res.limit).toBe(3);
  });
  it("should track limits per IP independently", async () => {
    // Exhaust the limit for IP A…
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    const blockedA = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 2, 60);
    expect(blockedA.allowed).toBe(false);
    // …while IP B starts with a fresh counter.
    const freshB = await checkRateLimit(db, "5.6.7.8", "test/endpoint", 2, 60);
    expect(freshB.allowed).toBe(true);
    expect(freshB.count).toBe(1);
  });
  it("should track limits per endpoint independently", async () => {
    // Exhaust endpoint A…
    await checkRateLimit(db, "1.2.3.4", "endpoint-a", 1, 60);
    const blockedA = await checkRateLimit(db, "1.2.3.4", "endpoint-a", 1, 60);
    expect(blockedA.allowed).toBe(false);
    // …but endpoint B keeps its own counter.
    const freshB = await checkRateLimit(db, "1.2.3.4", "endpoint-b", 1, 60);
    expect(freshB.allowed).toBe(true);
  });
  it("should skip rate limiting when IP is null", async () => {
    // Even after many calls, null IP is always allowed
    for (let i = 0; i < 10; i++) {
      const res = await checkRateLimit(db, null, "test/endpoint", 1, 60);
      expect(res.allowed).toBe(true);
      expect(res.count).toBe(0);
    }
  });
  it("should reset after window expires", async () => {
    // Use a 1-second window
    await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    const blocked = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    expect(blocked.allowed).toBe(false);
    // Wait for the window to expire (advance past the 1-second boundary)
    await new Promise((resolve) => setTimeout(resolve, 1100));
    const allowed = await checkRateLimit(db, "1.2.3.4", "test/endpoint", 1, 1);
    expect(allowed.allowed).toBe(true);
    expect(allowed.count).toBe(1);
  });
});
// ---------------------------------------------------------------------------
// IP Extraction
// ---------------------------------------------------------------------------
describe("getClientIp", () => {
  /**
   * Create a request with a fake `cf` object to simulate Cloudflare.
   * `Request` has no `cf` property in its type, so we widen through `unknown`.
   */
  function cfRequest(url: string, init?: RequestInit): Request {
    const req = new Request(url, init);
    // FIX: the disable directive previously read `typescript-eslint(no-unsafe-type-assertion)`,
    // which is not valid ESLint rule-name syntax, so the suppression never applied.
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- test helper
    (req as unknown as { cf: Record<string, unknown> }).cf = { country: "US" };
    return req;
  }
  it("should extract IP from CF-Connecting-IP on Cloudflare", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "cf-connecting-ip": "198.51.100.1" },
    });
    expect(getClientIp(request)).toBe("198.51.100.1");
  });
  it("should extract IP from X-Forwarded-For on Cloudflare", () => {
    // The first (left-most) entry of the XFF chain is used.
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "203.0.113.50, 70.41.3.18, 150.172.238.178" },
    });
    expect(getClientIp(request)).toBe("203.0.113.50");
  });
  it("should return null when not on Cloudflare (no cf object)", () => {
    const request = new Request("http://localhost/test");
    expect(getClientIp(request)).toBeNull();
  });
  it("should return null when not on Cloudflare even with XFF header", () => {
    // XFF is spoofable; it must be ignored without the trusted `cf` marker.
    const request = new Request("http://localhost/test", {
      headers: { "x-forwarded-for": "203.0.113.50" },
    });
    expect(getClientIp(request)).toBeNull();
  });
  it("should reject non-IP values in X-Forwarded-For", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "<script>alert(1)</script>" },
    });
    expect(getClientIp(request)).toBeNull();
  });
  it("should handle IPv6 addresses on Cloudflare", () => {
    const request = cfRequest("http://localhost/test", {
      headers: { "x-forwarded-for": "2001:db8::1" },
    });
    expect(getClientIp(request)).toBe("2001:db8::1");
  });
});
// ---------------------------------------------------------------------------
// Cleanup
// ---------------------------------------------------------------------------
describe("cleanupExpiredRateLimits", () => {
  it("should delete expired entries", async () => {
    // One entry two hours old (expired) and one in the current minute window.
    const staleWindow = new Date(Date.now() - 7200 * 1000).toISOString();
    const freshWindow = new Date(Math.floor(Date.now() / (60 * 1000)) * 60 * 1000).toISOString();
    await db
      .insertInto("_emdash_rate_limits")
      .values([
        { key: "old:entry", window: staleWindow, count: 5 },
        { key: "current:entry", window: freshWindow, count: 2 },
      ])
      .execute();
    // A one-hour TTL removes exactly the stale entry.
    const removed = await cleanupExpiredRateLimits(db, 3600);
    expect(removed).toBe(1);
    const remaining = await db.selectFrom("_emdash_rate_limits").selectAll().execute();
    expect(remaining).toHaveLength(1);
    expect(remaining[0]?.key).toBe("current:entry");
  });
});
// ---------------------------------------------------------------------------
// RFC 8628 slow_down
// ---------------------------------------------------------------------------
describe("Device Token Exchange: slow_down enforcement", () => {
  const GRANT_TYPE = "urn:ietf:params:oauth:grant-type:device_code";
  /** Request a fresh device code; returns null if the request failed. */
  async function newDeviceCode(): Promise<string | null> {
    const res = await handleDeviceCodeRequest(
      db,
      { client_id: "emdash-cli" },
      "https://example.com/_emdash/device",
    );
    return res.success ? res.data.device_code : null;
  }
  /** Read the poll interval currently stored for a device code. */
  async function storedInterval(device_code: string) {
    const row = await db
      .selectFrom("_emdash_device_codes")
      .select("interval")
      .where("device_code", "=", device_code)
      .executeTakeFirst();
    return row?.interval;
  }
  it("should return slow_down when polling faster than interval", async () => {
    const device_code = await newDeviceCode();
    expect(device_code).not.toBeNull();
    if (!device_code) return;
    // First poll — sets last_polled_at, returns authorization_pending
    const first = await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(first.success).toBe(false);
    expect(first.deviceFlowError).toBe("authorization_pending");
    // Second poll immediately — should get slow_down with new interval
    const second = await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(second.success).toBe(false);
    expect(second.deviceFlowError).toBe("slow_down");
    // Default interval (5) + SLOW_DOWN_INCREMENT (5) = 10
    expect(second.deviceFlowInterval).toBe(10);
  });
  it("should increase interval by 5s on each slow_down", async () => {
    const device_code = await newDeviceCode();
    expect(device_code).not.toBeNull();
    if (!device_code) return;
    // Baseline poll, then two rapid polls — stored interval should step 5 → 10 → 15.
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(await storedInterval(device_code)).toBe(10);
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(await storedInterval(device_code)).toBe(15);
  });
  it("should cap slow_down interval at 60 seconds", async () => {
    const device_code = await newDeviceCode();
    expect(device_code).not.toBeNull();
    if (!device_code) return;
    // Baseline poll, then set interval just below the cap so the next
    // slow_down increment would otherwise exceed it.
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    await db
      .updateTable("_emdash_device_codes")
      .set({ interval: 58 })
      .where("device_code", "=", device_code)
      .execute();
    // Rapid poll — triggers slow_down, interval should cap at 60 not 63
    const poll = await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(poll.deviceFlowInterval).toBe(60);
    expect(await storedInterval(device_code)).toBe(60);
  });
  it("should not return slow_down when polling at or above the interval", async () => {
    const device_code = await newDeviceCode();
    expect(device_code).not.toBeNull();
    if (!device_code) return;
    await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    // Backdate last_polled_at so the next poll is comfortably past the 5s interval.
    await db
      .updateTable("_emdash_device_codes")
      .set({
        last_polled_at: new Date(Date.now() - 10_000).toISOString(),
      })
      .where("device_code", "=", device_code)
      .execute();
    const poll = await handleDeviceTokenExchange(db, { device_code, grant_type: GRANT_TYPE });
    expect(poll.success).toBe(false);
    // Should be authorization_pending, not slow_down
    expect(poll.deviceFlowError).toBe("authorization_pending");
  });
});

View File

@@ -0,0 +1,316 @@
/**
* E2E tests for CLI commands against a real Astro dev server.
*
* Shells out to the actual `emdash` binary with --url and --token
* flags, verifying real command output and exit codes.
*
* Runs by default. Requires built artifacts (auto-builds if missing).
*/
import { execFile } from "node:child_process";
import { resolve } from "node:path";
import { promisify } from "node:util";
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
// Promisified execFile: resolves with { stdout, stderr }, rejects on non-zero exit.
const exec = promisify(execFile);
const PORT = 4398; // Different port from client integration tests
// Generous server-startup timeout — per the file header, missing build
// artifacts are auto-built on first run, which can be slow.
const TIMEOUT = 60_000;
// Path to the built CLI binary
const CLI_BIN = resolve(import.meta.dirname, "../../../dist/cli/index.mjs");
describe("CLI Integration", () => {
// Server context shared by every test in this suite (started once in beforeAll).
let ctx: TestServerContext;
beforeAll(async () => {
  assertNodeVersion();
  ctx = await createTestServer({ port: PORT });
}, TIMEOUT);
afterAll(async () => {
  // Optional chaining: ctx stays undefined if beforeAll failed before assignment.
  await ctx?.cleanup();
});
/** Run an emdash CLI command and return stdout */
// Every invocation appends --url/--token (auth against the test server)
// and --json (machine-parseable output); rejects if the process exits non-zero.
async function cli(...args: string[]): Promise<string> {
  const { stdout } = await exec(
    "node",
    [CLI_BIN, ...args, "--url", ctx.baseUrl, "--token", ctx.token, "--json"],
    {
      timeout: 15_000,
    },
  );
  return stdout;
}
/** Run CLI and parse JSON output */
async function cliJson<T = unknown>(...args: string[]): Promise<T> {
  const stdout = await cli(...args);
  return JSON.parse(stdout) as T;
}
// -----------------------------------------------------------------------
// Schema commands
// -----------------------------------------------------------------------
describe("schema", () => {
  it("lists collections", async () => {
    const collections = await cliJson<{ slug: string }[]>("schema", "list");
    expect(Array.isArray(collections)).toBe(true);
    const slugs = collections.map((c) => c.slug);
    expect(slugs).toContain("posts");
    expect(slugs).toContain("pages");
  });
  it("gets a single collection", async () => {
    const posts = await cliJson<{ slug: string; label: string }>("schema", "get", "posts");
    expect(posts.slug).toBe("posts");
    expect(posts.label).toBe("Posts");
  });
  it("creates and deletes a collection", async () => {
    const created = await cliJson<{ slug: string }>(
      "schema",
      "create",
      "cli_temp",
      "--label",
      "CLI Temp",
    );
    expect(created.slug).toBe("cli_temp");
    // The new collection shows up in the listing…
    let slugs = (await cliJson<{ slug: string }[]>("schema", "list")).map((c) => c.slug);
    expect(slugs).toContain("cli_temp");
    // …and disappears after a forced delete.
    await cli("schema", "delete", "cli_temp", "--force");
    slugs = (await cliJson<{ slug: string }[]>("schema", "list")).map((c) => c.slug);
    expect(slugs).not.toContain("cli_temp");
  });
  it("adds and removes fields", async () => {
    // Work in a throwaway collection so other tests are unaffected.
    await cli("schema", "create", "cli_fields", "--label", "Fields Test");
    const field = await cliJson<{ slug: string; type: string }>(
      "schema",
      "add-field",
      "cli_fields",
      "name",
      "--type",
      "string",
      "--label",
      "Name",
    );
    expect(field.slug).toBe("name");
    expect(field.type).toBe("string");
    await cli("schema", "remove-field", "cli_fields", "name");
    // Clean up
    await cli("schema", "delete", "cli_fields", "--force");
  });
});
// -----------------------------------------------------------------------
// Content commands
// -----------------------------------------------------------------------
describe("content", () => {
it("lists content", async () => {
const result = await cliJson<{ items: { data: Record<string, unknown> }[] }>(
"content",
"list",
"posts",
);
expect(result.items.length).toBeGreaterThanOrEqual(2);
});
it("gets content by id", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const result = await cliJson<{ data: { title: string } }>("content", "get", "posts", postId);
expect(result.data.title).toBe("First Post");
});
it("creates, updates, and deletes content", async () => {
// Create
const created = await cliJson<{ id: string; slug: string }>(
"content",
"create",
"posts",
"--data",
JSON.stringify({ title: "CLI Post", excerpt: "From CLI" }),
"--slug",
"cli-post",
);
expect(created.id).toBeDefined();
expect(created.slug).toBe("cli-post");
// Update (get first to obtain _rev, then update with it)
const fetched = await cliJson<{ _rev: string }>("content", "get", "posts", created.id);
const updated = await cliJson<{ data: { title: string } }>(
"content",
"update",
"posts",
created.id,
"--rev",
fetched._rev,
"--data",
JSON.stringify({ title: "Updated CLI Post" }),
);
expect(updated.data.title).toBe("Updated CLI Post");
// Delete
await cli("content", "delete", "posts", created.id);
});
it("publishes and unpublishes content", async () => {
const item = await cliJson<{ id: string }>(
"content",
"create",
"posts",
"--data",
JSON.stringify({ title: "Pub Test" }),
);
await cli("content", "publish", "posts", item.id);
await cli("content", "unpublish", "posts", item.id);
// Clean up
await cli("content", "delete", "posts", item.id);
});
});
// -----------------------------------------------------------------------
// Content lifecycle: schedule and restore
// -----------------------------------------------------------------------
describe("content lifecycle", () => {
it("schedules content for publishing", async () => {
const item = await cliJson<{ id: string }>(
"content",
"create",
"posts",
"--data",
JSON.stringify({ title: "CLI Schedule Test" }),
);
// Schedule does not produce JSON output, just a success message
await cli("content", "schedule", "posts", item.id, "--at", "2027-06-01T09:00:00Z");
// Verify via get
const fetched = await cliJson<{ scheduledAt: string }>("content", "get", "posts", item.id);
expect(fetched.scheduledAt).toBe("2027-06-01T09:00:00Z");
// Clean up
await cli("content", "delete", "posts", item.id);
});
it("restores a trashed item", async () => {
const item = await cliJson<{ id: string }>(
"content",
"create",
"posts",
"--data",
JSON.stringify({ title: "CLI Restore Test" }),
);
// Delete (soft trash)
await cli("content", "delete", "posts", item.id);
// Restore
await cli("content", "restore", "posts", item.id);
// Should be accessible again (auto-published before deletion, so restored as published)
const fetched = await cliJson<{ status: string }>("content", "get", "posts", item.id);
expect(fetched.status).toBe("published");
// Final cleanup
await cli("content", "delete", "posts", item.id);
});
});
// -----------------------------------------------------------------------
// Media commands
// -----------------------------------------------------------------------
describe("media", () => {
it("uploads, lists, gets, and deletes media", async () => {
// Create a temp file to upload
const { writeFileSync } = await import("node:fs");
const { join } = await import("node:path");
const { tmpdir } = await import("node:os");
// 1x1 PNG pixel
const pngBytes = Buffer.from([
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44,
0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90,
0x77, 0x53, 0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8,
0xcf, 0xc0, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xe2, 0x21, 0xbc, 0x33, 0x00, 0x00, 0x00,
0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
]);
const tmpFile = join(tmpdir(), "emdash-cli-test.png");
writeFileSync(tmpFile, pngBytes);
// Upload
const uploaded = await cliJson<{ id: string; filename: string }>(
"media",
"upload",
tmpFile,
"--alt",
"CLI test image",
);
expect(uploaded.id).toBeDefined();
expect(uploaded.filename).toBe("emdash-cli-test.png");
// List
const list = await cliJson<{ items: { id: string }[] }>("media", "list");
const ids = list.items.map((m) => m.id);
expect(ids).toContain(uploaded.id);
// Get
const fetched = await cliJson<{ id: string; filename: string }>("media", "get", uploaded.id);
expect(fetched.id).toBe(uploaded.id);
// Delete
await cli("media", "delete", uploaded.id);
// Clean up temp file
const { unlinkSync } = await import("node:fs");
unlinkSync(tmpFile);
});
});
// -----------------------------------------------------------------------
// Search command
// -----------------------------------------------------------------------
describe("search", () => {
it("searches content", async () => {
// Search should work even if no results (the command shouldn't error)
const result = await cliJson<unknown[]>("search", "First Post");
expect(Array.isArray(result)).toBe(true);
});
});
// -----------------------------------------------------------------------
// Auth commands
// -----------------------------------------------------------------------
describe("auth", () => {
it("whoami returns user info with token auth", async () => {
const result = await cliJson<{ email: string; role: string }>("whoami");
expect(result.email).toBe("dev@emdash.local");
expect(result.role).toBe("admin");
});
});
});

View File

@@ -0,0 +1,498 @@
/**
* Integration tests for EmDashClient.
*
* Tests full CRUD lifecycles against a mock HTTP backend that simulates
* the real API behavior including _rev tokens, schema caching, and
* content state transitions.
*/
import { describe, it, expect } from "vitest";
import { EmDashClient, EmDashApiError } from "../../../src/client/index.js";
import type { Interceptor } from "../../../src/client/transport.js";
// ---------------------------------------------------------------------------
// Simulated backend
// ---------------------------------------------------------------------------
// Route-matching patterns used by the mock backend's URL dispatcher below.
const COLLECTION_MATCH_REGEX = /^\/schema\/collections\/([^/]+)$/;
const CONTENT_LIST_REGEX = /^\/content\/([^/]+)$/;
const CONTENT_ITEM_REGEX = /^\/content\/([^/]+)\/([^/]+)$/;
const CONTENT_ACTION_REGEX = /^\/content\/([^/]+)\/([^/]+)\/(publish|unpublish|schedule|restore)$/;

/**
 * Shape of a content item held in the mock backend's in-memory store.
 * Field names appear to mirror what the real API returns for an entry
 * (TODO confirm against the server's content types).
 */
interface StoredItem {
  id: string;
  type: string; // collection slug the item belongs to
  slug: string | null;
  status: string; // e.g. "draft", "published", "trashed"
  data: Record<string, unknown>;
  authorId: string | null;
  createdAt: string; // ISO-8601 timestamps
  updatedAt: string;
  publishedAt: string | null;
  scheduledAt: string | null;
  liveRevisionId: string | null;
  draftRevisionId: string | null;
  version: number; // incremented on each update; feeds encodeRev()
}
/**
 * Derive an opaque `_rev` token for an item from its version counter and
 * last-updated timestamp, base64-encoded. Any update bumps `version` and
 * `updatedAt`, so a stale token no longer matches.
 */
function encodeRev(item: StoredItem): string {
  const fingerprint = [item.version, item.updatedAt].join(":");
  return btoa(fingerprint);
}
/**
 * Build a JSON Response following the standard API envelope: success bodies
 * are wrapped as `{ data: body }`, while error bodies (status >= 400) are
 * serialized as-is so clients see the `{ error: ... }` shape directly.
 */
function jsonRes(body: unknown, status = 200): Response {
  const isError = status >= 400;
  const payload = isError ? body : { data: body };
  const headers = { "Content-Type": "application/json" };
  return new Response(JSON.stringify(payload), { status, headers });
}
/**
* A stateful mock backend that simulates EmDash's REST API.
* Supports schema, content CRUD, _rev tokens, and conflict detection.
*/
function createStatefulBackend() {
  // In-memory "database": collections keyed by slug, content keyed by id.
  const collections = new Map<
    string,
    {
      slug: string;
      label: string;
      labelSingular: string;
      fields: Array<{ slug: string; type: string; label: string; required?: boolean }>;
    }
  >();
  const content = new Map<string, StoredItem>();
  let idCounter = 0;
  // Seed a collection
  collections.set("posts", {
    slug: "posts",
    label: "Posts",
    labelSingular: "Post",
    fields: [
      { slug: "title", type: "string", label: "Title", required: true },
      { slug: "body", type: "portableText", label: "Body" },
      { slug: "excerpt", type: "text", label: "Excerpt" },
    ],
  });
  // The interceptor dispatches on method + path and answers every request
  // itself, so no real network traffic leaves the client.
  const interceptor: Interceptor = async (req) => {
    const url = new URL(req.url);
    // Strip the API mount prefix so route regexes match bare paths.
    const path = url.pathname.replace("/_emdash/api", "");
    // --- Schema routes ---
    if (req.method === "GET" && path === "/schema/collections") {
      return jsonRes({
        items: Array.from(collections.values(), ({ slug, label, labelSingular }) => ({
          slug,
          label,
          labelSingular,
          supports: [],
        })),
      });
    }
    const colMatch = path.match(COLLECTION_MATCH_REGEX);
    if (req.method === "GET" && colMatch) {
      const col = collections.get(colMatch[1]);
      if (!col) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
      return jsonRes({ item: { ...col, supports: [] } });
    }
    // --- Manifest ---
    if (req.method === "GET" && path === "/manifest") {
      // Re-shape collections into the manifest format (fields keyed by slug).
      const cols: Record<string, unknown> = {};
      for (const [slug, col] of collections) {
        const fields: Record<string, unknown> = {};
        for (const f of col.fields) {
          fields[f.slug] = { kind: f.type, label: f.label, required: f.required };
        }
        cols[slug] = {
          label: col.label,
          labelSingular: col.labelSingular,
          supports: [],
          fields,
        };
      }
      return jsonRes({ version: "0.1.0", hash: "abc", collections: cols, plugins: {} });
    }
    // --- Content list ---
    const listMatch = path.match(CONTENT_LIST_REGEX);
    if (req.method === "GET" && listMatch) {
      const collectionSlug = listMatch[1];
      const status = url.searchParams.get("status");
      const items = [...content.values()]
        .filter((i) => i.type === collectionSlug)
        .filter((i) => !status || i.status === status);
      // Single page: nextCursor is undefined (JSON.stringify drops the key).
      return jsonRes({ items, nextCursor: undefined });
    }
    // --- Content create ---
    if (req.method === "POST" && listMatch) {
      const collectionSlug = listMatch[1];
      const body = (await req.json()) as {
        data: Record<string, unknown>;
        slug?: string;
        status?: string;
      };
      const id = `item_${++idCounter}`;
      const now = new Date().toISOString();
      const item: StoredItem = {
        id,
        type: collectionSlug,
        slug: body.slug ?? null,
        status: body.status ?? "draft", // new items default to draft
        data: body.data,
        authorId: null,
        createdAt: now,
        updatedAt: now,
        publishedAt: null,
        scheduledAt: null,
        liveRevisionId: null,
        draftRevisionId: null,
        version: 1,
      };
      content.set(id, item);
      return jsonRes({ item, _rev: encodeRev(item) });
    }
    // --- Content get/update/delete ---
    const itemMatch = path.match(CONTENT_ITEM_REGEX);
    if (itemMatch) {
      const itemId = itemMatch[2];
      const item = content.get(itemId);
      if (req.method === "GET") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        return jsonRes({ item, _rev: encodeRev(item) });
      }
      if (req.method === "PUT") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        const body = (await req.json()) as {
          data?: Record<string, unknown>;
          slug?: string;
          status?: string;
          _rev?: string;
        };
        // Check _rev for conflict. A missing _rev is a deliberate "blind
        // write" and skips the check entirely.
        if (body._rev) {
          const expected = encodeRev(item);
          if (body._rev !== expected) {
            return jsonRes(
              {
                error: {
                  code: "CONFLICT",
                  message: "Entry has been modified since last read",
                },
              },
              409,
            );
          }
        }
        // Apply updates (data is shallow-merged, not replaced)
        if (body.data) item.data = { ...item.data, ...body.data };
        if (body.slug !== undefined) item.slug = body.slug;
        if (body.status) item.status = body.status;
        item.updatedAt = new Date().toISOString();
        item.version++;
        return jsonRes({ item, _rev: encodeRev(item) });
      }
      if (req.method === "DELETE") {
        if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
        // Soft delete: item stays in the store, marked trashed.
        item.status = "trashed";
        item.updatedAt = new Date().toISOString();
        return jsonRes({});
      }
    }
    // --- Content actions ---
    const actionMatch = path.match(CONTENT_ACTION_REGEX);
    if (req.method === "POST" && actionMatch) {
      const itemId = actionMatch[2];
      const action = actionMatch[3];
      const item = content.get(itemId);
      if (!item) return jsonRes({ error: { code: "NOT_FOUND", message: "Not found" } }, 404);
      switch (action) {
        case "publish":
          item.status = "published";
          item.publishedAt = new Date().toISOString();
          break;
        case "unpublish":
          item.status = "draft";
          item.publishedAt = null;
          break;
        case "schedule": {
          const body = (await req.json()) as { scheduledAt: string };
          item.scheduledAt = body.scheduledAt;
          break;
        }
        case "restore":
          // Mock simplification: always restores to "draft".
          // NOTE(review): the real server appears to restore the previous
          // status instead — fine here since these tests never assert the
          // post-restore status, but confirm before reusing this mock.
          item.status = "draft";
          break;
      }
      item.updatedAt = new Date().toISOString();
      return jsonRes({});
    }
    // --- Search ---
    if (req.method === "GET" && path === "/search") {
      // Naive substring search over the serialized item data.
      const q = url.searchParams.get("q") ?? "";
      const items = [...content.values()]
        .filter((i) => JSON.stringify(i.data).toLowerCase().includes(q.toLowerCase()))
        .map((i) => ({
          id: i.id,
          collection: i.type,
          title: typeof i.data.title === "string" ? i.data.title : "",
          score: 1,
        }));
      return jsonRes({ items });
    }
    // Fallback: no route matched.
    return jsonRes(
      { error: { code: "NOT_FOUND", message: `No route: ${req.method} ${path}` } },
      404,
    );
  };
  // Maps are exposed so tests can inspect or pre-seed backend state.
  return { interceptor, collections, content };
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe("EmDashClient lifecycle (integration)", () => {
  /**
   * Build a fresh client wired to its own isolated mock backend.
   * Each test gets independent state; the baseUrl is never dialed because
   * the interceptor answers every request.
   */
  function createClient() {
    const { interceptor, content } = createStatefulBackend();
    const client = new EmDashClient({
      baseUrl: "http://localhost:4321",
      token: "test",
      interceptors: [interceptor],
    });
    return { client, content };
  }

  it("full content CRUD lifecycle", async () => {
    const { client } = createClient();
    // Create
    const created = await client.create("posts", {
      data: { title: "My Post", body: "Hello **world**" },
      slug: "my-post",
      status: "draft",
    });
    expect(created.id).toBeDefined();
    expect(created.slug).toBe("my-post");
    expect(created.status).toBe("draft");
    // body was converted from markdown to PT
    expect(Array.isArray(created.data.body)).toBe(true);
    // List
    const list = await client.list("posts");
    expect(list.items).toHaveLength(1);
    expect(list.items[0].id).toBe(created.id);
    // Get — returns _rev for optimistic concurrency
    const fetched = await client.get("posts", created.id);
    expect(fetched.id).toBe(created.id);
    expect(typeof fetched.data.body).toBe("string"); // PT -> markdown
    expect(fetched.data.body).toContain("world");
    expect(fetched._rev).toBeDefined();
    // Update with explicit _rev
    const updated = await client.update("posts", created.id, {
      data: { title: "Updated Title" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Updated Title");
    // Publish
    await client.publish("posts", created.id);
    // List published
    const published = await client.list("posts", { status: "published" });
    expect(published.items).toHaveLength(1);
    // Unpublish
    await client.unpublish("posts", created.id);
    // Delete (soft)
    await client.delete("posts", created.id);
  });

  it("blind update succeeds without _rev", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "Test" },
    });
    // Update without reading — blind write (no _rev) should succeed
    const updated = await client.update("posts", item.id, {
      data: { title: "Blind Write OK" },
    });
    expect(updated.data.title).toBe("Blind Write OK");
  });

  it("get() returns _rev and update() accepts it for conflict detection", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "Test" },
    });
    // Read — should return _rev on the item
    const fetched = await client.get("posts", item.id);
    expect(fetched._rev).toBeDefined();
    // Update with explicit _rev
    const updated = await client.update("posts", item.id, {
      data: { title: "Safe Update" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Safe Update");
  });

  it("multiple sequential updates work with explicit _rev", async () => {
    const { client } = createClient();
    const item = await client.create("posts", {
      data: { title: "V1" },
    });
    // First read
    const v1 = await client.get("posts", item.id);
    // First update with _rev
    await client.update("posts", item.id, {
      data: { title: "V2" },
      _rev: v1._rev,
    });
    // Re-read for fresh _rev (previous rev is now stale)
    const v2 = await client.get("posts", item.id);
    // Second update with new _rev
    const v3 = await client.update("posts", item.id, {
      data: { title: "V3" },
      _rev: v2._rev,
    });
    expect(v3.data.title).toBe("V3");
  });

  it("listAll() iterates through all items", async () => {
    const { client } = createClient();
    // Create multiple items
    await client.create("posts", { data: { title: "A" } });
    await client.create("posts", { data: { title: "B" } });
    await client.create("posts", { data: { title: "C" } });
    const all = [];
    for await (const item of client.listAll("posts")) {
      all.push(item);
    }
    expect(all).toHaveLength(3);
  });

  it("schedule() sets scheduling metadata", async () => {
    const { client } = createClient();
    const item = await client.create("posts", { data: { title: "Scheduled" } });
    await client.schedule("posts", item.id, { at: "2026-06-01T09:00:00Z" });
    // Verify via get
    const fetched = await client.get("posts", item.id);
    expect(fetched.scheduledAt).toBe("2026-06-01T09:00:00Z");
  });

  it("search() finds matching content", async () => {
    const { client } = createClient();
    await client.create("posts", { data: { title: "Deployment Guide" } });
    await client.create("posts", { data: { title: "Getting Started" } });
    const results = await client.search("deployment");
    expect(results).toHaveLength(1);
    expect(results[0].title).toBe("Deployment Guide");
  });

  it("schema operations work", async () => {
    const { client } = createClient();
    const cols = await client.collections();
    expect(cols.length).toBeGreaterThan(0);
    expect(cols[0].slug).toBe("posts");
    const col = await client.collection("posts");
    expect(col.fields).toHaveLength(3);
    expect(col.fields[0].slug).toBe("title");
  });

  it("manifest() returns full schema", async () => {
    const { client } = createClient();
    const manifest = await client.manifest();
    expect(manifest.version).toBe("0.1.0");
    expect(manifest.collections.posts).toBeDefined();
    expect(manifest.collections.posts.fields.title).toBeDefined();
  });

  it("API errors are typed correctly", async () => {
    const { client } = createClient();
    try {
      await client.get("posts", "nonexistent");
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(404);
      expect(apiErr.code).toBe("NOT_FOUND");
    }
  });

  it("PT conversion round-trips through create and get", async () => {
    const { client } = createClient();
    // Create with markdown
    const item = await client.create("posts", {
      data: {
        title: "Markdown Post",
        body: "# Hello\n\nSome **bold** text\n\n- Item 1\n- Item 2",
      },
    });
    // Data stored as PT
    expect(Array.isArray(item.data.body)).toBe(true);
    // Get returns markdown
    const fetched = await client.get("posts", item.id);
    expect(typeof fetched.data.body).toBe("string");
    const body = fetched.data.body as string;
    expect(body).toContain("# Hello");
    expect(body).toContain("**bold**");
    expect(body).toContain("- Item 1");
  });
});

View File

@@ -0,0 +1,395 @@
/**
* E2E tests for EmDashClient against a real Astro dev server.
*
* Uses an isolated fixture (not the demo site). The test helper
* creates a temp directory, starts a fresh dev server, runs setup,
* and seeds collections with test data.
*
* Runs by default. Requires built artifacts (auto-builds if missing).
*/
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import { EmDashClient, EmDashApiError } from "../../../src/client/index.js";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
// Dedicated port for this suite (the CLI integration suite uses 4398).
const PORT = 4399;
// Generous budget for server startup / first-run builds in beforeAll.
const TIMEOUT = 60_000;
describe("EmDashClient Integration", () => {
  let ctx: TestServerContext;

  beforeAll(async () => {
    assertNodeVersion();
    ctx = await createTestServer({ port: PORT });
  }, TIMEOUT);

  afterAll(async () => {
    // Optional chaining: ctx is undefined if beforeAll failed before assignment.
    await ctx?.cleanup();
  });

  it("fetches the manifest", async () => {
    const manifest = await ctx.client.manifest();
    expect(manifest.version).toBeDefined();
    expect(typeof manifest.collections).toBe("object");
  });

  it("lists collections", async () => {
    const collections = await ctx.client.collections();
    expect(Array.isArray(collections)).toBe(true);
    // Seeded collections should be present
    const slugs = collections.map((c: { slug: string }) => c.slug);
    expect(slugs).toContain("posts");
    expect(slugs).toContain("pages");
  });

  it("lists seeded content", async () => {
    const posts = await ctx.client.list("posts");
    expect(posts.items.length).toBeGreaterThanOrEqual(2);
    // Check published posts are returned
    const titles = posts.items.map((p: { data: Record<string, unknown> }) => p.data.title);
    expect(titles).toContain("First Post");
    expect(titles).toContain("Second Post");
  });

  it("creates, reads, updates, and deletes content", async () => {
    // Create
    const item = await ctx.client.create("posts", {
      data: { title: "E2E Article", body: "Hello **e2e**", excerpt: "Testing" },
      slug: "e2e-article",
    });
    expect(item.id).toBeDefined();
    expect(item.slug).toBe("e2e-article");
    // Read — returns _rev for optimistic concurrency
    const fetched = await ctx.client.get("posts", item.id);
    expect(fetched.data.title).toBe("E2E Article");
    expect(typeof fetched.data.body).toBe("string"); // PT→Markdown
    expect(fetched._rev).toBeDefined();
    // Update — pass _rev explicitly
    const updated = await ctx.client.update("posts", item.id, {
      data: { title: "Updated E2E Article" },
      _rev: fetched._rev,
    });
    expect(updated.data.title).toBe("Updated E2E Article");
    // Publish / unpublish
    await ctx.client.publish("posts", item.id);
    await ctx.client.unpublish("posts", item.id);
    // Delete
    await ctx.client.delete("posts", item.id);
  });

  it("blind update succeeds without _rev", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Blind Update Test" },
    });
    // Fresh client — no prior get(), no _rev — blind write should succeed
    const freshClient = new EmDashClient({
      baseUrl: ctx.baseUrl,
      devBypass: true,
    });
    const updated = await freshClient.update("posts", item.id, {
      data: { title: "Blind Write OK" },
    });
    expect(updated.data.title).toBe("Blind Write OK");
    await ctx.client.delete("posts", item.id);
  });

  it("returns Portable Text arrays in raw mode", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Raw Test", body: "Some **bold** text" },
    });
    // Normal get — body as markdown string
    const normal = await ctx.client.get("posts", item.id);
    expect(typeof normal.data.body).toBe("string");
    // Raw get — body as PT array
    const raw = await ctx.client.get("posts", item.id, { raw: true });
    expect(Array.isArray(raw.data.body)).toBe(true);
    await ctx.client.delete("posts", item.id);
  });

  it("authenticates with PAT token", async () => {
    // Use the PAT token directly via fetch (not the devBypass client)
    const res = await fetch(`${ctx.baseUrl}/_emdash/api/content/posts`, {
      headers: { Authorization: `Bearer ${ctx.token}` },
    });
    expect(res.ok).toBe(true);
    const json = (await res.json()) as { data: { items: unknown[] } };
    expect(Array.isArray(json.data.items)).toBe(true);
  });

  // -----------------------------------------------------------------------
  // Rendered output tests
  // -----------------------------------------------------------------------
  /** Fetch a page and return the HTML body text */
  async function fetchHtml(path: string): Promise<string> {
    const res = await fetch(`${ctx.baseUrl}${path}`);
    return res.text();
  }

  it("renders seeded posts on the index page", async () => {
    const html = await fetchHtml("/");
    // Published posts should appear
    expect(html).toContain("First Post");
    expect(html).toContain("Second Post");
    // Draft post should NOT appear on the public page
    expect(html).not.toContain("Draft Post");
  });

  it("renders a single post by slug", async () => {
    const html = await fetchHtml("/posts/first-post");
    expect(html).toContain('<h1 id="title">First Post</h1>');
    expect(html).toContain("The very first post"); // excerpt
  });

  it("returns 404 for a nonexistent slug", async () => {
    const res = await fetch(`${ctx.baseUrl}/posts/does-not-exist`);
    expect(res.status).toBe(404);
  });

  it("reflects API edits in rendered output", async () => {
    // Create and publish a new post
    const item = await ctx.client.create("posts", {
      data: { title: "Render Test Post", excerpt: "Check the HTML" },
      slug: "render-test",
    });
    await ctx.client.publish("posts", item.id);
    // Index page should include the new post
    const indexHtml = await fetchHtml("/");
    expect(indexHtml).toContain("Render Test Post");
    // Single page should render it
    const postHtml = await fetchHtml("/posts/render-test");
    expect(postHtml).toContain("Render Test Post");
    expect(postHtml).toContain("Check the HTML");
    // Update the title via API — pass _rev from get()
    const current = await ctx.client.get("posts", item.id);
    await ctx.client.update("posts", item.id, {
      data: { title: "Edited Render Test" },
      _rev: current._rev,
    });
    // Rendered page should reflect the edit
    const updatedHtml = await fetchHtml("/posts/render-test");
    expect(updatedHtml).toContain("Edited Render Test");
    expect(updatedHtml).not.toContain("Render Test Post");
    // Unpublish — should disappear from index
    await ctx.client.unpublish("posts", item.id);
    const afterUnpublish = await fetchHtml("/");
    expect(afterUnpublish).not.toContain("Edited Render Test");
    // Clean up
    await ctx.client.delete("posts", item.id);
  });

  it("creates and deletes collections", async () => {
    const col = await ctx.client.createCollection({
      slug: "e2e_temp",
      label: "Temp",
    });
    expect(col.slug).toBe("e2e_temp");
    const titleField = await ctx.client.createField("e2e_temp", {
      slug: "title",
      type: "string",
      label: "Title",
    });
    expect(titleField.slug).toBe("title");
    await ctx.client.deleteCollection("e2e_temp");
    // Collection should be gone
    const collections = await ctx.client.collections();
    const slugs = collections.map((c: { slug: string }) => c.slug);
    expect(slugs).not.toContain("e2e_temp");
  });

  // -----------------------------------------------------------------------
  // Media tests
  // -----------------------------------------------------------------------
  it("uploads, gets, lists, and deletes media", async () => {
    // Create a small PNG file (1x1 pixel)
    const pngBytes = new Uint8Array([
      0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44,
      0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90,
      0x77, 0x53, 0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8,
      0xcf, 0xc0, 0x00, 0x00, 0x00, 0x02, 0x00, 0x01, 0xe2, 0x21, 0xbc, 0x33, 0x00, 0x00, 0x00,
      0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82,
    ]);
    // Upload
    const uploaded = await ctx.client.mediaUpload(pngBytes, "test-pixel.png", {
      alt: "A test pixel",
    });
    expect(uploaded.id).toBeDefined();
    expect(uploaded.filename).toBe("test-pixel.png");
    expect(uploaded.mimeType).toBe("image/png");
    // Get by ID
    const fetched = await ctx.client.mediaGet(uploaded.id);
    expect(fetched.id).toBe(uploaded.id);
    expect(fetched.filename).toBe("test-pixel.png");
    // List — should include the uploaded item
    const list = await ctx.client.mediaList();
    expect(list.items.length).toBeGreaterThanOrEqual(1);
    const ids = list.items.map((m: { id: string }) => m.id);
    expect(ids).toContain(uploaded.id);
    // Delete
    await ctx.client.mediaDelete(uploaded.id);
    // Should be gone
    await expect(ctx.client.mediaGet(uploaded.id)).rejects.toThrow();
  });

  // -----------------------------------------------------------------------
  // Conflict detection
  // -----------------------------------------------------------------------
  it("returns 409 on _rev conflict", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Conflict Test" },
    });
    // Two clients both read the same version
    const clientA = new EmDashClient({ baseUrl: ctx.baseUrl, token: ctx.token });
    const clientB = new EmDashClient({ baseUrl: ctx.baseUrl, token: ctx.token });
    const fetchedA = await clientA.get("posts", item.id);
    const fetchedB = await clientB.get("posts", item.id);
    // A updates first — succeeds (passes _rev explicitly)
    await clientA.update("posts", item.id, {
      data: { title: "A wins" },
      _rev: fetchedA._rev,
    });
    // B's _rev is now stale — should get 409
    try {
      await clientB.update("posts", item.id, {
        data: { title: "B loses" },
        _rev: fetchedB._rev,
      });
      expect.fail("Should have thrown a conflict error");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(409);
      expect(apiErr.code).toBe("CONFLICT");
    }
    // Clean up
    await ctx.client.delete("posts", item.id);
  });

  // -----------------------------------------------------------------------
  // Schedule and restore
  // -----------------------------------------------------------------------
  it("schedules and restores content", async () => {
    const item = await ctx.client.create("posts", {
      data: { title: "Schedule Test" },
    });
    // Schedule for a future date
    await ctx.client.schedule("posts", item.id, { at: "2027-06-01T09:00:00Z" });
    // Verify via get
    const fetched = await ctx.client.get("posts", item.id);
    expect(fetched.scheduledAt).toBe("2027-06-01T09:00:00Z");
    // Trash and restore
    await ctx.client.delete("posts", item.id);
    await ctx.client.restore("posts", item.id);
    // Should be accessible again (restore preserves the previous status)
    const restored = await ctx.client.get("posts", item.id);
    expect(restored.status).toBe("scheduled");
    // Final cleanup
    await ctx.client.delete("posts", item.id);
  });

  // -----------------------------------------------------------------------
  // listAll cursor pagination
  // -----------------------------------------------------------------------
  it("listAll iterates through paginated results", async () => {
    // Create enough items to potentially page (use limit=2 to force pagination)
    const ids: string[] = [];
    for (let i = 0; i < 5; i++) {
      const item = await ctx.client.create("posts", {
        data: { title: `Paginate ${i}` },
      });
      ids.push(item.id);
    }
    // listAll with small limit should still get all items
    const all: { id: string }[] = [];
    for await (const item of ctx.client.listAll("posts", { limit: 2 })) {
      all.push(item);
    }
    // Should have at least our 5 + the seeded posts
    expect(all.length).toBeGreaterThanOrEqual(5);
    // All our created IDs should be in the results
    const resultIds = all.map((a) => a.id);
    for (const id of ids) {
      expect(resultIds).toContain(id);
    }
    // Clean up
    for (const id of ids) {
      await ctx.client.delete("posts", id);
    }
  });

  // -----------------------------------------------------------------------
  // Error paths
  // -----------------------------------------------------------------------
  it("throws EmDashApiError on 404", async () => {
    try {
      await ctx.client.get("posts", "nonexistent-id-12345");
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      const apiErr = error as EmDashApiError;
      expect(apiErr.status).toBe(404);
      expect(apiErr.code).toBe("NOT_FOUND");
    }
  });

  it("throws on unauthorized request (no token)", async () => {
    const noAuthClient = new EmDashClient({
      baseUrl: ctx.baseUrl,
      // No token, no devBypass
    });
    try {
      await noAuthClient.collections();
      expect.fail("Should have thrown");
    } catch (error) {
      expect(error).toBeInstanceOf(EmDashApiError);
      expect((error as EmDashApiError).status).toBe(401);
    }
  });
});

View File

@@ -0,0 +1,350 @@
/**
* E2E tests for comment frontend components and API.
*
* Tests the full flow: rendering comments on pages, submitting via the
* public API, approving via admin API, and verifying display.
*
* Note: the public comment API has a rate limit (5 per 10 min per IP).
* Tests are ordered to stay within the limit — avoid adding submissions
* without accounting for the budget.
*/
import { describe, it, expect, beforeAll, afterAll } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
// Dedicated port for this suite so it can run alongside the other E2E files.
const PORT = 4398;
// Generous budget for beforeAll: createTestServer boots a full app.
const TIMEOUT = 60_000;
/** Helper: raw fetch with auth headers */
async function adminFetch(
ctx: TestServerContext,
path: string,
init?: RequestInit,
): Promise<Response> {
return fetch(`${ctx.baseUrl}${path}`, {
...init,
headers: {
Authorization: `Bearer ${ctx.token}`,
"X-EmDash-Request": "1",
"Content-Type": "application/json",
...(init?.headers as Record<string, string>),
},
});
}
/** Helper: fetch HTML page */
async function fetchHtml(ctx: TestServerContext, path: string): Promise<string> {
const res = await fetch(`${ctx.baseUrl}${path}`);
return res.text();
}
/** Helper: submit a comment via the public API */
async function submitComment(
ctx: TestServerContext,
collection: string,
contentId: string,
data: {
authorName: string;
authorEmail: string;
body: string;
parentId?: string;
website_url?: string;
},
): Promise<Response> {
return fetch(
`${ctx.baseUrl}/_emdash/api/comments/${encodeURIComponent(collection)}/${encodeURIComponent(contentId)}`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Origin: ctx.baseUrl,
},
body: JSON.stringify(data),
},
);
}
const COMMENT_COUNT_RE = /\d+ Comments/;
describe("Comments Integration", () => {
let ctx: TestServerContext;
beforeAll(async () => {
assertNodeVersion();
ctx = await createTestServer({ port: PORT });
// Enable comments on the posts collection with "none" moderation
// so comments are auto-approved for most tests
const res = await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({
commentsEnabled: true,
commentsModeration: "none",
}),
});
if (!res.ok) {
const body = await res.text().catch(() => "");
throw new Error(`Failed to enable comments on posts (${res.status}): ${body}`);
}
}, TIMEOUT);
afterAll(async () => {
await ctx?.cleanup();
});
// -----------------------------------------------------------------------
// Server-rendered component (no submissions)
// -----------------------------------------------------------------------
it("renders 'No comments yet' for a post with no comments", async () => {
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain("No comments yet");
expect(html).toContain("ec-comments");
expect(html).toContain("ec-comment-form");
});
it("renders the comment form with correct fields", async () => {
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain('name="authorName"');
expect(html).toContain('name="authorEmail"');
expect(html).toContain('name="body"');
expect(html).toContain('name="website_url"');
expect(html).toContain("Post Comment");
});
// -----------------------------------------------------------------------
// Submission #1: basic submit + rendering + auto-link + XSS escape
// -----------------------------------------------------------------------
it("submits a comment and renders it with auto-linked URLs and escaped HTML", async () => {
const postId = ctx.contentIds["posts"]![0]!;
// Submit a comment with a URL and HTML in the body
const res = await submitComment(ctx, "posts", postId, {
authorName: "Test User",
authorEmail: "test@example.com",
body: 'Check https://example.com and <script>alert("xss")</script>',
});
expect(res.status).toBe(201);
const json = (await res.json()) as { data: { id: string; status: string; message: string } };
expect(json.data.id).toBeDefined();
expect(json.data.status).toBe("approved");
expect(json.data.message).toBe("Comment published");
// Verify rendered page
const html = await fetchHtml(ctx, "/posts/first-post");
expect(html).toContain("Test User");
expect(html).not.toContain("No comments yet");
// Auto-linked URL
expect(html).toContain('href="https://example.com"');
expect(html).toContain('rel="nofollow ugc noopener"');
// HTML escaped (not rendered as real script tag)
expect(html).toContain("&lt;script&gt;");
expect(html).not.toContain('<script>alert("xss")</script>');
});
// -----------------------------------------------------------------------
// Submission #2: honeypot (early exit, doesn't count toward rate limit)
// -----------------------------------------------------------------------
it("silently accepts honeypot submissions", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await submitComment(ctx, "posts", postId, {
authorName: "Bot",
authorEmail: "bot@spam.com",
body: "Buy cheap pills",
website_url: "http://spam.com",
});
// Honeypot: returns 200 OK but doesn't actually create the comment
expect(res.status).toBe(200);
const json = (await res.json()) as { data: { status: string; message: string } };
expect(json.data.status).toBe("pending");
});
// -----------------------------------------------------------------------
// No submission: validation and disabled collection
// -----------------------------------------------------------------------
it("rejects comments when collection has comments disabled", async () => {
const pageId = ctx.contentIds["pages"]![0]!;
const res = await submitComment(ctx, "pages", pageId, {
authorName: "Test",
authorEmail: "test@example.com",
body: "Should fail",
});
expect(res.status).toBe(403);
const data = (await res.json()) as { error: { code: string } };
expect(data.error.code).toBe("COMMENTS_DISABLED");
});
it("returns validation error for missing required fields", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/posts/${postId}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
Origin: ctx.baseUrl,
},
body: JSON.stringify({ authorName: "Test" }),
});
expect(res.status).toBe(400);
});
// -----------------------------------------------------------------------
// No submission: public GET API
// -----------------------------------------------------------------------
it("lists approved comments via the public GET API", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/posts/${postId}`);
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { authorName: string; body: string }[] } };
expect(Array.isArray(json.data.items)).toBe(true);
expect(json.data.items.length).toBeGreaterThan(0);
});
// -----------------------------------------------------------------------
// Submissions #3-4: threading (on second-post)
// -----------------------------------------------------------------------
it("submits and renders threaded replies", async () => {
const postId = ctx.contentIds["posts"]![1]!;
const rootRes = await submitComment(ctx, "posts", postId, {
authorName: "Thread Root",
authorEmail: "root@example.com",
body: "Root comment for threading test",
});
expect(rootRes.status).toBe(201);
const rootJson = (await rootRes.json()) as { data: { id: string } };
const replyRes = await submitComment(ctx, "posts", postId, {
authorName: "Thread Reply",
authorEmail: "reply@example.com",
body: "Reply to root comment",
parentId: rootJson.data.id,
});
expect(replyRes.status).toBe(201);
const html = await fetchHtml(ctx, "/posts/second-post");
expect(html).toContain("Thread Root");
expect(html).toContain("Thread Reply");
expect(html).toContain("ec-comment-replies");
});
// -----------------------------------------------------------------------
// Submission #5: moderation (last one within rate limit)
// -----------------------------------------------------------------------
it("holds comments for moderation and allows admin approval", async () => {
const updateRes = await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({ commentsModeration: "all" }),
});
expect(updateRes.ok).toBe(true);
const postId = ctx.contentIds["posts"]![1]!;
const submitRes = await submitComment(ctx, "posts", postId, {
authorName: "Pending Author",
authorEmail: "pending@example.com",
body: "This needs approval",
});
expect(submitRes.status).toBe(201);
const submitJson = (await submitRes.json()) as { data: { id: string; status: string } };
expect(submitJson.data.status).toBe("pending");
// Pending comment should NOT appear on the rendered page
const htmlBefore = await fetchHtml(ctx, "/posts/second-post");
expect(htmlBefore).not.toContain("This needs approval");
// Approve via admin API
const approveRes = await adminFetch(
ctx,
`/_emdash/api/admin/comments/${submitJson.data.id}/status`,
{
method: "PUT",
body: JSON.stringify({ status: "approved" }),
},
);
expect(approveRes.ok).toBe(true);
// Now it should appear on the rendered page
const htmlAfter = await fetchHtml(ctx, "/posts/second-post");
expect(htmlAfter).toContain("This needs approval");
expect(htmlAfter).toContain("Pending Author");
// Restore "none" moderation
await adminFetch(ctx, "/_emdash/api/schema/collections/posts", {
method: "PUT",
body: JSON.stringify({ commentsModeration: "none" }),
});
});
// -----------------------------------------------------------------------
// No submission: comment count, admin inbox
// -----------------------------------------------------------------------
it("updates the comment count heading as comments are added", async () => {
const html = await fetchHtml(ctx, "/posts/second-post");
expect(html).toMatch(COMMENT_COUNT_RE);
});
it("lists comments in the admin inbox", async () => {
// Default inbox lists all statuses; filter to approved to find our comments
const res = await adminFetch(ctx, "/_emdash/api/admin/comments?status=approved");
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { id: string; status: string }[] } };
expect(Array.isArray(json.data.items)).toBe(true);
expect(json.data.items.length).toBeGreaterThan(0);
});
it("filters admin inbox by status", async () => {
const res = await adminFetch(ctx, "/_emdash/api/admin/comments?status=approved");
expect(res.ok).toBe(true);
const json = (await res.json()) as { data: { items: { status: string }[] } };
for (const item of json.data.items) {
expect(item.status).toBe("approved");
}
});
// -----------------------------------------------------------------------
// No submission: edge cases (GET-only or expected failures)
// -----------------------------------------------------------------------
it("returns 404 for comments on nonexistent collection", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/comments/nonexistent/some-id`);
expect(res.status).toBe(404);
});
it("returns 404 for comments on nonexistent content", async () => {
const res = await submitComment(ctx, "posts", "nonexistent-id", {
authorName: "Test",
authorEmail: "test@example.com",
body: "Should fail",
});
// 404 (content not found) or 429 (rate limited) are both acceptable
expect([404, 429]).toContain(res.status);
});
it("returns 400 for reply to nonexistent parent", async () => {
const postId = ctx.contentIds["posts"]![0]!;
const res = await submitComment(ctx, "posts", postId, {
authorName: "Test",
authorEmail: "test@example.com",
body: "Orphan reply",
parentId: "nonexistent-parent-id",
});
// 400 (parent not found) or 429 (rate limited) are both acceptable
expect([400, 429]).toContain(res.status);
});
});

View File

@@ -0,0 +1,190 @@
/**
* Integration tests for plugin field widgets.
*
* Tests the full pipeline:
* - Manifest includes widget property on fields
* - Manifest includes plugin fieldWidgets declarations
* - Content CRUD works with widget-annotated fields
* - Widget data roundtrips correctly through the API
*
* The integration fixture is configured with the color plugin and a
* "theme_color" field with widget "color:picker" on the posts collection.
*/
import { afterAll, beforeAll, describe, expect, it } from "vitest";
import type { TestServerContext } from "../server.js";
import { assertNodeVersion, createTestServer } from "../server.js";
// Unique port for this suite (parallel-safe with the other E2E files).
const PORT = 4397;
// Server boot can be slow; allow up to 90s for beforeAll.
const TIMEOUT = 90_000;
describe("Field Widgets Integration", () => {
let ctx: TestServerContext;
beforeAll(async () => {
assertNodeVersion();
ctx = await createTestServer({ port: PORT });
}, TIMEOUT);
afterAll(async () => {
await ctx?.cleanup();
});
describe("manifest", () => {
it("includes widget property on the theme_color field", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
expect(res.ok).toBe(true);
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const collections = manifest.collections as Record<string, Record<string, unknown>>;
expect(collections.posts).toBeTruthy();
const fields = collections.posts.fields as Record<string, { kind: string; widget?: string }>;
expect(fields.theme_color).toBeTruthy();
expect(fields.theme_color.kind).toBe("string");
expect(fields.theme_color.widget).toBe("color:picker");
});
it("does not include widget on fields without one", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const collections = manifest.collections as Record<string, Record<string, unknown>>;
const fields = collections.posts.fields as Record<string, { kind: string; widget?: string }>;
expect(fields.title).toBeTruthy();
expect(fields.title.widget).toBeUndefined();
});
it("includes color plugin with fieldWidgets in plugin manifest", async () => {
const res = await fetch(`${ctx.baseUrl}/_emdash/api/manifest`, {
headers: {
Cookie: ctx.sessionCookie,
"X-EmDash-Request": "1",
},
});
const body = (await res.json()) as { data: Record<string, unknown> };
const manifest = body.data;
const plugins = manifest.plugins as Record<string, Record<string, unknown>>;
expect(plugins.color).toBeTruthy();
expect(plugins.color.enabled).toBe(true);
const fieldWidgets = plugins.color.fieldWidgets as Array<{
name: string;
label: string;
fieldTypes: string[];
}>;
expect(fieldWidgets).toBeTruthy();
expect(fieldWidgets.length).toBe(1);
expect(fieldWidgets[0]!.name).toBe("picker");
expect(fieldWidgets[0]!.label).toBe("Color Picker");
expect(fieldWidgets[0]!.fieldTypes).toEqual(["string"]);
});
});
describe("content CRUD with widget fields", () => {
it("creates content with a color widget field value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Colorful Post",
theme_color: "#ff6600",
},
slug: "colorful-post",
});
expect(item.id).toBeDefined();
expect(item.slug).toBe("colorful-post");
});
it("reads back the color value correctly", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Read Color Test",
theme_color: "#00ff88",
},
slug: "read-color-test",
});
const fetched = await ctx.client.get("posts", item.id);
expect(fetched.data.title).toBe("Read Color Test");
expect(fetched.data.theme_color).toBe("#00ff88");
});
it("updates the color value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Update Color Test",
theme_color: "#111111",
},
slug: "update-color-test",
});
const fetched = await ctx.client.get("posts", item.id);
const updated = await ctx.client.update("posts", item.id, {
data: { theme_color: "#222222" },
_rev: fetched._rev,
});
expect(updated.data.theme_color).toBe("#222222");
});
it("allows null/empty color value", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "No Color Post",
},
slug: "no-color-post",
});
const fetched = await ctx.client.get("posts", item.id);
// Color field is optional, so it should be null/undefined
expect(fetched.data.theme_color == null || fetched.data.theme_color === "").toBe(true);
});
it("stores color value alongside other content fields", async () => {
const item = await ctx.client.create("posts", {
data: {
title: "Full Post",
excerpt: "A post with color",
theme_color: "#abcdef",
},
slug: "full-post-with-color",
});
const fetched = await ctx.client.get("posts", item.id);
expect(fetched.data.title).toBe("Full Post");
expect(fetched.data.excerpt).toBe("A post with color");
expect(fetched.data.theme_color).toBe("#abcdef");
});
});
describe("content list with widget fields", () => {
it("includes widget field values in list results", async () => {
await ctx.client.create("posts", {
data: {
title: "Listed Color Post",
theme_color: "#ff0000",
},
slug: "listed-color-post",
});
const list = await ctx.client.list("posts");
const post = list.items.find(
(p: { data: Record<string, unknown> }) => p.data.title === "Listed Color Post",
);
expect(post).toBeTruthy();
expect((post as { data: Record<string, unknown> }).data.theme_color).toBe("#ff0000");
});
});
});

View File

@@ -0,0 +1,518 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { defaultCommentModerate } from "../../../src/comments/moderator.js";
import {
createComment,
moderateComment,
type CommentHookRunner,
} from "../../../src/comments/service.js";
import type { Database } from "../../../src/database/types.js";
import { definePlugin } from "../../../src/plugins/define-plugin.js";
import { createHookPipeline, resolveExclusiveHooks } from "../../../src/plugins/hooks.js";
import type {
CollectionCommentSettings,
CommentBeforeCreateEvent,
CommentModerateEvent,
ModerationDecision,
PluginContext,
} from "../../../src/plugins/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Build a CollectionCommentSettings with sensible test defaults,
 * optionally overridden per-test.
 */
function defaultSettings(
  overrides: Partial<CollectionCommentSettings> = {},
): CollectionCommentSettings {
  const base: CollectionCommentSettings = {
    commentsEnabled: true,
    commentsModeration: "first_time",
    commentsClosedAfterDays: 90,
    commentsAutoApproveUsers: true,
  };
  return { ...base, ...overrides };
}
// Canonical comment payload reused across the service tests; individual
// tests spread and override fields as needed.
const defaultInput = {
  collection: "post",
  contentId: "content-1",
  authorName: "Jane",
  authorEmail: "jane@example.com",
  body: "Great post!",
};
// ---------------------------------------------------------------------------
// Group 1: Service with mocked CommentHookRunner
// ---------------------------------------------------------------------------
describe("Comment Service with CommentHookRunner", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
function makeHookRunner(overrides: Partial<CommentHookRunner> = {}): CommentHookRunner {
return {
runBeforeCreate: vi.fn(async (event: CommentBeforeCreateEvent) => event),
runModerate: vi.fn(async () => ({
status: "approved" as const,
reason: "Test",
})),
fireAfterCreate: vi.fn(),
fireAfterModerate: vi.fn(),
...overrides,
};
}
it("creates comment with status from runModerate", async () => {
const hooks = makeHookRunner({
runModerate: vi.fn(async () => ({ status: "pending" as const, reason: "Held" })),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.status).toBe("pending");
expect(result!.decision.status).toBe("pending");
});
it("transforms comment data via beforeCreate", async () => {
const hooks = makeHookRunner({
runBeforeCreate: vi.fn(async (event: CommentBeforeCreateEvent) => ({
...event,
comment: { ...event.comment, body: "Modified body" },
})),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.body).toBe("Modified body");
});
it("returns null when beforeCreate returns false (rejected)", async () => {
const hooks = makeHookRunner({
runBeforeCreate: vi.fn(async () => false as const),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).toBeNull();
});
it("saves as spam when runModerate returns spam", async () => {
const hooks = makeHookRunner({
runModerate: vi.fn(async () => ({ status: "spam" as const, reason: "Spam detected" })),
});
const result = await createComment(db, defaultInput, defaultSettings(), hooks);
expect(result).not.toBeNull();
expect(result!.comment.status).toBe("spam");
});
it("fires fireAfterCreate with correct shape", async () => {
const hooks = makeHookRunner();
await createComment(db, defaultInput, defaultSettings(), hooks, {
id: "content-1",
collection: "post",
slug: "my-post",
title: "My Post",
});
expect(hooks.fireAfterCreate).toHaveBeenCalledOnce();
const event = (hooks.fireAfterCreate as ReturnType<typeof vi.fn>).mock.calls[0]![0];
expect(event.comment.collection).toBe("post");
expect(event.comment.contentId).toBe("content-1");
expect(event.content.slug).toBe("my-post");
});
it("moderateComment updates status and fires fireAfterModerate", async () => {
const hooks = makeHookRunner();
const created = await createComment(db, defaultInput, defaultSettings(), hooks);
const updated = await moderateComment(
db,
created!.comment.id,
"spam",
{ id: "admin-1", name: "Admin" },
hooks,
);
expect(updated).not.toBeNull();
expect(updated!.status).toBe("spam");
expect(hooks.fireAfterModerate).toHaveBeenCalledOnce();
const event = (hooks.fireAfterModerate as ReturnType<typeof vi.fn>).mock.calls[0]![0];
expect(event.previousStatus).toBe("approved");
expect(event.newStatus).toBe("spam");
expect(event.moderator.id).toBe("admin-1");
});
it("moderateComment returns null for non-existent id", async () => {
const hooks = makeHookRunner();
const result = await moderateComment(
db,
"nonexistent",
"approved",
{ id: "admin-1", name: "Admin" },
hooks,
);
expect(result).toBeNull();
expect(hooks.fireAfterModerate).not.toHaveBeenCalled();
});
});
// ---------------------------------------------------------------------------
// Group 2: Built-in moderator unit tests
// ---------------------------------------------------------------------------
describe("Built-in Default Comment Moderator", () => {
const ctx = {} as PluginContext;
function makeModerateEvent(overrides: Partial<CommentModerateEvent> = {}): CommentModerateEvent {
return {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings(),
priorApprovedCount: 0,
...overrides,
};
}
it("auto-approves authenticated CMS users when configured", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
comment: {
...makeModerateEvent().comment,
authorUserId: "user-1",
},
collectionSettings: defaultSettings({ commentsAutoApproveUsers: true }),
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("Authenticated");
});
it("does not auto-approve when commentsAutoApproveUsers is false", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
comment: {
...makeModerateEvent().comment,
authorUserId: "user-1",
},
collectionSettings: defaultSettings({
commentsAutoApproveUsers: false,
commentsModeration: "all",
}),
}),
ctx,
);
expect(decision.status).toBe("pending");
});
it("approves when moderation is 'none'", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "none" }),
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("disabled");
});
it("approves returning commenter with first_time moderation", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "first_time" }),
priorApprovedCount: 3,
}),
ctx,
);
expect(decision.status).toBe("approved");
expect(decision.reason).toContain("Returning");
});
it("holds new commenter with first_time moderation", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "first_time" }),
priorApprovedCount: 0,
}),
ctx,
);
expect(decision.status).toBe("pending");
});
it("holds all comments when moderation is 'all'", async () => {
const decision = await defaultCommentModerate(
makeModerateEvent({
collectionSettings: defaultSettings({ commentsModeration: "all" }),
priorApprovedCount: 10,
}),
ctx,
);
expect(decision.status).toBe("pending");
});
});
// ---------------------------------------------------------------------------
// Group 3: Real HookPipeline integration
// ---------------------------------------------------------------------------
describe("Comment Hooks with HookPipeline", () => {
let pipelineDb: Kysely<Database>;
beforeEach(async () => {
pipelineDb = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(pipelineDb);
});
it("invokes comment:beforeCreate handler registered via definePlugin", async () => {
const spy = vi.fn(async (event: CommentBeforeCreateEvent) => ({
...event,
metadata: { ...event.metadata, enriched: true },
}));
const plugin = definePlugin({
id: "test-enricher",
version: "1.0.0",
capabilities: ["read:users"],
hooks: {
"comment:beforeCreate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
const event: CommentBeforeCreateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
};
const result = await pipeline.runCommentBeforeCreate(event);
expect(spy).toHaveBeenCalledOnce();
expect(result).not.toBe(false);
expect((result as CommentBeforeCreateEvent).metadata.enriched).toBe(true);
});
it("invokes exclusive comment:moderate plugin and returns decision", async () => {
const moderateHandler = vi.fn(async () => ({
status: "spam" as const,
reason: "Custom moderator",
}));
const plugin = definePlugin({
id: "test-moderator",
version: "1.0.0",
capabilities: ["read:users"],
hooks: {
"comment:moderate": {
exclusive: true,
handler: moderateHandler,
},
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
// Auto-select the sole provider
await resolveExclusiveHooks({
pipeline,
isActive: () => true,
getOption: async () => null,
setOption: async () => {},
deleteOption: async () => {},
});
const moderateEvent: CommentModerateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Buy cheap pills",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings(),
priorApprovedCount: 0,
};
const result = await pipeline.invokeExclusiveHook("comment:moderate", moderateEvent);
expect(result).not.toBeNull();
expect((result!.result as ModerationDecision).status).toBe("spam");
expect(moderateHandler).toHaveBeenCalledOnce();
});
it("built-in moderator is auto-selected when sole provider", async () => {
const { DEFAULT_COMMENT_MODERATOR_PLUGIN_ID } =
await import("../../../src/comments/moderator.js");
const plugin = definePlugin({
id: DEFAULT_COMMENT_MODERATOR_PLUGIN_ID,
version: "0.0.0",
capabilities: ["read:users"],
hooks: {
"comment:moderate": {
exclusive: true,
handler: defaultCommentModerate,
},
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await resolveExclusiveHooks({
pipeline,
isActive: () => true,
getOption: async () => null,
setOption: async () => {},
deleteOption: async () => {},
});
const selection = pipeline.getExclusiveSelection("comment:moderate");
expect(selection).toBe(DEFAULT_COMMENT_MODERATOR_PLUGIN_ID);
// Verify it actually works
const moderateEvent: CommentModerateEvent = {
comment: {
collection: "post",
contentId: "c1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
ipHash: null,
userAgent: null,
},
metadata: {},
collectionSettings: defaultSettings({ commentsModeration: "none" }),
priorApprovedCount: 0,
};
const result = await pipeline.invokeExclusiveHook("comment:moderate", moderateEvent);
expect(result).not.toBeNull();
expect((result!.result as ModerationDecision).status).toBe("approved");
});
it("fires comment:afterCreate handlers", async () => {
const spy = vi.fn(async () => {});
const plugin = definePlugin({
id: "test-after-create",
version: "1.0.0",
capabilities: ["read:users"],
hooks: {
"comment:afterCreate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await pipeline.runCommentAfterCreate({
comment: {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
status: "approved",
moderationMetadata: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
},
metadata: {},
content: { id: "content-1", collection: "post", slug: "my-post" },
});
expect(spy).toHaveBeenCalledOnce();
});
it("fires comment:afterModerate handlers", async () => {
const spy = vi.fn(async () => {});
const plugin = definePlugin({
id: "test-after-moderate",
version: "1.0.0",
capabilities: ["read:users"],
hooks: {
"comment:afterModerate": spy,
},
});
const pipeline = createHookPipeline([plugin], { db: pipelineDb });
await pipeline.runCommentAfterModerate({
comment: {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: null,
body: "Hello",
status: "approved",
moderationMetadata: null,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
},
previousStatus: "pending",
newStatus: "approved",
moderator: { id: "admin-1", name: "Admin" },
});
expect(spy).toHaveBeenCalledOnce();
});
});

View File

@@ -0,0 +1,318 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import {
buildCommentNotificationEmail,
lookupContentAuthor,
sendCommentNotification,
} from "../../../src/comments/notifications.js";
import type { Database } from "../../../src/database/types.js";
import type { EmailPipeline } from "../../../src/plugins/email.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("Comment Notifications", () => {
describe("buildCommentNotificationEmail", () => {
it("builds email with content title", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Great post!",
contentTitle: "My Blog Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.to).toBe("author@example.com");
expect(email.subject).toBe('New comment on "My Blog Post"');
expect(email.text).toContain("Jane");
expect(email.text).toContain("Great post!");
expect(email.text).toContain("/_emdash/admin/comments");
expect(email.html).toContain("Jane");
expect(email.html).toContain("Great post!");
});
it("falls back to collection name when no title", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Nice!",
contentTitle: "",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.subject).toBe('New comment on "post item"');
});
it("truncates long comment bodies", () => {
const longBody = "x".repeat(600);
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: longBody,
contentTitle: "Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.text).toContain("...");
expect(email.text).not.toContain("x".repeat(600));
});
it("escapes HTML in author name and body", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: '<script>alert("xss")</script>',
commentBody: "<img src=x onerror=alert(1)>",
contentTitle: "Post",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.html).not.toContain("<script>");
expect(email.html).not.toContain("<img src=x");
expect(email.html).toContain("&lt;script&gt;");
});
it("strips CRLF from subject to prevent header injection", () => {
const email = buildCommentNotificationEmail("author@example.com", {
commentAuthorName: "Jane",
commentBody: "Nice!",
contentTitle: "Post\r\nBcc: attacker@evil.com",
collection: "post",
adminBaseUrl: "https://example.com/_emdash",
});
expect(email.subject).not.toContain("\r");
expect(email.subject).not.toContain("\n");
expect(email.subject).toContain("Post");
});
});
describe("sendCommentNotification", () => {
let mockEmail: EmailPipeline;
let sendSpy: ReturnType<typeof vi.fn>;
beforeEach(() => {
sendSpy = vi.fn().mockResolvedValue(undefined);
mockEmail = {
send: sendSpy,
isAvailable: () => true,
} as unknown as EmailPipeline;
});
it("sends notification for approved comments", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentTitle: "My Post",
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(true);
expect(sendSpy).toHaveBeenCalledOnce();
const [message, source] = sendSpy.mock.calls[0]!;
expect(message.to).toBe("author@example.com");
expect(message.subject).toContain("My Post");
expect(source).toBe("emdash-comments");
});
it("skips pending comments", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "pending",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when no content author", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentAuthor: undefined,
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when email provider not available", async () => {
mockEmail = {
send: sendSpy,
isAvailable: () => false,
} as unknown as EmailPipeline;
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("skips when commenter is the content author", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Author",
authorEmail: "author@example.com",
body: "My own comment",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
it("compares emails case-insensitively for self-comment check", async () => {
const sent = await sendCommentNotification({
email: mockEmail,
comment: {
authorName: "Author",
authorEmail: "Author@Example.COM",
body: "My own comment",
status: "approved",
collection: "post",
},
contentAuthor: { email: "author@example.com", name: "Author" },
adminBaseUrl: "https://example.com/_emdash",
});
expect(sent).toBe(false);
expect(sendSpy).not.toHaveBeenCalled();
});
});
describe("lookupContentAuthor", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabaseWithCollections();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
it("returns null for non-existent content", async () => {
const result = await lookupContentAuthor(db, "post", "nonexistent");
expect(result).toBeNull();
});
it("returns slug and author for content with author", async () => {
await db
.insertInto("users")
.values({
id: "user1",
email: "author@example.com",
name: "Author Name",
role: 50,
email_verified: 1,
})
.execute();
await db
.insertInto("ec_post" as never)
.values({
id: "post1",
slug: "my-post",
status: "published",
author_id: "user1",
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post1");
expect(result).not.toBeNull();
expect(result!.slug).toBe("my-post");
expect(result!.author).toEqual({
id: "user1",
email: "author@example.com",
name: "Author Name",
});
});
it("excludes author with unverified email", async () => {
await db
.insertInto("users")
.values({
id: "unverified1",
email: "unverified@example.com",
name: "Unverified",
role: 50,
email_verified: 0,
})
.execute();
await db
.insertInto("ec_post" as never)
.values({
id: "post3",
slug: "unverified-post",
status: "published",
author_id: "unverified1",
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post3");
expect(result).not.toBeNull();
expect(result!.slug).toBe("unverified-post");
expect(result!.author).toBeUndefined();
});
it("rejects invalid collection names", async () => {
await expect(lookupContentAuthor(db, "'; DROP TABLE users; --", "post1")).rejects.toThrow(
"collection",
);
});
it("returns slug without author for content without author_id", async () => {
await db
.insertInto("ec_post" as never)
.values({
id: "post2",
slug: "orphan-post",
status: "published",
author_id: null,
} as never)
.execute();
const result = await lookupContentAuthor(db, "post", "post2");
expect(result).not.toBeNull();
expect(result!.slug).toBe("orphan-post");
expect(result!.author).toBeUndefined();
});
});
});

View File

@@ -0,0 +1,412 @@
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { CommentRepository, type Comment } from "../../../src/database/repositories/comment.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
describe("CommentRepository", () => {
let db: Kysely<Database>;
let repo: CommentRepository;
beforeEach(async () => {
db = await setupTestDatabase();
repo = new CommentRepository(db);
});
afterEach(async () => {
await teardownTestDatabase(db);
});
// -------------------------------------------------------------------------
// Helpers
// -------------------------------------------------------------------------
function makeInput(overrides: Partial<Parameters<CommentRepository["create"]>[0]> = {}) {
return {
collection: "post",
contentId: "content-1",
authorName: "Jane",
authorEmail: "jane@example.com",
body: "Great post!",
...overrides,
};
}
// -------------------------------------------------------------------------
// CRUD
// -------------------------------------------------------------------------
describe("CRUD", () => {
it("creates a comment and returns it with id and timestamps", async () => {
const comment = await repo.create(makeInput());
expect(comment.id).toBeTruthy();
expect(comment.collection).toBe("post");
expect(comment.contentId).toBe("content-1");
expect(comment.authorName).toBe("Jane");
expect(comment.authorEmail).toBe("jane@example.com");
expect(comment.body).toBe("Great post!");
expect(comment.status).toBe("pending");
expect(comment.createdAt).toBeTruthy();
expect(comment.updatedAt).toBeTruthy();
expect(comment.parentId).toBeNull();
});
it("findById returns the comment", async () => {
const created = await repo.create(makeInput());
const found = await repo.findById(created.id);
expect(found).not.toBeNull();
expect(found!.id).toBe(created.id);
expect(found!.authorName).toBe("Jane");
});
it("findById returns null for non-existent id", async () => {
const found = await repo.findById("nonexistent");
expect(found).toBeNull();
});
it("findByContent returns matching comments", async () => {
await repo.create(makeInput());
await repo.create(makeInput({ body: "Second comment" }));
await repo.create(makeInput({ contentId: "other-content" }));
const result = await repo.findByContent("post", "content-1");
expect(result.items).toHaveLength(2);
expect(result.items.every((c) => c.contentId === "content-1")).toBe(true);
});
it("findByStatus filters by status", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "spam" }));
const result = await repo.findByStatus("approved");
expect(result.items).toHaveLength(1);
expect(result.items[0]!.status).toBe("approved");
});
});
// -------------------------------------------------------------------------
// Status transitions
// -------------------------------------------------------------------------
describe("Status transitions", () => {
it("updateStatus changes status", async () => {
const created = await repo.create(makeInput());
const updated = await repo.updateStatus(created.id, "approved");
expect(updated).not.toBeNull();
expect(updated!.status).toBe("approved");
expect(updated!.id).toBe(created.id);
});
it("bulkUpdateStatus returns count of updated rows", async () => {
const c1 = await repo.create(makeInput());
const c2 = await repo.create(makeInput({ body: "Second" }));
const count = await repo.bulkUpdateStatus([c1.id, c2.id], "approved");
expect(count).toBe(2);
const found1 = await repo.findById(c1.id);
const found2 = await repo.findById(c2.id);
expect(found1!.status).toBe("approved");
expect(found2!.status).toBe("approved");
});
it("bulkUpdateStatus returns 0 for empty array", async () => {
const count = await repo.bulkUpdateStatus([], "approved");
expect(count).toBe(0);
});
});
// -------------------------------------------------------------------------
// Deletion
// -------------------------------------------------------------------------
describe("Deletion", () => {
it("delete hard-deletes and returns true", async () => {
const created = await repo.create(makeInput());
const deleted = await repo.delete(created.id);
expect(deleted).toBe(true);
expect(await repo.findById(created.id)).toBeNull();
});
it("delete returns false for non-existent id", async () => {
const deleted = await repo.delete("nonexistent");
expect(deleted).toBe(false);
});
it("bulkDelete returns count", async () => {
const c1 = await repo.create(makeInput());
const c2 = await repo.create(makeInput({ body: "Second" }));
const count = await repo.bulkDelete([c1.id, c2.id]);
expect(count).toBe(2);
});
it("bulkDelete returns 0 for empty array", async () => {
const count = await repo.bulkDelete([]);
expect(count).toBe(0);
});
it("deleteByContent removes all comments for content", async () => {
await repo.create(makeInput());
await repo.create(makeInput({ body: "Second" }));
await repo.create(makeInput({ contentId: "other-content" }));
const count = await repo.deleteByContent("post", "content-1");
expect(count).toBe(2);
const remaining = await repo.findByContent("post", "content-1");
expect(remaining.items).toHaveLength(0);
const other = await repo.findByContent("post", "other-content");
expect(other.items).toHaveLength(1);
});
it("parent FK cascade deletes replies", async () => {
const parent = await repo.create(makeInput());
const reply = await repo.create(makeInput({ parentId: parent.id, body: "Reply" }));
await repo.delete(parent.id);
expect(await repo.findById(parent.id)).toBeNull();
expect(await repo.findById(reply.id)).toBeNull();
});
});
// -------------------------------------------------------------------------
// Counting
// -------------------------------------------------------------------------
describe("Counting", () => {
it("countByContent with and without status filter", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "approved" }));
const total = await repo.countByContent("post", "content-1");
expect(total).toBe(3);
const approved = await repo.countByContent("post", "content-1", "approved");
expect(approved).toBe(2);
const pending = await repo.countByContent("post", "content-1", "pending");
expect(pending).toBe(1);
});
it("countByStatus returns grouped counts", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
await repo.create(makeInput({ status: "spam" }));
const counts = await repo.countByStatus();
expect(counts.approved).toBe(2);
expect(counts.pending).toBe(1);
expect(counts.spam).toBe(1);
expect(counts.trash).toBe(0);
});
it("countApprovedByEmail counts only approved comments", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
const count = await repo.countApprovedByEmail("jane@example.com");
expect(count).toBe(2);
});
});
// -------------------------------------------------------------------------
// Cursor pagination
// -------------------------------------------------------------------------
describe("Cursor pagination", () => {
it("findByContent paginates with cursor", async () => {
// Create 5 comments
for (let i = 0; i < 5; i++) {
await repo.create(makeInput({ body: `Comment ${i}` }));
}
const page1 = await repo.findByContent("post", "content-1", { limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.findByContent("post", "content-1", {
limit: 2,
cursor: page1.nextCursor,
});
expect(page2.items).toHaveLength(2);
expect(page2.nextCursor).toBeTruthy();
const page3 = await repo.findByContent("post", "content-1", {
limit: 2,
cursor: page2.nextCursor,
});
expect(page3.items).toHaveLength(1);
expect(page3.nextCursor).toBeUndefined();
// Ensure no duplicates across pages
const allIds = [...page1.items, ...page2.items, ...page3.items].map((c) => c.id);
expect(new Set(allIds).size).toBe(5);
});
it("findByStatus paginates with cursor", async () => {
for (let i = 0; i < 4; i++) {
await repo.create(makeInput({ status: "approved", body: `Comment ${i}` }));
}
const page1 = await repo.findByStatus("approved", { limit: 2 });
expect(page1.items).toHaveLength(2);
expect(page1.nextCursor).toBeTruthy();
const page2 = await repo.findByStatus("approved", {
limit: 2,
cursor: page1.nextCursor,
});
expect(page2.items).toHaveLength(2);
expect(page2.nextCursor).toBeUndefined();
});
});
// -------------------------------------------------------------------------
// Threading
// -------------------------------------------------------------------------
describe("Threading", () => {
it("assembleThreads produces 1-level nesting", () => {
const root: Comment = {
id: "root",
collection: "post",
contentId: "c1",
parentId: null,
authorName: "A",
authorEmail: "a@test.com",
authorUserId: null,
body: "Root",
status: "approved",
ipHash: null,
userAgent: null,
moderationMetadata: null,
createdAt: "2026-01-01T00:00:00.000Z",
updatedAt: "2026-01-01T00:00:00.000Z",
};
const reply: Comment = {
...root,
id: "reply1",
parentId: "root",
body: "Reply",
};
const threads = CommentRepository.assembleThreads([root, reply]);
expect(threads).toHaveLength(1);
expect((threads[0] as Comment & { _replies?: Comment[] })._replies).toHaveLength(1);
});
it("toPublicComment strips private fields", () => {
const comment: Comment & { _replies?: Comment[] } = {
id: "c1",
collection: "post",
contentId: "content-1",
parentId: null,
authorName: "Jane",
authorEmail: "jane@example.com",
authorUserId: "user-1",
body: "Great!",
status: "approved",
ipHash: "abc123",
userAgent: "Mozilla/5.0",
moderationMetadata: { score: 0.9 },
createdAt: "2026-01-01T00:00:00.000Z",
updatedAt: "2026-01-01T00:00:00.000Z",
};
const pub = CommentRepository.toPublicComment(comment);
expect(pub.id).toBe("c1");
expect(pub.authorName).toBe("Jane");
expect(pub.isRegisteredUser).toBe(true);
expect(pub.body).toBe("Great!");
expect(pub.createdAt).toBe("2026-01-01T00:00:00.000Z");
// Private fields should not be present
expect("authorEmail" in pub).toBe(false);
expect("ipHash" in pub).toBe(false);
expect("userAgent" in pub).toBe(false);
expect("moderationMetadata" in pub).toBe(false);
expect("status" in pub).toBe(false);
});
});
// -------------------------------------------------------------------------
// Edge cases
// -------------------------------------------------------------------------
describe("Edge cases", () => {
it("returns empty results for non-existent content", async () => {
const result = await repo.findByContent("post", "nonexistent");
expect(result.items).toHaveLength(0);
expect(result.nextCursor).toBeUndefined();
});
it("moderationMetadata JSON round-trips correctly", async () => {
const metadata = {
aiScore: 0.95,
categories: ["safe"],
nested: { key: "value" },
};
const created = await repo.create(makeInput({ moderationMetadata: metadata }));
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toEqual(metadata);
});
it("moderationMetadata null round-trips", async () => {
const created = await repo.create(makeInput());
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toBeNull();
});
it("findByStatus with search filters by body", async () => {
await repo.create(makeInput({ status: "approved", body: "Hello world" }));
await repo.create(makeInput({ status: "approved", body: "Goodbye world" }));
const result = await repo.findByStatus("approved", { search: "Hello" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.body).toBe("Hello world");
});
it("findByStatus with search filters by author name", async () => {
await repo.create(makeInput({ status: "approved", authorName: "Alice" }));
await repo.create(makeInput({ status: "approved", authorName: "Bob" }));
const result = await repo.findByStatus("approved", { search: "Alice" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.authorName).toBe("Alice");
});
it("findByContent with status filter", async () => {
await repo.create(makeInput({ status: "approved" }));
await repo.create(makeInput({ status: "pending" }));
const result = await repo.findByContent("post", "content-1", { status: "approved" });
expect(result.items).toHaveLength(1);
expect(result.items[0]!.status).toBe("approved");
});
it("updateModerationMetadata updates the JSON field", async () => {
const created = await repo.create(makeInput());
await repo.updateModerationMetadata(created.id, { score: 0.5 });
const found = await repo.findById(created.id);
expect(found!.moderationMetadata).toEqual({ score: 0.5 });
});
});
});

View File

@@ -0,0 +1,345 @@
/**
* Dialect compatibility tests
*
* Runs core database operations against every available dialect.
* SQLite always runs (in-memory). Postgres runs when EMDASH_TEST_PG is set.
*
* These tests verify that migrations, schema registry, and content CRUD
* work identically across dialects.
*/
import { it, expect, beforeEach, afterEach } from "vitest";
import { runMigrations, getMigrationStatus } from "../../../src/database/migrations/runner.js";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import {
createForDialect,
describeEachDialect,
setupForDialect,
setupForDialectWithCollections,
teardownForDialect,
type DialectTestContext,
} from "../../utils/test-db.js";
// ---------------------------------------------------------------------------
// Migrations
// ---------------------------------------------------------------------------
describeEachDialect("Migrations", (dialect) => {
let ctx: DialectTestContext;
beforeEach(async () => {
// Bare database — no migrations yet. Tests run them explicitly.
ctx = await createForDialect(dialect);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("runs all migrations and creates system tables", async () => {
await runMigrations(ctx.db);
const tables = [
"revisions",
"taxonomies",
"content_taxonomies",
"media",
"users",
"options",
"audit_logs",
"_emdash_migrations",
"_emdash_collections",
"_emdash_fields",
"_plugin_storage",
"_plugin_state",
"_plugin_indexes",
"_emdash_sections",
"_emdash_bylines",
"_emdash_content_bylines",
];
for (const table of tables) {
const result = await ctx.db
.selectFrom(table as keyof Database)
.selectAll()
.execute();
expect(Array.isArray(result), `table ${table} should exist`).toBe(true);
}
});
it("tracks migrations in _emdash_migrations", async () => {
await runMigrations(ctx.db);
const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();
expect(migrations).toHaveLength(31);
expect(migrations[0]?.name).toBe("001_initial");
});
it("is idempotent", async () => {
await runMigrations(ctx.db);
await runMigrations(ctx.db);
const migrations = await ctx.db.selectFrom("_emdash_migrations").selectAll().execute();
expect(migrations).toHaveLength(31);
});
it("reports correct migration status", async () => {
const before = await getMigrationStatus(ctx.db);
expect(before.pending).toContain("001_initial");
expect(before.applied).toHaveLength(0);
await runMigrations(ctx.db);
const after = await getMigrationStatus(ctx.db);
expect(after.applied).toContain("001_initial");
expect(after.pending).toHaveLength(0);
});
});
// ---------------------------------------------------------------------------
// Schema registry
// ---------------------------------------------------------------------------
describeEachDialect("Schema registry", (dialect) => {
let ctx: DialectTestContext;
let registry: SchemaRegistry;
beforeEach(async () => {
ctx = await setupForDialect(dialect);
await runMigrations(ctx.db);
registry = new SchemaRegistry(ctx.db);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("creates a collection and its dynamic table", async () => {
await registry.createCollection({
slug: "article",
label: "Articles",
labelSingular: "Article",
});
// Dynamic table should exist
const rows = await ctx.db
.selectFrom("ec_article" as keyof Database)
.selectAll()
.execute();
expect(Array.isArray(rows)).toBe(true);
// Registry should have the collection
const collections = await registry.listCollections();
expect(collections.map((c) => c.slug)).toContain("article");
});
it("adds fields to a collection", async () => {
await registry.createCollection({
slug: "post",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("post", {
slug: "title",
label: "Title",
type: "string",
});
await registry.createField("post", {
slug: "body",
label: "Body",
type: "portableText",
});
await registry.createField("post", {
slug: "views",
label: "Views",
type: "integer",
});
const coll = await registry.getCollectionWithFields("post");
expect(coll).not.toBeNull();
const slugs = coll!.fields.map((f) => f.slug);
expect(slugs).toContain("title");
expect(slugs).toContain("body");
expect(slugs).toContain("views");
});
it("deletes a collection and drops its table", async () => {
await registry.createCollection({
slug: "temp",
label: "Temp",
labelSingular: "Temp",
});
// Verify it exists
const before = await registry.listCollections();
expect(before.map((c) => c.slug)).toContain("temp");
await registry.deleteCollection("temp");
const after = await registry.listCollections();
expect(after.map((c) => c.slug)).not.toContain("temp");
});
});
// ---------------------------------------------------------------------------
// Content CRUD
// ---------------------------------------------------------------------------
describeEachDialect("Content CRUD", (dialect) => {
let ctx: DialectTestContext;
let repo: ContentRepository;
beforeEach(async () => {
ctx = await setupForDialectWithCollections(dialect);
repo = new ContentRepository(ctx.db);
});
afterEach(async () => {
await teardownForDialect(ctx);
});
it("creates and retrieves content", async () => {
const created = await repo.create({
type: "post",
slug: "hello-world",
data: {
title: "Hello World",
content: [{ _type: "block", children: [{ _type: "span", text: "Content" }] }],
},
status: "draft",
});
expect(created.id).toBeDefined();
expect(created.slug).toBe("hello-world");
const found = await repo.findById("post", created.id);
expect(found).not.toBeNull();
expect(found!.data.title).toBe("Hello World");
expect(found!.slug).toBe("hello-world");
});
it("updates content", async () => {
const created = await repo.create({
type: "post",
slug: "original",
data: { title: "Original" },
status: "draft",
});
const updated = await repo.update("post", created.id, {
data: { title: "Updated" },
});
expect(updated.data.title).toBe("Updated");
expect(updated.slug).toBe("original");
});
it("lists content with pagination", async () => {
for (let i = 0; i < 5; i++) {
await repo.create({
type: "post",
slug: `post-${i}`,
data: { title: `Post ${i}` },
status: "draft",
});
}
const result = await repo.findMany("post", { limit: 3 });
expect(result.items).toHaveLength(3);
if (result.nextCursor) {
const page2 = await repo.findMany("post", {
limit: 3,
cursor: result.nextCursor,
});
expect(page2.items).toHaveLength(2);
}
});
it("soft-deletes content", async () => {
const created = await repo.create({
type: "post",
slug: "to-delete",
data: { title: "To Delete" },
status: "draft",
});
const deleted = await repo.delete("post", created.id);
expect(deleted).toBe(true);
const found = await repo.findById("post", created.id);
expect(found).toBeNull();
});
it("filters by status", async () => {
await repo.create({
type: "post",
slug: "draft-post",
data: { title: "Draft Post" },
status: "draft",
});
await repo.create({
type: "post",
slug: "published-post",
data: { title: "Published Post" },
status: "published",
});
const drafts = await repo.findMany("post", { where: { status: "draft" } });
expect(drafts.items).toHaveLength(1);
expect(drafts.items[0]?.data.title).toBe("Draft Post");
const published = await repo.findMany("post", { where: { status: "published" } });
expect(published.items).toHaveLength(1);
expect(published.items[0]?.data.title).toBe("Published Post");
});
it("enforces unique slug within a collection", async () => {
await repo.create({
type: "post",
slug: "same-slug",
data: { title: "First" },
status: "draft",
});
await expect(
repo.create({
type: "post",
slug: "same-slug",
data: { title: "Second" },
status: "draft",
}),
).rejects.toThrow();
});
it("isolates collections", async () => {
await repo.create({
type: "post",
slug: "shared-slug",
data: { title: "A Post" },
status: "draft",
});
await repo.create({
type: "page",
slug: "shared-slug",
data: { title: "A Page" },
status: "draft",
});
const posts = await repo.findMany("post");
const pages = await repo.findMany("page");
expect(posts.items).toHaveLength(1);
expect(pages.items).toHaveLength(1);
expect(posts.items[0]?.data.title).toBe("A Post");
expect(pages.items[0]?.data.title).toBe("A Page");
});
});

View File

@@ -0,0 +1,412 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import { runMigrations, getMigrationStatus } from "../../../src/database/migrations/runner.js";
import type { Database } from "../../../src/database/types.js";
describe("Database Migrations (Integration)", () => {
let db: Kysely<Database>;
beforeEach(() => {
// Create fresh in-memory database for each test
db = createDatabase({ url: ":memory:" });
});
afterEach(async () => {
// Close the database connection
await db.destroy();
});
it("should create all tables from migrations", async () => {
await runMigrations(db);
// Verify all tables exist by querying them
// Note: No generic "content" table - collections create ec_* tables dynamically
const tables = [
"revisions",
"taxonomies",
"content_taxonomies",
"media",
"users",
"options",
"audit_logs",
"_emdash_migrations",
"_emdash_collections",
"_emdash_fields",
"_plugin_storage",
"_plugin_state",
"_plugin_indexes",
"_emdash_sections",
"_emdash_bylines",
"_emdash_content_bylines",
];
for (const table of tables) {
// Query table to verify it exists
const result = await db
.selectFrom(table as keyof Database)
.selectAll()
.execute();
expect(Array.isArray(result)).toBe(true);
}
});
it("should track migration in _emdash_migrations table", async () => {
await runMigrations(db);
const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
expect(migrations).toHaveLength(31);
expect(migrations[0]?.name).toBe("001_initial");
expect(migrations[0]?.timestamp).toBeDefined();
expect(migrations[1]?.name).toBe("002_media_status");
expect(migrations[1]?.timestamp).toBeDefined();
expect(migrations[2]?.name).toBe("003_schema_registry");
expect(migrations[2]?.timestamp).toBeDefined();
expect(migrations[3]?.name).toBe("004_plugins");
expect(migrations[3]?.timestamp).toBeDefined();
expect(migrations[4]?.name).toBe("005_menus");
expect(migrations[4]?.timestamp).toBeDefined();
expect(migrations[5]?.name).toBe("006_taxonomy_defs");
expect(migrations[5]?.timestamp).toBeDefined();
expect(migrations[6]?.name).toBe("007_widgets");
expect(migrations[6]?.timestamp).toBeDefined();
expect(migrations[7]?.name).toBe("008_auth");
expect(migrations[7]?.timestamp).toBeDefined();
expect(migrations[8]?.name).toBe("009_user_disabled");
expect(migrations[8]?.timestamp).toBeDefined();
expect(migrations[9]?.name).toBe("011_sections");
expect(migrations[9]?.timestamp).toBeDefined();
expect(migrations[10]?.name).toBe("012_search");
expect(migrations[10]?.timestamp).toBeDefined();
expect(migrations[11]?.name).toBe("013_scheduled_publishing");
expect(migrations[11]?.timestamp).toBeDefined();
expect(migrations[12]?.name).toBe("014_draft_revisions");
expect(migrations[12]?.timestamp).toBeDefined();
expect(migrations[13]?.name).toBe("015_indexes");
expect(migrations[13]?.timestamp).toBeDefined();
expect(migrations[14]?.name).toBe("016_api_tokens");
expect(migrations[14]?.timestamp).toBeDefined();
expect(migrations[15]?.name).toBe("017_authorization_codes");
expect(migrations[15]?.timestamp).toBeDefined();
});
it("should be idempotent (running twice is safe)", async () => {
await runMigrations(db);
await runMigrations(db);
const migrations = await db.selectFrom("_emdash_migrations").selectAll().execute();
// Should still only have thirty-one migration records
expect(migrations).toHaveLength(31);
});
it("should report correct migration status", async () => {
const statusBefore = await getMigrationStatus(db);
expect(statusBefore.pending).toContain("001_initial");
expect(statusBefore.pending).toContain("002_media_status");
expect(statusBefore.applied).toHaveLength(0);
await runMigrations(db);
const statusAfter = await getMigrationStatus(db);
expect(statusAfter.applied).toContain("001_initial");
expect(statusAfter.applied).toContain("002_media_status");
expect(statusAfter.pending).toHaveLength(0);
});
it("should create schema registry tables", async () => {
  await runMigrations(db);
  // Test collections table
  // Inserting and reading back proves the migration created the table with
  // the expected columns.
  const testId = "test-collection";
  await db
    .insertInto("_emdash_collections")
    .values({
      id: testId,
      slug: "posts",
      label: "Posts",
      label_singular: "Post",
    })
    .execute();
  const collection = await db
    .selectFrom("_emdash_collections")
    .selectAll()
    .where("id", "=", testId)
    .executeTakeFirst();
  expect(collection).toBeDefined();
  expect(collection?.slug).toBe("posts");
  expect(collection?.label).toBe("Posts");
  // created_at was not supplied above, so a defined value means the column
  // default filled it in.
  expect(collection?.created_at).toBeDefined();
});
it("should enforce unique constraint on collection slug", async () => {
  await runMigrations(db);
  // First collection with this slug inserts cleanly.
  const original = {
    id: "id1",
    slug: "posts",
    label: "Posts",
  };
  await db.insertInto("_emdash_collections").values(original).execute();
  // A second collection reusing the same slug must be rejected by the
  // unique constraint.
  const duplicate = db.insertInto("_emdash_collections").values({
    id: "id2",
    slug: "posts",
    label: "Posts Again",
  });
  await expect(duplicate.execute()).rejects.toThrow();
});
it("should create fields table with foreign key to collections", async () => {
  await runMigrations(db);
  // Create collection first
  const collectionId = "collection-1";
  await db
    .insertInto("_emdash_collections")
    .values({
      id: collectionId,
      slug: "posts",
      label: "Posts",
    })
    .execute();
  // Create field
  // A field row referencing the parent collection must insert cleanly and
  // be retrievable by collection_id.
  await db
    .insertInto("_emdash_fields")
    .values({
      id: "field-1",
      collection_id: collectionId,
      slug: "title",
      label: "Title",
      type: "string",
      column_type: "TEXT",
      required: 0,
      unique: 0,
      sort_order: 0,
    })
    .execute();
  const fields = await db
    .selectFrom("_emdash_fields")
    .selectAll()
    .where("collection_id", "=", collectionId)
    .execute();
  expect(fields).toHaveLength(1);
  expect(fields[0]?.slug).toBe("title");
});
it("should create revisions table with collection+entry_id", async () => {
  await runMigrations(db);
  // Revisions are keyed by (collection, entry_id) rather than a row FK,
  // so a revision can be written for any content entry.
  const revision = {
    id: "rev-1",
    collection: "posts",
    entry_id: "entry-1",
    data: JSON.stringify({ title: "Revised" }),
  };
  await db.insertInto("revisions").values(revision).execute();
  const stored = await db
    .selectFrom("revisions")
    .selectAll()
    .where("collection", "=", "posts")
    .where("entry_id", "=", "entry-1")
    .execute();
  expect(stored).toHaveLength(1);
  expect(stored[0]?.collection).toBe("posts");
});
it("should create users table with unique email constraint", async () => {
  await runMigrations(db);
  await db
    .insertInto("users")
    .values({
      id: "user-1",
      email: "test@example.com",
      name: "Test User",
      role: 50, // ADMIN
      email_verified: 1,
    })
    .execute();
  // Duplicate email should fail
  // (different id and role; only the email collides).
  await expect(
    db
      .insertInto("users")
      .values({
        id: "user-2",
        email: "test@example.com",
        role: 10, // SUBSCRIBER
        email_verified: 1,
      })
      .execute(),
  ).rejects.toThrow();
});
it("should create taxonomies table with hierarchical support", async () => {
  await runMigrations(db);
  // Hierarchy is modelled with a self-referencing parent_id column.
  // Create parent category
  const parentId = "cat-parent";
  await db
    .insertInto("taxonomies")
    .values({
      id: parentId,
      name: "category",
      slug: "parent",
      label: "Parent Category",
    })
    .execute();
  // Create child category
  await db
    .insertInto("taxonomies")
    .values({
      id: "cat-child",
      name: "category",
      slug: "child",
      label: "Child Category",
      parent_id: parentId,
    })
    .execute();
  const child = await db
    .selectFrom("taxonomies")
    .selectAll()
    .where("id", "=", "cat-child")
    .executeTakeFirst();
  // The child must round-trip with its parent link intact.
  expect(child?.parent_id).toBe(parentId);
});
it("should create content_taxonomies junction table", async () => {
  await runMigrations(db);
  const taxonomyId = "tax-1";
  // Create taxonomy
  await db
    .insertInto("taxonomies")
    .values({
      id: taxonomyId,
      name: "category",
      slug: "tech",
      label: "Technology",
    })
    .execute();
  // Assign taxonomy to content entry (collection + entry_id)
  // The junction row has no surrogate id of its own.
  await db
    .insertInto("content_taxonomies")
    .values({
      collection: "posts",
      entry_id: "entry-1",
      taxonomy_id: taxonomyId,
    })
    .execute();
  const assignments = await db
    .selectFrom("content_taxonomies")
    .selectAll()
    .where("collection", "=", "posts")
    .where("entry_id", "=", "entry-1")
    .execute();
  expect(assignments).toHaveLength(1);
  expect(assignments[0]?.taxonomy_id).toBe(taxonomyId);
});
it("should create media table", async () => {
  await runMigrations(db);
  // Insert a fully-populated media record, then spot-check that the image
  // dimensions round-trip as numbers.
  const record = {
    id: "media-1",
    filename: "photo.jpg",
    mime_type: "image/jpeg",
    size: 1024000,
    width: 1920,
    height: 1080,
    alt: "Test photo",
    storage_key: "uploads/photo.jpg",
    status: "ready",
  };
  await db.insertInto("media").values(record).execute();
  const stored = await db
    .selectFrom("media")
    .selectAll()
    .where("id", "=", "media-1")
    .executeTakeFirst();
  expect(stored).toBeDefined();
  expect(stored?.width).toBe(1920);
  expect(stored?.height).toBe(1080);
});
it("should create options table for key-value storage", async () => {
  await runMigrations(db);
  // Option values are stored as JSON-encoded text keyed by name.
  await db
    .insertInto("options")
    .values({ name: "site_title", value: JSON.stringify("My Site") })
    .execute();
  const row = await db
    .selectFrom("options")
    .selectAll()
    .where("name", "=", "site_title")
    .executeTakeFirst();
  expect(row).toBeDefined();
  expect(JSON.parse(row!.value)).toBe("My Site");
});
it("should create audit_logs table with indexes", async () => {
  await runMigrations(db);
  // NOTE(review): this only exercises insert + select by actor_id; the
  // indexes named in the title are not asserted directly — verify via
  // PRAGMA index_list if index coverage matters.
  await db
    .insertInto("audit_logs")
    .values({
      id: "log-1",
      actor_id: "user-1",
      actor_ip: "192.168.1.1",
      action: "content:create",
      resource_type: "content",
      resource_id: "post-1",
      status: "success",
    })
    .execute();
  const logs = await db
    .selectFrom("audit_logs")
    .selectAll()
    .where("actor_id", "=", "user-1")
    .execute();
  expect(logs).toHaveLength(1);
  expect(logs[0]?.action).toBe("content:create");
});
});

View File

@@ -0,0 +1,94 @@
{
"$schema": "https://emdashcms.com/seed.schema.json",
"version": "1",
"meta": {
"name": "E2E Test Fixture",
"description": "Schema for E2E tests"
},
"taxonomies": [
{
"name": "categories",
"label": "Categories",
"labelSingular": "Category",
"hierarchical": true,
"collections": ["posts"],
"terms": [
{ "slug": "news", "label": "News" },
{ "slug": "tutorials", "label": "Tutorials" },
{ "slug": "opinion", "label": "Opinion" }
]
}
],
"sections": [
{
"slug": "hero",
"title": "Hero Section",
"description": "Main hero area",
"content": [
{
"_type": "block",
"_key": "b1",
"style": "normal",
"children": [{ "_type": "span", "_key": "s1", "text": "Welcome to our site" }],
"markDefs": []
}
]
}
],
"collections": [
{
"slug": "posts",
"label": "Posts",
"labelSingular": "Post",
"fields": [
{
"slug": "title",
"label": "Title",
"type": "string",
"required": true
},
{
"slug": "body",
"label": "Body",
"type": "portableText"
},
{
"slug": "excerpt",
"label": "Excerpt",
"type": "text"
},
{
"slug": "theme_color",
"label": "Theme Color",
"type": "string",
"widget": "color:picker"
}
]
},
{
"slug": "pages",
"label": "Pages",
"labelSingular": "Page",
"fields": [
{
"slug": "title",
"label": "Title",
"type": "string",
"required": true
},
{
"slug": "body",
"label": "Body",
"type": "portableText"
}
]
}
],
"bylines": [
{
"id": "fixture-editorial",
"slug": "fixture-editorial",
"displayName": "Fixture Editorial"
}
]
}

View File

@@ -0,0 +1,41 @@
/**
* Minimal Astro config for e2e tests.
*
* Uses EMDASH_TEST_DB env var for the database path so each
* test run gets an isolated database.
*/
import node from "@astrojs/node";
import react from "@astrojs/react";
import { colorPlugin } from "@emdashcms/plugin-color";
import { defineConfig } from "astro/config";
import emdash from "emdash/astro";
import { sqlite } from "emdash/db";
// Database path is injected per test run; fall back to a local file for
// manual runs.
const dbUrl = process.env.EMDASH_TEST_DB || "file:./test.db";
export default defineConfig({
  // SSR output with the standalone node adapter so e2e tests can boot the
  // built server directly.
  output: "server",
  adapter: node({ mode: "standalone" }),
  integrations: [
    react(),
    emdash({
      database: sqlite({ url: dbUrl }),
      plugins: [colorPlugin()],
    }),
  ],
  // Three locales with non-default ones falling back to English.
  i18n: {
    defaultLocale: "en",
    locales: ["en", "fr", "es"],
    fallback: { fr: "en", es: "en" },
  },
  devToolbar: { enabled: false },
  vite: {
    server: {
      fs: {
        // When running from a temp dir, node_modules is symlinked back to the
        // monorepo. Vite needs permission to serve files from the real paths.
        strict: false,
      },
    },
  },
});

View File

@@ -0,0 +1,39 @@
// Generated by EmDash on dev server start
// Do not edit manually
/// <reference types="emdash/locals" />
import type { ContentBylineCredit, PortableTextBlock } from "emdash";
export interface Page {
id: string;
slug: string | null;
status: string;
title: string;
body?: PortableTextBlock[];
createdAt: Date;
updatedAt: Date;
publishedAt: Date | null;
bylines?: ContentBylineCredit[];
}
export interface Post {
id: string;
slug: string | null;
status: string;
title: string;
body?: PortableTextBlock[];
excerpt?: string;
theme_color?: string;
createdAt: Date;
updatedAt: Date;
publishedAt: Date | null;
bylines?: ContentBylineCredit[];
}
declare module "emdash" {
interface EmDashCollections {
pages: Page;
posts: Post;
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "emdash-integration-fixture",
"private": true,
"type": "module",
"dependencies": {
"@astrojs/node": "catalog:",
"@astrojs/react": "catalog:",
"@emdashcms/auth": "workspace:*",
"@emdashcms/plugin-color": "workspace:*",
"astro": "catalog:",
"better-sqlite3": "^11.10.0",
"emdash": "workspace:*",
"react": "^19.1.0",
"react-dom": "^19.1.0"
}
}

View File

@@ -0,0 +1 @@
/// <reference types="astro/client" />

View File

@@ -0,0 +1,6 @@
import { defineLiveCollection } from "astro:content";
import { emdashLoader } from "emdash/runtime";
// Register EmDash's live loader under a reserved collection name so CMS
// content resolves through Astro's content layer.
export const collections = {
  _emdash: defineLiveCollection({ loader: emdashLoader() }),
};

View File

@@ -0,0 +1,21 @@
---
// Index page: lists every post returned by the loader, with an #empty
// marker when there are none (both hooks are used by the e2e assertions).
import { getEmDashCollection } from "emdash";
const { entries: posts } = await getEmDashCollection("posts");
---
<html>
  <body>
    <h1>Posts</h1>
    <ul id="post-list">
      {
        posts.map((p) => (
          <li>
            <a href={`/posts/${p.id}`}>{p.data.title}</a>
            {p.data.excerpt && <span class="excerpt">{p.data.excerpt}</span>}
          </li>
        ))
      }
    </ul>
    {posts.length === 0 && <p id="empty">No posts</p>}
  </body>
</html>

View File

@@ -0,0 +1,21 @@
---
// Detail page: renders a single post by slug with its Portable Text body
// plus the comment thread and comment form.
import { getEmDashEntry } from "emdash";
import { PortableText, Comments, CommentForm } from "emdash/ui";
const { slug } = Astro.params;
// Missing route param — bounce to the 404 page.
if (!slug) return Astro.redirect("/404");
const { entry: post } = await getEmDashEntry("posts", slug);
// Unknown slug — plain 404 response (no redirect).
if (!post) return new Response("Not found", { status: 404 });
---
<html>
  <body>
    <article>
      <h1 id="title">{post.data.title}</h1>
      {post.data.excerpt && <p id="excerpt">{post.data.excerpt}</p>}
      <div id="body"><PortableText value={post.data.body} /></div>
    </article>
    <Comments collection="posts" contentId={post.data.id} threaded />
    <CommentForm collection="posts" contentId={post.data.id} />
  </body>
</html>

View File

@@ -0,0 +1,5 @@
{
"extends": "astro/tsconfigs/base",
"compilerOptions": { "types": ["node"] },
"include": ["src", ".astro/types.d.ts"]
}

View File

@@ -0,0 +1,839 @@
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { ContentRepository } from "../../../src/database/repositories/content.js";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { FTSManager } from "../../../src/search/fts-manager.js";
import { searchWithDb } from "../../../src/search/query.js";
import { applySeed } from "../../../src/seed/apply.js";
import type { SeedFile } from "../../../src/seed/types.js";
import { validateSeed } from "../../../src/seed/validate.js";
import { createPostFixture } from "../../utils/fixtures.js";
import { setupTestDatabaseWithCollections, teardownTestDatabase } from "../../utils/test-db.js";
describe("i18n (Integration)", () => {
let db: Kysely<Database>;
let repo: ContentRepository;
// Each test gets a fresh database with collections pre-registered, so
// tests cannot leak state into one another.
beforeEach(async () => {
  db = await setupTestDatabaseWithCollections();
  repo = new ContentRepository(db);
});
afterEach(async () => {
  await teardownTestDatabase(db);
});
// ─── 1. Migration — i18n columns exist ──────────────────────────
describe("Migration — i18n columns", () => {
  // These tests inspect raw table metadata via SQLite PRAGMAs rather than
  // going through the repository, so they verify the migration itself.
  it("should have locale and translation_group columns on content tables", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA table_info(ec_post)
    `.execute(db);
    const columnNames = result.rows.map((r) => r.name);
    expect(columnNames).toContain("locale");
    expect(columnNames).toContain("translation_group");
  });
  it("should default locale to 'en'", async () => {
    const result = await sql<{ name: string; dflt_value: string | null }>`
      PRAGMA table_info(ec_post)
    `.execute(db);
    const localeCol = result.rows.find((r) => r.name === "locale");
    expect(localeCol).toBeDefined();
    // SQLite reports text defaults with their quotes, hence 'en' in quotes.
    expect(localeCol!.dflt_value).toBe("'en'");
  });
  it("should have translatable column on _emdash_fields", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA table_info(_emdash_fields)
    `.execute(db);
    const columnNames = result.rows.map((r) => r.name);
    expect(columnNames).toContain("translatable");
  });
  it("should have compound unique constraint on slug+locale", async () => {
    // Insert same slug, different locale — should succeed
    await sql`
      INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
      VALUES ('id1', 'hello', 'en', 'id1', 'draft', 1, datetime('now'), datetime('now'))
    `.execute(db);
    await sql`
      INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
      VALUES ('id2', 'hello', 'fr', 'id1', 'draft', 1, datetime('now'), datetime('now'))
    `.execute(db);
    // Same slug, same locale — should fail
    await expect(
      sql`
        INSERT INTO ec_post (id, slug, locale, translation_group, status, version, created_at, updated_at)
        VALUES ('id3', 'hello', 'en', 'id3', 'draft', 1, datetime('now'), datetime('now'))
      `.execute(db),
    ).rejects.toThrow();
  });
  it("should have locale and translation_group indexes", async () => {
    const result = await sql<{ name: string }>`
      PRAGMA index_list(ec_post)
    `.execute(db);
    const indexNames = result.rows.map((r) => r.name);
    expect(indexNames).toContain("idx_ec_post_locale");
    expect(indexNames).toContain("idx_ec_post_translation_group");
  });
});
// ─── 2. ContentRepository — locale-aware CRUD ───────────────────
describe("ContentRepository — locale-aware CRUD", () => {
  // Covers create/find/count/translation behavior with and without an
  // explicit locale.
  it("create() without locale defaults to 'en'", async () => {
    const post = await repo.create(createPostFixture());
    expect(post.locale).toBe("en");
  });
  it("create() with explicit locale stores it", async () => {
    const post = await repo.create(createPostFixture({ locale: "fr", slug: "bonjour" }));
    expect(post.locale).toBe("fr");
  });
  it("create() with translationOf links via translation_group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello-world", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour-monde",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour le Monde" },
      }),
    );
    // Both should share the same translation_group
    expect(frPost.translationGroup).toBe(enPost.translationGroup);
    // The group should be the original item's id (since it was first)
    expect(enPost.translationGroup).toBe(enPost.id);
  });
  it("create() with translationOf on a chained translation uses the root group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    // Create a third translation linked to the French version
    // (not the English root) — the group must still resolve to the root.
    const dePost = await repo.create(
      createPostFixture({
        slug: "hallo",
        locale: "de",
        translationOf: frPost.id,
        data: { title: "Hallo" },
      }),
    );
    // All three should share the same translation_group
    expect(dePost.translationGroup).toBe(enPost.id);
    expect(frPost.translationGroup).toBe(enPost.id);
  });
  it("create() with translationOf pointing to non-existent ID throws", async () => {
    await expect(
      repo.create(
        createPostFixture({
          slug: "orphan",
          locale: "fr",
          translationOf: "NONEXISTENT_ID_12345678",
        }),
      ),
    ).rejects.toThrow("Translation source content not found");
  });
  it("same slug different locales are allowed", async () => {
    const en = await repo.create(createPostFixture({ slug: "about", locale: "en" }));
    const fr = await repo.create(
      createPostFixture({
        slug: "about",
        locale: "fr",
        data: { title: "À propos" },
      }),
    );
    expect(en.slug).toBe("about");
    expect(fr.slug).toBe("about");
    expect(en.id).not.toBe(fr.id);
  });
  it("same slug same locale is rejected", async () => {
    await repo.create(createPostFixture({ slug: "unique-slug", locale: "en" }));
    await expect(
      repo.create(
        createPostFixture({
          slug: "unique-slug",
          locale: "en",
          data: { title: "Duplicate" },
        }),
      ),
    ).rejects.toThrow();
  });
  // ── findBySlug ────────────────────────────────────────────────
  it("findBySlug() without locale returns any match", async () => {
    await repo.create(createPostFixture({ slug: "shared-slug", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "shared-slug",
        locale: "fr",
        data: { title: "Version FR" },
      }),
    );
    // NOTE(review): "any match" — which locale wins is unspecified here,
    // so only the slug is asserted.
    const found = await repo.findBySlug("post", "shared-slug");
    expect(found).not.toBeNull();
    expect(found!.slug).toBe("shared-slug");
  });
  it("findBySlug() with locale filters to that locale", async () => {
    await repo.create(createPostFixture({ slug: "about", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "about",
        locale: "fr",
        data: { title: "À propos" },
      }),
    );
    const en = await repo.findBySlug("post", "about", "en");
    expect(en).not.toBeNull();
    expect(en!.locale).toBe("en");
    const fr = await repo.findBySlug("post", "about", "fr");
    expect(fr).not.toBeNull();
    expect(fr!.locale).toBe("fr");
    const de = await repo.findBySlug("post", "about", "de");
    expect(de).toBeNull();
  });
  // ── findByIdOrSlug ────────────────────────────────────────────
  it("findByIdOrSlug() — ID lookup ignores locale param", async () => {
    const post = await repo.create(createPostFixture({ slug: "test-post", locale: "en" }));
    // ID lookup should find it regardless of locale param
    const found = await repo.findByIdOrSlug("post", post.id, "fr");
    expect(found).not.toBeNull();
    expect(found!.id).toBe(post.id);
    expect(found!.locale).toBe("en");
  });
  it("findByIdOrSlug() — slug lookup respects locale", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "test", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "test",
        locale: "fr",
        data: { title: "Test FR" },
      }),
    );
    const foundEn = await repo.findByIdOrSlug("post", "test", "en");
    expect(foundEn).not.toBeNull();
    expect(foundEn!.id).toBe(enPost.id);
    const foundFr = await repo.findByIdOrSlug("post", "test", "fr");
    expect(foundFr).not.toBeNull();
    expect(foundFr!.id).toBe(frPost.id);
  });
  // ── findMany ──────────────────────────────────────────────────
  it("findMany() without locale returns all locales", async () => {
    await repo.create(createPostFixture({ slug: "en-post", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "fr-post",
        locale: "fr",
        data: { title: "Post FR" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "de-post",
        locale: "de",
        data: { title: "Post DE" },
      }),
    );
    const result = await repo.findMany("post");
    expect(result.items).toHaveLength(3);
  });
  it("findMany() with locale filters to that locale", async () => {
    await repo.create(createPostFixture({ slug: "en-post", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "fr-post",
        locale: "fr",
        data: { title: "Post FR" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "de-post",
        locale: "de",
        data: { title: "Post DE" },
      }),
    );
    const frResult = await repo.findMany("post", {
      where: { locale: "fr" },
    });
    expect(frResult.items).toHaveLength(1);
    expect(frResult.items[0]!.locale).toBe("fr");
  });
  // ── count ─────────────────────────────────────────────────────
  it("count() without locale counts all", async () => {
    await repo.create(createPostFixture({ slug: "post-en", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "post-fr",
        locale: "fr",
        data: { title: "FR" },
      }),
    );
    const total = await repo.count("post");
    expect(total).toBe(2);
  });
  it("count() with locale counts only that locale", async () => {
    await repo.create(createPostFixture({ slug: "post-en", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "post-fr",
        locale: "fr",
        data: { title: "FR" },
      }),
    );
    const enCount = await repo.count("post", { locale: "en" });
    expect(enCount).toBe(1);
    const deCount = await repo.count("post", { locale: "de" });
    expect(deCount).toBe(0);
  });
  // ── findTranslations ──────────────────────────────────────────
  it("findTranslations() returns all locales for a translation group", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "hallo",
        locale: "de",
        translationOf: enPost.id,
        data: { title: "Hallo" },
      }),
    );
    const translations = await repo.findTranslations("post", enPost.translationGroup!);
    expect(translations).toHaveLength(3);
    // Sort for a deterministic comparison; locale is nullable in the row type.
    const locales = translations
      .map((t) => t.locale)
      .toSorted((a, b) => (a ?? "").localeCompare(b ?? ""));
    expect(locales).toEqual(["de", "en", "fr"]);
  });
  it("findTranslations() returns only non-deleted items", async () => {
    const enPost = await repo.create(createPostFixture({ slug: "hello", locale: "en" }));
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour",
        locale: "fr",
        translationOf: enPost.id,
        data: { title: "Bonjour" },
      }),
    );
    // Soft-delete the French translation
    await repo.delete("post", frPost.id);
    const translations = await repo.findTranslations("post", enPost.translationGroup!);
    expect(translations).toHaveLength(1);
    expect(translations[0]!.locale).toBe("en");
  });
});
// ─── 3. FTS — locale-aware search ───────────────────────────────
describe("FTS — locale-aware search", () => {
  let registry: SchemaRegistry;
  let ftsManager: FTSManager;
  beforeEach(async () => {
    registry = new SchemaRegistry(db);
    ftsManager = new FTSManager(db);
    // Mark title as searchable and enable FTS
    await registry.updateField("post", "title", { searchable: true });
    await ftsManager.enableSearch("post");
  });
  it("search with locale filter returns only that locale's results", async () => {
    // Create published posts in different locales
    const enPost = await repo.create(
      createPostFixture({
        slug: "hello-world",
        locale: "en",
        status: "published",
        data: { title: "Hello World" },
      }),
    );
    const frPost = await repo.create(
      createPostFixture({
        slug: "bonjour-monde",
        locale: "fr",
        status: "published",
        data: { title: "Bonjour le Monde" },
      }),
    );
    // Search for "world" — English only
    const enResults = await searchWithDb(db, "Hello", {
      collections: ["post"],
      locale: "en",
      status: "published",
    });
    expect(enResults.items.length).toBeGreaterThanOrEqual(1);
    expect(enResults.items.every((r) => r.locale === "en")).toBe(true);
    expect(enResults.items.some((r) => r.id === enPost.id)).toBe(true);
    // Search for "Bonjour" — French only
    const frResults = await searchWithDb(db, "Bonjour", {
      collections: ["post"],
      locale: "fr",
      status: "published",
    });
    expect(frResults.items.length).toBeGreaterThanOrEqual(1);
    expect(frResults.items.every((r) => r.locale === "fr")).toBe(true);
    expect(frResults.items.some((r) => r.id === frPost.id)).toBe(true);
  });
  it("search without locale returns results from all locales", async () => {
    await repo.create(
      createPostFixture({
        slug: "universal-en",
        locale: "en",
        status: "published",
        data: { title: "Universal Content" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "universal-fr",
        locale: "fr",
        status: "published",
        data: { title: "Universal Contenu" },
      }),
    );
    // Both titles share the token "Universal", so an unfiltered search
    // must surface both locales.
    const results = await searchWithDb(db, "Universal", {
      collections: ["post"],
      status: "published",
    });
    expect(results.items).toHaveLength(2);
    const locales = results.items.map((r) => r.locale).toSorted();
    expect(locales).toEqual(["en", "fr"]);
  });
  it("FTS index includes locale column", async () => {
    // Verify the FTS table has the locale column by checking structure
    const exists = await ftsManager.ftsTableExists("post");
    expect(exists).toBe(true);
    // Create a post and verify it appears in FTS results with locale
    await repo.create(
      createPostFixture({
        slug: "fts-test",
        locale: "ja",
        status: "published",
        data: { title: "FTS Locale Test" },
      }),
    );
    const results = await searchWithDb(db, "FTS Locale", {
      collections: ["post"],
      locale: "ja",
      status: "published",
    });
    expect(results.items).toHaveLength(1);
    expect(results.items[0]!.locale).toBe("ja");
  });
  it("rebuilt index preserves locale-aware search", async () => {
    // Create content before rebuild
    await repo.create(
      createPostFixture({
        slug: "pre-rebuild-en",
        locale: "en",
        status: "published",
        data: { title: "Rebuild Test English" },
      }),
    );
    await repo.create(
      createPostFixture({
        slug: "pre-rebuild-fr",
        locale: "fr",
        status: "published",
        data: { title: "Rebuild Test French" },
      }),
    );
    // Rebuild the index
    await ftsManager.rebuildIndex("post", ["title"]);
    // Verify locale-aware search still works
    const enResults = await searchWithDb(db, "Rebuild", {
      collections: ["post"],
      locale: "en",
      status: "published",
    });
    expect(enResults.items).toHaveLength(1);
    expect(enResults.items[0]!.locale).toBe("en");
  });
});
// ─── 4. Seed — locale-aware content ─────────────────────────────
describe("Seed — locale-aware content", () => {
  it("applySeed() creates content with locale and translationOf", async () => {
    // translationOf in a seed file references another seed entry's id
    // (not a database id).
    const seed: SeedFile = {
      version: "1",
      content: {
        post: [
          {
            id: "welcome",
            slug: "welcome",
            locale: "en",
            status: "published",
            data: { title: "Welcome" },
          },
          {
            id: "welcome-fr",
            slug: "bienvenue",
            locale: "fr",
            translationOf: "welcome",
            status: "draft",
            data: { title: "Bienvenue" },
          },
          {
            id: "welcome-de",
            slug: "willkommen",
            locale: "de",
            translationOf: "welcome",
            status: "published",
            data: { title: "Willkommen" },
          },
        ],
      },
    };
    const result = await applySeed(db, seed, { includeContent: true });
    expect(result.content.created).toBe(3);
    expect(result.content.skipped).toBe(0);
    // Verify the entries exist with correct locales
    const seedRepo = new ContentRepository(db);
    const enPost = await seedRepo.findBySlug("post", "welcome", "en");
    const frPost = await seedRepo.findBySlug("post", "bienvenue", "fr");
    const dePost = await seedRepo.findBySlug("post", "willkommen", "de");
    expect(enPost).not.toBeNull();
    expect(frPost).not.toBeNull();
    expect(dePost).not.toBeNull();
    expect(enPost!.locale).toBe("en");
    expect(frPost!.locale).toBe("fr");
    expect(dePost!.locale).toBe("de");
    // All should share the same translation_group
    expect(frPost!.translationGroup).toBe(enPost!.translationGroup);
    expect(dePost!.translationGroup).toBe(enPost!.translationGroup);
  });
  it("applySeed() without locale falls back to default", async () => {
    const seed: SeedFile = {
      version: "1",
      content: {
        post: [
          {
            id: "plain",
            slug: "plain-post",
            data: { title: "No Locale" },
          },
        ],
      },
    };
    const result = await applySeed(db, seed, { includeContent: true });
    expect(result.content.created).toBe(1);
    const plainRepo = new ContentRepository(db);
    const post = await plainRepo.findBySlug("post", "plain-post");
    expect(post).not.toBeNull();
    expect(post!.locale).toBe("en"); // default
    expect(post!.translationGroup).toBe(post!.id); // self-reference
  });
  it("applySeed() skips existing entries with locale-aware lookup", async () => {
    // Pre-create an entry
    const skipRepo = new ContentRepository(db);
    await skipRepo.create(createPostFixture({ slug: "existing", locale: "fr" }));
    const seed: SeedFile = {
      version: "1",
      content: {
        post: [
          {
            id: "existing",
            slug: "existing",
            locale: "fr",
            data: { title: "Should Skip" },
          },
        ],
      },
    };
    // The (slug, locale) pair already exists, so the seed entry is skipped
    // rather than overwriting the stored title.
    const result = await applySeed(db, seed, { includeContent: true });
    expect(result.content.skipped).toBe(1);
    expect(result.content.created).toBe(0);
  });
  it("applySeed() rejects missing translationOf via validation", async () => {
    const seed: SeedFile = {
      version: "1",
      content: {
        post: [
          {
            id: "orphan-fr",
            slug: "orphelin",
            locale: "fr",
            translationOf: "nonexistent",
            data: { title: "Orphan" },
          },
        ],
      },
    };
    // Validation catches the bad reference before applySeed runs
    await expect(applySeed(db, seed, { includeContent: true })).rejects.toThrow(
      'references "nonexistent" which is not in this collection',
    );
  });
});
// ─── 5. Seed validation — i18n fields ───────────────────────────
describe("Seed validation — i18n fields", () => {
  // Pure validation tests: validateSeed() is synchronous and touches no
  // database, so these use raw object literals rather than SeedFile.
  it("validates translationOf requires locale", () => {
    const seed = {
      version: "1",
      content: {
        posts: [
          { id: "en", slug: "hello", data: { title: "Hello" } },
          {
            id: "fr",
            slug: "bonjour",
            translationOf: "en",
            data: { title: "Bonjour" },
          },
        ],
      },
    };
    const result = validateSeed(seed);
    expect(result.valid).toBe(false);
    expect(result.errors.some((e) => e.includes("locale is required when translationOf"))).toBe(
      true,
    );
  });
  it("validates translationOf references exist", () => {
    const seed = {
      version: "1",
      content: {
        posts: [
          {
            id: "fr",
            slug: "bonjour",
            locale: "fr",
            translationOf: "nonexistent",
            data: { title: "Bonjour" },
          },
        ],
      },
    };
    const result = validateSeed(seed);
    expect(result.valid).toBe(false);
    expect(
      result.errors.some((e) => e.includes('references "nonexistent" which is not in')),
    ).toBe(true);
  });
  it("valid seed with i18n fields passes validation", () => {
    const seed = {
      version: "1",
      content: {
        posts: [
          { id: "en", slug: "hello", locale: "en", data: { title: "Hello" } },
          {
            id: "fr",
            slug: "bonjour",
            locale: "fr",
            translationOf: "en",
            data: { title: "Bonjour" },
          },
        ],
      },
    };
    const result = validateSeed(seed);
    expect(result.valid).toBe(true);
    expect(result.errors).toHaveLength(0);
  });
});
// ─── 6. Non-i18n regression ─────────────────────────────────────
describe("Non-i18n regression", () => {
  // Guards that pre-i18n call sites (no locale arguments anywhere) keep
  // working after the i18n columns were added.
  it("content created without locale has locale 'en'", async () => {
    const post = await repo.create({
      type: "post",
      slug: "no-locale",
      data: { title: "No Locale Specified" },
    });
    expect(post.locale).toBe("en");
  });
  it("findMany without locale param returns all results", async () => {
    await repo.create(createPostFixture({ slug: "post-1" }));
    await repo.create(createPostFixture({ slug: "post-2" }));
    const result = await repo.findMany("post");
    expect(result.items).toHaveLength(2);
  });
  it("findBySlug works without locale param", async () => {
    const created = await repo.create(createPostFixture({ slug: "find-me" }));
    const found = await repo.findBySlug("post", "find-me");
    expect(found).not.toBeNull();
    expect(found!.id).toBe(created.id);
  });
  it("findByIdOrSlug works without locale param", async () => {
    const created = await repo.create(createPostFixture({ slug: "lookup-test" }));
    // By slug
    const bySlug = await repo.findByIdOrSlug("post", "lookup-test");
    expect(bySlug).not.toBeNull();
    expect(bySlug!.id).toBe(created.id);
    // By ID
    const byId = await repo.findByIdOrSlug("post", created.id);
    expect(byId).not.toBeNull();
    expect(byId!.id).toBe(created.id);
  });
  it("slug uniqueness is still enforced within the same locale", async () => {
    await repo.create(createPostFixture({ slug: "dupe-test" }));
    // Same slug, same default locale — should fail
    await expect(repo.create(createPostFixture({ slug: "dupe-test" }))).rejects.toThrow();
  });
  it("count works without locale param", async () => {
    await repo.create(createPostFixture({ slug: "count-1" }));
    await repo.create(createPostFixture({ slug: "count-2" }));
    const count = await repo.count("post");
    expect(count).toBe(2);
  });
  it("translation_group is auto-set to item id when no translationOf", async () => {
    const post = await repo.create(createPostFixture({ slug: "standalone" }));
    expect(post.translationGroup).toBe(post.id);
  });
  it("existing CRUD operations are unaffected by i18n columns", async () => {
    // Full lifecycle: create → update → soft-delete → restore.
    // Create
    const post = await repo.create(createPostFixture({ slug: "crud-test", status: "draft" }));
    expect(post.status).toBe("draft");
    // Update
    const updated = await repo.update("post", post.id, {
      data: { title: "Updated Title" },
    });
    expect(updated.data.title).toBe("Updated Title");
    expect(updated.locale).toBe("en"); // locale unchanged
    // Delete (soft)
    const deleted = await repo.delete("post", post.id);
    expect(deleted).toBe(true);
    // Should not be found
    const notFound = await repo.findById("post", post.id);
    expect(notFound).toBeNull();
    // Restore
    const restored = await repo.restore("post", post.id);
    expect(restored).toBe(true);
    const found = await repo.findById("post", post.id);
    expect(found).not.toBeNull();
    expect(found!.locale).toBe("en");
  });
});
});

View File

@@ -0,0 +1,193 @@
import SwaggerParser from "@apidevtools/swagger-parser";
import { describe, expect, it } from "vitest";
import { generateOpenApiDocument } from "../../../src/api/openapi/document.js";
describe("OpenAPI spec validation", () => {
it("produces a valid OpenAPI 3.1 document", async () => {
const doc = generateOpenApiDocument();
// swagger-parser.validate() resolves $refs and validates against the OAS JSON Schema.
// It throws if the document is invalid.
const validated = await SwaggerParser.validate(structuredClone(doc));
expect(validated.openapi).toBe("3.1.0");
expect(validated.info.title).toBe("EmDash CMS API");
});
it("resolves all $ref pointers without errors", async () => {
const doc = generateOpenApiDocument();
// dereference() resolves every $ref in the document tree.
// If any $ref points to a missing schema, it throws.
const dereferenced = await SwaggerParser.dereference(structuredClone(doc));
// After dereferencing, no $ref keys should remain.
// Use a replacer to handle circular references (e.g. PublicComment.replies)
const seen = new WeakSet();
const json = JSON.stringify(dereferenced, (_key, value) => {
if (typeof value === "object" && value !== null) {
if (seen.has(value)) return "[Circular]";
seen.add(value);
}
return value;
});
expect(json).not.toContain('"$ref"');
});
it("has all content paths with responses", () => {
const doc = generateOpenApiDocument();
const paths = doc.paths ?? {};
for (const [path, pathItem] of Object.entries(paths)) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { responses?: Record<string, unknown>; operationId?: string }
| undefined;
if (!op) continue;
// Every operation must have responses
expect(op.responses, `${method.toUpperCase()} ${path} missing responses`).toBeDefined();
// Every operation must have an operationId
expect(op.operationId, `${method.toUpperCase()} ${path} missing operationId`).toBeDefined();
// Every operation must have at least one success response (2xx)
const statusCodes = Object.keys(op.responses ?? {});
const has2xx = statusCodes.some((code) => code.startsWith("2"));
expect(has2xx, `${method.toUpperCase()} ${path} has no 2xx response`).toBe(true);
}
}
});
it("wraps all success responses in the { data } envelope", () => {
const doc = generateOpenApiDocument();
const paths = doc.paths ?? {};
for (const [path, pathItem] of Object.entries(paths)) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { responses?: Record<string, Record<string, unknown>> }
| undefined;
if (!op?.responses) continue;
for (const [statusCode, response] of Object.entries(op.responses)) {
if (!statusCode.startsWith("2")) continue;
const content = (response as Record<string, unknown>)?.content as
| Record<string, { schema?: Record<string, unknown> }>
| undefined;
if (!content?.["application/json"]) continue;
const schema = content["application/json"].schema;
expect(
schema,
`${method.toUpperCase()} ${path} ${statusCode} missing schema`,
).toBeDefined();
// The envelope must have a "data" property (either directly or via $ref that wraps it)
// Check for direct properties or allOf/oneOf patterns
const props = (schema as Record<string, unknown>)?.properties as
| Record<string, unknown>
| undefined;
if (props) {
expect(
props,
`${method.toUpperCase()} ${path} ${statusCode} envelope missing "data" property`,
).toHaveProperty("data");
}
}
}
}
});
it("includes auth error responses on authenticated endpoints", () => {
const doc = generateOpenApiDocument();
const paths = doc.paths ?? {};
// Public endpoints that don't require authentication
const publicPaths = new Set(["/_emdash/api/comments/{collection}/{contentId}"]);
for (const [path, pathItem] of Object.entries(paths)) {
if (publicPaths.has(path)) continue;
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { responses?: Record<string, unknown> }
| undefined;
if (!op?.responses) continue;
const statusCodes = Object.keys(op.responses);
expect(statusCodes, `${method.toUpperCase()} ${path} missing 401`).toContain("401");
expect(statusCodes, `${method.toUpperCase()} ${path} missing 403`).toContain("403");
}
}
});
it("has no duplicate operation IDs across all paths", () => {
const doc = generateOpenApiDocument();
const operationIds: string[] = [];
for (const pathItem of Object.values(doc.paths ?? {})) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { operationId?: string }
| undefined;
if (op?.operationId) {
operationIds.push(op.operationId);
}
}
}
const seen = new Set<string>();
for (const id of operationIds) {
expect(seen.has(id), `duplicate operationId: ${id}`).toBe(false);
seen.add(id);
}
});
it("registers referenced schemas as reusable components", async () => {
const doc = generateOpenApiDocument();
const schemas = doc.components?.schemas ?? {};
const schemaNames = Object.keys(schemas);
// Should have a reasonable number of reusable schemas
expect(schemaNames.length).toBeGreaterThanOrEqual(5);
// All registered schemas should be valid objects with type or properties
for (const [name, schema] of Object.entries(schemas)) {
expect(schema, `component schema "${name}" is not an object`).toBeTypeOf("object");
}
});
it("uses consistent error response shape across all error codes", () => {
const doc = generateOpenApiDocument();
const paths = doc.paths ?? {};
for (const [path, pathItem] of Object.entries(paths)) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const op = (pathItem as Record<string, unknown>)?.[method] as
| { responses?: Record<string, Record<string, unknown>> }
| undefined;
if (!op?.responses) continue;
for (const [statusCode, response] of Object.entries(op.responses)) {
// Only check error responses (4xx, 5xx)
const code = Number(statusCode);
if (code < 400) continue;
const content = (response as Record<string, unknown>)?.content as
| Record<string, { schema?: Record<string, unknown> }>
| undefined;
if (!content?.["application/json"]) continue;
const schema = content["application/json"].schema;
expect(
schema,
`${method.toUpperCase()} ${path} ${statusCode} error missing schema`,
).toBeDefined();
}
}
}
});
});

View File

@@ -0,0 +1,732 @@
/**
* Capability Enforcement Integration Tests (v2)
*
* Tests the capability-based access gating in the v2 plugin context.
* v2 always enforces capabilities - there's no "trusted mode" bypass.
*
*/
import Database from "better-sqlite3";
import { Kysely, SqliteDialect, sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import { OptionsRepository } from "../../../src/database/repositories/options.js";
import { UserRepository } from "../../../src/database/repositories/user.js";
import type { Database as DbSchema } from "../../../src/database/types.js";
import {
PluginContextFactory,
createContentAccess,
createContentAccessWithWrite,
createHttpAccess,
createUnrestrictedHttpAccess,
createBlockedHttpAccess,
createLogAccess,
createStorageAccess,
createKVAccess,
createSiteInfo,
createUrlHelper,
createUserAccess,
} from "../../../src/plugins/context.js";
import type { ResolvedPlugin } from "../../../src/plugins/types.js";
// Test regex patterns
const NOT_ALLOWED_FETCH_REGEX = /not allowed to fetch from host/;
const NO_ALLOWED_FETCH_REGEX = /not allowed to fetch/;
const NO_NETWORK_FETCH_REGEX = /does not have the "network:fetch" capability/;
/**
 * Create a minimal resolved plugin for testing.
 *
 * Starts from an empty baseline (no capabilities, allowed hosts, storage,
 * admin UI, hooks, routes, or settings) and shallow-merges the caller's
 * overrides on top.
 */
function createTestPlugin(overrides: Partial<ResolvedPlugin> = {}): ResolvedPlugin {
  const baseline: ResolvedPlugin = {
    id: "test-plugin",
    version: "1.0.0",
    capabilities: [],
    allowedHosts: [],
    storage: {},
    admin: {
      pages: [],
      widgets: [],
      fieldWidgets: {},
    },
    hooks: {},
    routes: {},
    settings: undefined,
  };
  return { ...baseline, ...overrides };
}
describe("Capability Enforcement Integration (v2)", () => {
let db: Kysely<DbSchema>;
let sqliteDb: Database.Database;
// Per-test database setup: a fresh in-memory SQLite instance with the core
// migrations applied, plus a hand-built ec_posts content table — the content
// layer reads one real column per field rather than a JSON data blob.
beforeEach(async () => {
  // Create in-memory SQLite database
  sqliteDb = new Database(":memory:");
  db = new Kysely<DbSchema>({
    dialect: new SqliteDialect({
      database: sqliteDb,
    }),
  });
  // Run migrations
  await runMigrations(db);
  // Create test content table with actual field columns (not JSON data column)
  // The ContentRepository expects real columns for each field
  await sql`
    CREATE TABLE IF NOT EXISTS ec_posts (
      id TEXT PRIMARY KEY,
      slug TEXT,
      status TEXT DEFAULT 'draft',
      author_id TEXT,
      primary_byline_id TEXT,
      created_at TEXT DEFAULT (datetime('now')),
      updated_at TEXT DEFAULT (datetime('now')),
      published_at TEXT,
      deleted_at TEXT,
      version INTEGER DEFAULT 1,
      locale TEXT NOT NULL DEFAULT 'en',
      translation_group TEXT,
      title TEXT,
      content TEXT,
      UNIQUE(slug, locale)
    )
  `.execute(db);
  // Insert test content with actual column values
  await sql`
    INSERT INTO ec_posts (id, slug, status, title, content, locale, translation_group)
    VALUES
      ('post-1', 'hello-world', 'published', 'Hello World', 'Content 1', 'en', 'post-1'),
      ('post-2', 'second-post', 'draft', 'Second Post', 'Content 2', 'en', 'post-2')
  `.execute(db);
});
// Tear down in reverse order of construction: the Kysely wrapper first, then
// the raw better-sqlite3 handle.
afterEach(async () => {
  await db.destroy();
  sqliteDb.close();
});
describe("Content Access", () => {
describe("createContentAccess (read-only)", () => {
it("can read content by ID", async () => {
const access = createContentAccess(db);
const post = await access.get("posts", "post-1");
expect(post).not.toBeNull();
expect(post!.id).toBe("post-1");
expect(post!.data.title).toBe("Hello World");
});
it("can list content", async () => {
const access = createContentAccess(db);
const result = await access.list("posts");
expect(result.items).toHaveLength(2);
expect(result.hasMore).toBe(false);
});
it("returns null for non-existent content", async () => {
const access = createContentAccess(db);
const post = await access.get("posts", "non-existent");
expect(post).toBeNull();
});
});
describe("createContentAccessWithWrite", () => {
it("includes read methods", async () => {
const access = createContentAccessWithWrite(db);
expect(typeof access.get).toBe("function");
expect(typeof access.list).toBe("function");
});
it("includes write methods", async () => {
const access = createContentAccessWithWrite(db);
expect(typeof access.create).toBe("function");
expect(typeof access.update).toBe("function");
expect(typeof access.delete).toBe("function");
});
it("can create new content", async () => {
const access = createContentAccessWithWrite(db);
const created = await access.create("posts", {
title: "New Post",
content: "New content",
});
expect(created.id).toBeDefined();
expect(created.data.title).toBe("New Post");
// Verify it was created
const found = await access.get("posts", created.id);
expect(found).not.toBeNull();
});
});
});
describe("HTTP Access", () => {
describe("createHttpAccess (with host restrictions)", () => {
it("allows requests to allowed hosts", async () => {
const http = createHttpAccess("test-plugin", ["example.com"]);
// We can't actually make the request in tests, but we can verify
// the function doesn't throw for allowed hosts
expect(typeof http.fetch).toBe("function");
});
it("blocks requests to non-allowed hosts", async () => {
const http = createHttpAccess("test-plugin", ["example.com"]);
await expect(http.fetch("https://evil.com/api")).rejects.toThrow(NOT_ALLOWED_FETCH_REGEX);
});
it("supports wildcard host patterns", { timeout: 15000 }, async () => {
const http = createHttpAccess("test-plugin", ["*.example.com"]);
// Should not throw for subdomains
// (Can't test actual fetch, but verify pattern matching logic)
await expect(http.fetch("https://api.example.com/test")).rejects.not.toThrow(
NO_ALLOWED_FETCH_REGEX,
);
});
});
describe("createBlockedHttpAccess", () => {
it("always throws", async () => {
const http = createBlockedHttpAccess("no-network-plugin");
await expect(http.fetch("https://example.com")).rejects.toThrow(NO_NETWORK_FETCH_REGEX);
});
});
describe("createUnrestrictedHttpAccess", () => {
it("returns an HttpAccess with a fetch function", () => {
const http = createUnrestrictedHttpAccess("unrestricted-plugin");
expect(typeof http.fetch).toBe("function");
});
it("does not throw for any host", async () => {
const http = createUnrestrictedHttpAccess("unrestricted-plugin");
// Can't make a real request in tests, but verify it doesn't throw a
// host-validation error — it will throw a network error instead.
await expect(http.fetch("https://any-host-at-all.example.com/test")).rejects.not.toThrow(
NOT_ALLOWED_FETCH_REGEX,
);
});
});
});
describe("Storage Access", () => {
it("creates collection accessors from config", () => {
const storage = createStorageAccess(db, "test-plugin", {
events: { indexes: ["type"] },
cache: { indexes: ["key"] },
});
expect(storage.events).toBeDefined();
expect(storage.cache).toBeDefined();
});
it("provides full StorageCollection API", () => {
const storage = createStorageAccess(db, "test-plugin", {
items: { indexes: [] },
});
const collection = storage.items;
expect(typeof collection.get).toBe("function");
expect(typeof collection.put).toBe("function");
expect(typeof collection.delete).toBe("function");
expect(typeof collection.exists).toBe("function");
expect(typeof collection.getMany).toBe("function");
expect(typeof collection.putMany).toBe("function");
expect(typeof collection.deleteMany).toBe("function");
expect(typeof collection.query).toBe("function");
expect(typeof collection.count).toBe("function");
});
it("isolates storage between plugins", async () => {
const storage1 = createStorageAccess(db, "plugin-1", {
items: { indexes: [] },
});
const storage2 = createStorageAccess(db, "plugin-2", {
items: { indexes: [] },
});
await storage1.items.put("doc-1", { value: "from plugin 1" });
// Plugin 2 should not see plugin 1's data
const fromPlugin2 = await storage2.items.get("doc-1");
expect(fromPlugin2).toBeNull();
// Plugin 1 should still see its data
const fromPlugin1 = await storage1.items.get("doc-1");
expect(fromPlugin1).toEqual({ value: "from plugin 1" });
});
});
describe("KV Access", () => {
it("prefixes keys with plugin ID", async () => {
const optionsRepo = new OptionsRepository(db);
const kv = createKVAccess(optionsRepo, "test-plugin");
await kv.set("my-key", { foo: "bar" });
// Verify the key is prefixed in the database
const rawValue = await optionsRepo.get("plugin:test-plugin:my-key");
expect(rawValue).toEqual({ foo: "bar" });
});
it("isolates KV between plugins", async () => {
const optionsRepo = new OptionsRepository(db);
const kv1 = createKVAccess(optionsRepo, "plugin-1");
const kv2 = createKVAccess(optionsRepo, "plugin-2");
await kv1.set("shared-key", "value from 1");
await kv2.set("shared-key", "value from 2");
expect(await kv1.get("shared-key")).toBe("value from 1");
expect(await kv2.get("shared-key")).toBe("value from 2");
});
it("supports listing keys with prefix", async () => {
const optionsRepo = new OptionsRepository(db);
const kv = createKVAccess(optionsRepo, "test-plugin");
await kv.set("settings:theme", "dark");
await kv.set("settings:lang", "en");
await kv.set("cache:user-1", { name: "John" });
const settings = await kv.list("settings:");
expect(settings).toHaveLength(2);
expect(settings.map((s) => s.key).toSorted()).toEqual(["settings:lang", "settings:theme"]);
});
});
describe("Log Access", () => {
it("prefixes messages with plugin ID", () => {
const log = createLogAccess("test-plugin");
// These just verify the methods exist and don't throw
expect(() => log.debug("test message")).not.toThrow();
expect(() => log.info("test message", { extra: "data" })).not.toThrow();
expect(() => log.warn("test warning")).not.toThrow();
expect(() => log.error("test error")).not.toThrow();
});
});
describe("PluginContextFactory", () => {
it("creates context with capability-gated access", () => {
const factory = new PluginContextFactory({ db });
const readOnlyPlugin = createTestPlugin({
id: "reader",
capabilities: ["read:content"],
});
const ctx = factory.createContext(readOnlyPlugin);
// Content should be read-only (no create/update/delete)
expect(ctx.content).toBeDefined();
expect(typeof ctx.content!.get).toBe("function");
expect(typeof ctx.content!.list).toBe("function");
expect("create" in ctx.content!).toBe(false);
});
it("provides undefined content for plugins without capability", () => {
const factory = new PluginContextFactory({ db });
const noContentPlugin = createTestPlugin({
id: "no-content",
capabilities: ["network:fetch"],
});
const ctx = factory.createContext(noContentPlugin);
expect(ctx.content).toBeUndefined();
});
it("provides http for plugins with network:fetch", () => {
const factory = new PluginContextFactory({ db });
const networkPlugin = createTestPlugin({
id: "network",
capabilities: ["network:fetch"],
allowedHosts: ["api.example.com"],
});
const ctx = factory.createContext(networkPlugin);
expect(ctx.http).toBeDefined();
expect(typeof ctx.http!.fetch).toBe("function");
});
it("provides undefined http for plugins without capability", () => {
const factory = new PluginContextFactory({ db });
const noNetworkPlugin = createTestPlugin({
id: "no-network",
capabilities: [],
});
const ctx = factory.createContext(noNetworkPlugin);
expect(ctx.http).toBeUndefined();
});
it("provides unrestricted http for plugins with network:fetch:any", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "unrestricted-network",
capabilities: ["network:fetch:any", "network:fetch"],
});
const ctx = factory.createContext(plugin);
expect(ctx.http).toBeDefined();
expect(typeof ctx.http!.fetch).toBe("function");
});
it("prefers network:fetch:any over network:fetch when both present", async () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "both-fetch",
capabilities: ["network:fetch", "network:fetch:any"],
allowedHosts: ["restricted.example.com"],
});
const ctx = factory.createContext(plugin);
expect(ctx.http).toBeDefined();
// With network:fetch:any, arbitrary hosts should not throw a host validation error
await expect(ctx.http!.fetch("https://unrestricted.example.com/test")).rejects.not.toThrow(
NOT_ALLOWED_FETCH_REGEX,
);
});
it("always provides kv, storage, and log", () => {
const factory = new PluginContextFactory({ db });
const minimalPlugin = createTestPlugin({
id: "minimal",
capabilities: [],
storage: {
items: { indexes: [] },
},
});
const ctx = factory.createContext(minimalPlugin);
expect(ctx.kv).toBeDefined();
expect(ctx.storage).toBeDefined();
expect(ctx.storage.items).toBeDefined();
expect(ctx.log).toBeDefined();
});
it("provides write:content access with create/update/delete", () => {
const factory = new PluginContextFactory({ db });
const writePlugin = createTestPlugin({
id: "writer",
capabilities: ["write:content"],
});
const ctx = factory.createContext(writePlugin);
expect(ctx.content).toBeDefined();
expect("create" in ctx.content!).toBe(true);
expect("update" in ctx.content!).toBe(true);
expect("delete" in ctx.content!).toBe(true);
});
it("always provides site info", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({ id: "site-test", capabilities: [] });
const ctx = factory.createContext(plugin);
expect(ctx.site).toBeDefined();
expect(typeof ctx.site.name).toBe("string");
expect(typeof ctx.site.url).toBe("string");
expect(typeof ctx.site.locale).toBe("string");
});
it("always provides url() helper", () => {
const factory = new PluginContextFactory({
db,
siteInfo: { siteUrl: "https://example.com" },
});
const plugin = createTestPlugin({ id: "url-test", capabilities: [] });
const ctx = factory.createContext(plugin);
expect(typeof ctx.url).toBe("function");
expect(ctx.url("/posts")).toBe("https://example.com/posts");
});
it("provides users for plugins with read:users", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "user-reader",
capabilities: ["read:users"],
});
const ctx = factory.createContext(plugin);
expect(ctx.users).toBeDefined();
expect(typeof ctx.users!.get).toBe("function");
expect(typeof ctx.users!.getByEmail).toBe("function");
expect(typeof ctx.users!.list).toBe("function");
});
it("provides undefined users for plugins without read:users", () => {
const factory = new PluginContextFactory({ db });
const plugin = createTestPlugin({
id: "no-users",
capabilities: [],
});
const ctx = factory.createContext(plugin);
expect(ctx.users).toBeUndefined();
});
});
describe("Site Info", () => {
it("creates site info with all options", () => {
const info = createSiteInfo({
siteName: "My Site",
siteUrl: "https://example.com/",
locale: "fr",
});
expect(info.name).toBe("My Site");
expect(info.url).toBe("https://example.com"); // trailing slash stripped
expect(info.locale).toBe("fr");
});
it("uses defaults for missing values", () => {
const info = createSiteInfo({});
expect(info.name).toBe("");
expect(info.url).toBe("");
expect(info.locale).toBe("en");
});
it("strips trailing slash from URL", () => {
const info = createSiteInfo({ siteUrl: "https://example.com/" });
expect(info.url).toBe("https://example.com");
});
});
describe("URL Helper", () => {
it("creates absolute URLs from paths", () => {
const url = createUrlHelper("https://example.com");
expect(url("/posts")).toBe("https://example.com/posts");
expect(url("/")).toBe("https://example.com/");
});
it("strips trailing slash from base URL", () => {
const url = createUrlHelper("https://example.com/");
expect(url("/posts")).toBe("https://example.com/posts");
});
it("throws for paths not starting with /", () => {
const url = createUrlHelper("https://example.com");
expect(() => url("posts")).toThrow('URL path must start with "/"');
});
it("works with empty base URL", () => {
const url = createUrlHelper("");
expect(url("/posts")).toBe("/posts");
});
it("rejects protocol-relative paths (//)", () => {
const url = createUrlHelper("https://example.com");
expect(() => url("//evil.com")).toThrow("protocol-relative");
});
it("rejects protocol-relative paths with empty base URL", () => {
const url = createUrlHelper("");
expect(() => url("//evil.com/path")).toThrow("protocol-relative");
});
});
describe("User Access", () => {
let userRepo: UserRepository;
beforeEach(async () => {
userRepo = new UserRepository(db);
// Create test users with all 5 role levels
await userRepo.create({ email: "admin@test.com", name: "Admin User", role: "admin" });
await userRepo.create({ email: "editor@test.com", name: "Editor User", role: "editor" });
await userRepo.create({ email: "author@test.com", name: "Author User", role: "author" });
await userRepo.create({
email: "contrib@test.com",
name: "Contributor User",
role: "contributor",
});
await userRepo.create({
email: "sub@test.com",
name: "Subscriber User",
role: "subscriber",
});
});
it("gets user by ID", async () => {
const user = await userRepo.findByEmail("admin@test.com");
const access = createUserAccess(db);
const result = await access.get(user!.id);
expect(result).not.toBeNull();
expect(result!.email).toBe("admin@test.com");
expect(result!.name).toBe("Admin User");
expect(result!.role).toBe(50); // admin = 50
});
it("gets user by email", async () => {
const access = createUserAccess(db);
const result = await access.getByEmail("editor@test.com");
expect(result).not.toBeNull();
expect(result!.email).toBe("editor@test.com");
expect(result!.role).toBe(40); // editor = 40
});
it("returns null for non-existent user", async () => {
const access = createUserAccess(db);
expect(await access.get("non-existent")).toBeNull();
expect(await access.getByEmail("nobody@test.com")).toBeNull();
});
it("lists users", async () => {
const access = createUserAccess(db);
const result = await access.list();
expect(result.items).toHaveLength(5);
// All users should have role as number
for (const user of result.items) {
expect(typeof user.role).toBe("number");
}
});
it("excludes sensitive fields", async () => {
const access = createUserAccess(db);
const result = await access.list();
for (const user of result.items) {
// UserInfo should only have: id, email, name, role, createdAt
const keys = Object.keys(user);
expect(keys).toContain("id");
expect(keys).toContain("email");
expect(keys).toContain("name");
expect(keys).toContain("role");
expect(keys).toContain("createdAt");
// Should NOT have sensitive fields
expect(keys).not.toContain("avatarUrl");
expect(keys).not.toContain("emailVerified");
expect(keys).not.toContain("data");
expect(keys).not.toContain("password_hash");
}
});
it("converts role strings to numeric levels", async () => {
const access = createUserAccess(db);
const admin = await access.getByEmail("admin@test.com");
const editor = await access.getByEmail("editor@test.com");
const subscriber = await access.getByEmail("sub@test.com");
expect(admin!.role).toBe(50);
expect(editor!.role).toBe(40);
expect(subscriber!.role).toBe(10);
});
it("respects limit on list", async () => {
const access = createUserAccess(db);
const result = await access.list({ limit: 2 });
expect(result.items).toHaveLength(2);
expect(result.nextCursor).toBeDefined();
});
it("clamps limit to maximum of 100", async () => {
const access = createUserAccess(db);
// Should not throw for large limits — just clamp
const result = await access.list({ limit: 500 });
expect(result.items).toHaveLength(5);
});
it("clamps negative limit to minimum of 1", async () => {
const access = createUserAccess(db);
// Negative limit should be clamped to 1, not passed through
const result = await access.list({ limit: -999 });
expect(result.items).toHaveLength(1);
});
it("preserves contributor (20) and author (30) roles", async () => {
// beforeEach creates users via UserRepository with all 5 roles.
// Verify that contributor (20) and author (30) survive the round-trip.
const access = createUserAccess(db);
const contributor = await access.getByEmail("contrib@test.com");
expect(contributor).not.toBeNull();
expect(contributor!.role).toBe(20);
const author = await access.getByEmail("author@test.com");
expect(author).not.toBeNull();
expect(author!.role).toBe(30);
});
it("filters users by exact role number", async () => {
// beforeEach creates one user per role level (10, 20, 30, 40, 50)
const access = createUserAccess(db);
const contributors = await access.list({ role: 20 });
expect(contributors.items).toHaveLength(1);
expect(contributors.items[0]!.email).toBe("contrib@test.com");
expect(contributors.items[0]!.role).toBe(20);
const authors = await access.list({ role: 30 });
expect(authors.items).toHaveLength(1);
expect(authors.items[0]!.email).toBe("author@test.com");
expect(authors.items[0]!.role).toBe(30);
const admins = await access.list({ role: 50 });
expect(admins.items).toHaveLength(1);
expect(admins.items[0]!.email).toBe("admin@test.com");
});
it("supports cursor-based pagination", async () => {
const access = createUserAccess(db);
const seen = new Set<string>();
// Page through all 5 users one at a time
let cursor: string | undefined;
let pageCount = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
const page = await access.list({ limit: 1, cursor });
if (page.items.length === 0) break;
expect(page.items).toHaveLength(1);
const userId = page.items[0]!.id;
expect(seen.has(userId)).toBe(false); // no duplicates
seen.add(userId);
pageCount++;
if (!page.nextCursor) break; // last page
cursor = page.nextCursor;
}
expect(seen.size).toBe(5);
expect(pageCount).toBe(5);
});
});
});

View File

@@ -0,0 +1,236 @@
/**
* Integration tests for field widget manifest pipeline.
*
* Tests that field widgets declared on collections flow through
* the manifest builder correctly, including the widget property
* and select options for select/multiSelect fields.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../../src/database/types.js";
import { SchemaRegistry } from "../../../src/schema/registry.js";
import { setupTestDatabase } from "../../utils/test-db.js";
// Shared handle to the per-test database; each test gets a fresh in-memory
// instance and it is destroyed afterwards.
let db: Kysely<Database>;
beforeEach(async () => {
  db = await setupTestDatabase();
});
afterEach(async () => {
  await db.destroy();
});
describe("field widget on schema fields", () => {
it("should store and retrieve widget property on a field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "theme_color",
label: "Theme Color",
type: "string",
widget: "color:picker",
});
const collection = await registry.getCollectionWithFields("posts");
expect(collection).toBeTruthy();
const colorField = collection!.fields.find((f) => f.slug === "theme_color");
expect(colorField).toBeTruthy();
expect(colorField!.widget).toBe("color:picker");
expect(colorField!.type).toBe("string");
});
it("should store and retrieve widget on a json field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "pricing",
label: "Pricing",
type: "json",
widget: "x402:pricing",
});
const collection = await registry.getCollectionWithFields("posts");
const pricingField = collection!.fields.find((f) => f.slug === "pricing");
expect(pricingField).toBeTruthy();
expect(pricingField!.widget).toBe("x402:pricing");
expect(pricingField!.type).toBe("json");
});
it("should return undefined widget when not set", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "title",
label: "Title",
type: "string",
});
const collection = await registry.getCollectionWithFields("posts");
const titleField = collection!.fields.find((f) => f.slug === "title");
expect(titleField).toBeTruthy();
expect(titleField!.widget).toBeUndefined();
});
it("should update widget on an existing field", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "color",
label: "Color",
type: "string",
});
// Update to add widget
await registry.updateField("posts", "color", {
widget: "color:picker",
});
const collection = await registry.getCollectionWithFields("posts");
const colorField = collection!.fields.find((f) => f.slug === "color");
expect(colorField!.widget).toBe("color:picker");
});
it("should include select options from validation in manifest format", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "priority",
label: "Priority",
type: "select",
validation: {
options: ["low", "medium", "high"],
},
});
const collection = await registry.getCollectionWithFields("posts");
const priorityField = collection!.fields.find((f) => f.slug === "priority");
expect(priorityField).toBeTruthy();
expect(priorityField!.type).toBe("select");
expect(priorityField!.validation?.options).toEqual(["low", "medium", "high"]);
});
});
describe("field widget content CRUD", () => {
it("should save and retrieve content with a widget field value", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "title",
label: "Title",
type: "string",
});
await registry.createField("posts", {
slug: "theme_color",
label: "Theme Color",
type: "string",
widget: "color:picker",
});
// Insert content with the widget field value
const { ulid } = await import("ulidx");
const id = ulid();
await db
.insertInto("ec_posts" as never)
.values({
id,
slug: "test-post",
status: "draft",
title: "Test Post",
theme_color: "#ff6600",
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
version: 1,
} as never)
.execute();
// Read it back
const row = await db
.selectFrom("ec_posts" as never)
.selectAll()
.where("id" as never, "=", id)
.executeTakeFirst();
expect(row).toBeTruthy();
expect((row as Record<string, unknown>).theme_color).toBe("#ff6600");
});
it("should save and retrieve json widget field value", async () => {
const registry = new SchemaRegistry(db);
await registry.createCollection({
slug: "posts",
label: "Posts",
labelSingular: "Post",
});
await registry.createField("posts", {
slug: "pricing",
label: "Pricing",
type: "json",
widget: "x402:pricing",
});
const { ulid } = await import("ulidx");
const id = ulid();
const pricingValue = JSON.stringify({ enabled: true, price: "$0.10", gateMode: "bots" });
await db
.insertInto("ec_posts" as never)
.values({
id,
slug: "premium-post",
status: "draft",
pricing: pricingValue,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString(),
version: 1,
} as never)
.execute();
const row = await db
.selectFrom("ec_posts" as never)
.selectAll()
.where("id" as never, "=", id)
.executeTakeFirst();
expect(row).toBeTruthy();
const pricing = JSON.parse((row as Record<string, unknown>).pricing as string);
expect(pricing.enabled).toBe(true);
expect(pricing.price).toBe("$0.10");
expect(pricing.gateMode).toBe("bots");
});
});

View File

@@ -0,0 +1,380 @@
import type { Kysely } from "kysely";
import { sql } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { PluginStorageRepository } from "../../../src/database/repositories/plugin-storage.js";
import type { Database } from "../../../src/database/types.js";
import {
createStorageIndexes,
removeOrphanedIndexes,
syncStorageIndexes,
removeAllPluginIndexes,
getPluginIndexStatus,
} from "../../../src/plugins/storage-indexes.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
// Matches the error message SQLite raises when a UNIQUE index rejects a write.
const UNIQUE_CONSTRAINT_PATTERN = /UNIQUE constraint failed/;

// Integration tests for plugin storage index management: creation, sync,
// orphan removal, tracking via the `_plugin_indexes` table, and verification
// that real SQLite indexes back the bookkeeping.
describe("Plugin Storage Indexes Integration", () => {
  let db: Kysely<Database>;

  // Fresh in-memory database per test so index state never leaks between tests.
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    await teardownTestDatabase(db);
  });

  describe("createStorageIndexes", () => {
    it("should create single-field index", async () => {
      const result = await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
      // Index names follow idx_plugin_<pluginId>_<collection>_<field>.
      expect(result.created).toContain("idx_plugin_my-plugin_events_eventType");
      expect(result.errors).toHaveLength(0);
    });
    it("should create composite index", async () => {
      // A nested array declares a composite (multi-column) index.
      const result = await createStorageIndexes(db, "my-plugin", "events", [
        ["status", "createdAt"],
      ]);
      expect(result.created).toContain("idx_plugin_my-plugin_events_status_createdAt");
      expect(result.errors).toHaveLength(0);
    });
    it("should create multiple indexes", async () => {
      // Mix of single-field and composite declarations in one call.
      const result = await createStorageIndexes(db, "my-plugin", "events", [
        "eventType",
        "userId",
        ["status", "timestamp"],
      ]);
      expect(result.created).toHaveLength(3);
      expect(result.errors).toHaveLength(0);
    });
    it("should track indexes in _plugin_indexes table", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
      const indexes = await db
        .selectFrom("_plugin_indexes")
        .selectAll()
        .where("plugin_id", "=", "my-plugin")
        .execute();
      expect(indexes).toHaveLength(2);
      // `fields` is stored as a JSON-encoded array of column names.
      expect(indexes.map((i) => JSON.parse(i.fields))).toContainEqual(["eventType"]);
      expect(indexes.map((i) => JSON.parse(i.fields))).toContainEqual(["userId"]);
    });
    it("should be idempotent", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
      const result = await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
      // Should still succeed
      expect(result.errors).toHaveLength(0);
      // Should not duplicate tracking records
      const indexes = await db
        .selectFrom("_plugin_indexes")
        .selectAll()
        .where("plugin_id", "=", "my-plugin")
        .execute();
      expect(indexes).toHaveLength(1);
    });
  });

  describe("removeOrphanedIndexes", () => {
    it("should remove indexes no longer in declaration", async () => {
      // Create initial indexes
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId", "status"]);
      // Remove one
      const result = await removeOrphanedIndexes(db, "my-plugin", "events", [
        "eventType",
        "userId",
      ]);
      expect(result.removed).toContain("idx_plugin_my-plugin_events_status");
      expect(result.errors).toHaveLength(0);
    });
    it("should keep indexes that are still declared", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
      const result = await removeOrphanedIndexes(db, "my-plugin", "events", [
        "eventType",
        "userId",
      ]);
      expect(result.removed).toHaveLength(0);
    });
    it("should update tracking table", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "status"]);
      await removeOrphanedIndexes(db, "my-plugin", "events", ["eventType"]);
      // Only the still-declared index should remain tracked.
      const indexes = await db
        .selectFrom("_plugin_indexes")
        .selectAll()
        .where("plugin_id", "=", "my-plugin")
        .execute();
      expect(indexes).toHaveLength(1);
      expect(JSON.parse(indexes[0].fields)).toEqual(["eventType"]);
    });
  });

  describe("syncStorageIndexes", () => {
    it("should create new and remove old indexes in one call", async () => {
      // Initial state
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "oldField"]);
      // Sync to new state
      const result = await syncStorageIndexes(db, "my-plugin", "events", ["eventType", "newField"]);
      expect(result.created).toContain("idx_plugin_my-plugin_events_newField");
      expect(result.removed).toContain("idx_plugin_my-plugin_events_oldField");
      const status = await getPluginIndexStatus(db, "my-plugin");
      const fields = status.map((s) => s.fields);
      expect(fields).toContainEqual(["eventType"]);
      expect(fields).toContainEqual(["newField"]);
      expect(fields).not.toContainEqual(["oldField"]);
    });
  });

  describe("removeAllPluginIndexes", () => {
    it("should remove all indexes for a plugin", async () => {
      // Two collections with two indexes each -> four indexes total.
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", "userId"]);
      await createStorageIndexes(db, "my-plugin", "cache", ["key", "expiresAt"]);
      const result = await removeAllPluginIndexes(db, "my-plugin");
      expect(result.removed).toHaveLength(4);
      expect(result.errors).toHaveLength(0);
      const remaining = await db
        .selectFrom("_plugin_indexes")
        .selectAll()
        .where("plugin_id", "=", "my-plugin")
        .execute();
      expect(remaining).toHaveLength(0);
    });
    it("should not affect other plugins", async () => {
      await createStorageIndexes(db, "plugin1", "events", ["eventType"]);
      await createStorageIndexes(db, "plugin2", "events", ["eventType"]);
      await removeAllPluginIndexes(db, "plugin1");
      // plugin2's tracking record must survive plugin1's teardown.
      const plugin2Indexes = await db
        .selectFrom("_plugin_indexes")
        .selectAll()
        .where("plugin_id", "=", "plugin2")
        .execute();
      expect(plugin2Indexes).toHaveLength(1);
    });
  });

  describe("getPluginIndexStatus", () => {
    it("should return all indexes for a plugin", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType", ["status", "timestamp"]]);
      await createStorageIndexes(db, "my-plugin", "cache", ["key"]);
      const status = await getPluginIndexStatus(db, "my-plugin");
      expect(status).toHaveLength(3);
      expect(status).toContainEqual(
        expect.objectContaining({
          collection: "events",
          fields: ["eventType"],
        }),
      );
      expect(status).toContainEqual(
        expect.objectContaining({
          collection: "events",
          fields: ["status", "timestamp"],
        }),
      );
      expect(status).toContainEqual(
        expect.objectContaining({
          collection: "cache",
          fields: ["key"],
        }),
      );
    });
    it("should return empty array for plugin with no indexes", async () => {
      const status = await getPluginIndexStatus(db, "nonexistent-plugin");
      expect(status).toEqual([]);
    });
  });

  describe("query performance with indexes", () => {
    it("should efficiently query using indexed fields", async () => {
      const pluginId = "perf-test";
      const collection = "events";
      // Create index first
      await createStorageIndexes(db, pluginId, collection, ["eventType"]);
      // Create repository with the indexed field
      const repo = new PluginStorageRepository<{ eventType: string }>(db, pluginId, collection, [
        "eventType",
      ]);
      // Insert test data: 100 rows alternating between two event types.
      const items = Array.from({ length: 100 }, (_, i) => ({
        id: `event-${i}`,
        data: { eventType: i % 2 === 0 ? "pageview" : "click" },
      }));
      await repo.putMany(items);
      // Query should work and use the index
      const result = await repo.query({
        where: { eventType: "pageview" },
      });
      expect(result.items).toHaveLength(50);
      expect(result.items.every((i) => i.data.eventType === "pageview")).toBe(true);
    });
  });

  describe("index verification", () => {
    it("should create actual SQLite index", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
      // Query SQLite's index list
      const indexes = await sql<{ name: string }>`
        SELECT name FROM sqlite_master
        WHERE type = 'index'
        AND name LIKE 'idx_plugin_%'
      `.execute(db);
      expect(indexes.rows.map((r) => r.name)).toContain("idx_plugin_my-plugin_events_eventType");
    });
    it("should drop actual SQLite index on removal", async () => {
      await createStorageIndexes(db, "my-plugin", "events", ["eventType"]);
      await removeAllPluginIndexes(db, "my-plugin");
      // No idx_plugin_my-plugin_* entries should survive in sqlite_master.
      const indexes = await sql<{ name: string }>`
        SELECT name FROM sqlite_master
        WHERE type = 'index'
        AND name LIKE 'idx_plugin_my-plugin_%'
      `.execute(db);
      expect(indexes.rows).toHaveLength(0);
    });
  });

  describe("unique indexes", () => {
    it("should create a unique index", async () => {
      // Unique indexes use the uidx_ prefix instead of idx_.
      const result = await createStorageIndexes(db, "my-plugin", "forms", [], {
        uniqueIndexes: ["slug"],
      });
      expect(result.created).toContain("uidx_plugin_my-plugin_forms_slug");
      expect(result.errors).toHaveLength(0);
      // Verify it's actually a UNIQUE index in SQLite
      const indexSql = await sql<{ sql: string }>`
        SELECT sql FROM sqlite_master
        WHERE type = 'index'
        AND name = 'uidx_plugin_my-plugin_forms_slug'
      `.execute(db);
      expect(indexSql.rows).toHaveLength(1);
      expect(indexSql.rows[0].sql).toContain("UNIQUE");
    });
    it("should enforce uniqueness on insert", async () => {
      await createStorageIndexes(db, "my-plugin", "forms", [], {
        uniqueIndexes: ["slug"],
      });
      const repo = new PluginStorageRepository<{ slug: string; name: string }>(
        db,
        "my-plugin",
        "forms",
        ["slug"],
      );
      await repo.put("form-1", { slug: "contact", name: "Contact" });
      // Second insert with a different ID but same slug should fail
      await expect(repo.put("form-2", { slug: "contact", name: "Contact Copy" })).rejects.toThrow(
        UNIQUE_CONSTRAINT_PATTERN,
      );
    });
    it("should allow updating the same document", async () => {
      await createStorageIndexes(db, "my-plugin", "forms", [], {
        uniqueIndexes: ["slug"],
      });
      const repo = new PluginStorageRepository<{ slug: string; name: string }>(
        db,
        "my-plugin",
        "forms",
        ["slug"],
      );
      await repo.put("form-1", { slug: "contact", name: "Contact" });
      // Updating the same ID should succeed (upsert)
      await repo.put("form-1", { slug: "contact", name: "Contact Updated" });
      const result = await repo.get("form-1");
      expect(result?.name).toBe("Contact Updated");
    });
    it("should allow different slugs across different collections", async () => {
      await createStorageIndexes(db, "my-plugin", "forms", [], {
        uniqueIndexes: ["slug"],
      });
      await createStorageIndexes(db, "my-plugin", "templates", [], {
        uniqueIndexes: ["slug"],
      });
      const formsRepo = new PluginStorageRepository<{ slug: string }>(db, "my-plugin", "forms", [
        "slug",
      ]);
      const templatesRepo = new PluginStorageRepository<{ slug: string }>(
        db,
        "my-plugin",
        "templates",
        ["slug"],
      );
      // Same slug in different collections should work (partial index scoped by collection)
      await formsRepo.put("form-1", { slug: "contact" });
      await templatesRepo.put("tmpl-1", { slug: "contact" });
      expect(await formsRepo.get("form-1")).toEqual({ slug: "contact" });
      expect(await templatesRepo.get("tmpl-1")).toEqual({ slug: "contact" });
    });
    it("should include unique index fields in queryable fields", async () => {
      await createStorageIndexes(db, "my-plugin", "forms", ["status"], {
        uniqueIndexes: ["slug"],
      });
      const repo = new PluginStorageRepository<{ slug: string; status: string }>(
        db,
        "my-plugin",
        "forms",
        ["status", "slug"],
      );
      await repo.put("form-1", { slug: "contact", status: "active" });
      await repo.put("form-2", { slug: "feedback", status: "active" });
      // Query by unique field should work
      const result = await repo.query({ where: { slug: "contact" } });
      expect(result.items).toHaveLength(1);
      expect(result.items[0].data.slug).toBe("contact");
    });
  });
});

View File

@@ -0,0 +1,293 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
PluginStorageRepository,
createPluginStorageAccessor,
deleteAllPluginStorage,
deletePluginCollection,
} from "../../../src/database/repositories/plugin-storage.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
/** Shape of the analytics event documents used throughout these tests. */
interface AnalyticsEvent {
  eventType: string;
  userId: string;
  timestamp: string;
  metadata: Record<string, unknown>;
}

// End-to-end tests for plugin storage: CRUD round-trips, JSON-extraction
// queries, the multi-collection accessor, bulk deletion, pagination, and
// concurrent writes.
describe("Plugin Storage Integration", () => {
  let db: Kysely<Database>;
  beforeEach(async () => {
    db = await setupTestDatabase();
  });
  afterEach(async () => {
    await teardownTestDatabase(db);
  });
  describe("full storage flow", () => {
    it("should support complete CRUD cycle", async () => {
      const repo = new PluginStorageRepository<AnalyticsEvent>(db, "analytics-plugin", "events", [
        "eventType",
        "userId",
        "timestamp",
      ]);
      // Create
      const event: AnalyticsEvent = {
        eventType: "pageview",
        userId: "user123",
        timestamp: new Date().toISOString(),
        metadata: { page: "/home", referrer: "google.com" },
      };
      await repo.put("event1", event);
      // Read
      const fetched = await repo.get("event1");
      expect(fetched).toEqual(event);
      // Update
      const updatedEvent = {
        ...event,
        metadata: { ...event.metadata, duration: 5000 },
      };
      await repo.put("event1", updatedEvent);
      const refetched = await repo.get("event1");
      expect(refetched?.metadata).toHaveProperty("duration", 5000);
      // Delete
      const deleted = await repo.delete("event1");
      expect(deleted).toBe(true);
      expect(await repo.get("event1")).toBeNull();
    });
    it("should support complex queries with JSON extraction", async () => {
      const repo = new PluginStorageRepository<AnalyticsEvent>(db, "analytics-plugin", "events", [
        "eventType",
        "userId",
        "timestamp",
      ]);
      // Create events
      await repo.putMany([
        {
          id: "e1",
          data: {
            eventType: "pageview",
            userId: "user1",
            timestamp: "2024-01-01T10:00:00Z",
            metadata: {},
          },
        },
        {
          id: "e2",
          data: {
            eventType: "click",
            userId: "user1",
            timestamp: "2024-01-01T10:05:00Z",
            metadata: {},
          },
        },
        {
          id: "e3",
          data: {
            eventType: "pageview",
            userId: "user2",
            timestamp: "2024-01-01T11:00:00Z",
            metadata: {},
          },
        },
      ]);
      // Query by eventType
      const pageviews = await repo.query({ where: { eventType: "pageview" } });
      expect(pageviews.items).toHaveLength(2);
      // Query by userId
      const user1Events = await repo.query({ where: { userId: "user1" } });
      expect(user1Events.items).toHaveLength(2);
      // Combined query
      const user1Pageviews = await repo.query({
        where: { eventType: "pageview", userId: "user1" },
      });
      expect(user1Pageviews.items).toHaveLength(1);
    });
  });
  describe("createPluginStorageAccessor", () => {
    it("should create accessor with multiple collections", async () => {
      const accessor = createPluginStorageAccessor(db, "my-plugin", {
        events: { indexes: ["eventType", "timestamp"] },
        cache: { indexes: ["key", "expiresAt"] },
      });
      expect(accessor).toHaveProperty("events");
      expect(accessor).toHaveProperty("cache");
      // Use events collection
      await accessor.events.put("e1", {
        eventType: "test",
        timestamp: new Date().toISOString(),
      });
      const event = await accessor.events.get("e1");
      // Assert on the stored payload rather than mere definedness: get()
      // returns null (which IS "defined") for a missing key, so toBeDefined()
      // would pass even if the write was lost.
      expect(event).toMatchObject({ eventType: "test" });
      // Use cache collection
      await accessor.cache.put("c1", {
        key: "test-key",
        value: "test-value",
        expiresAt: new Date().toISOString(),
      });
      const cached = await accessor.cache.get("c1");
      expect(cached).toMatchObject({ key: "test-key", value: "test-value" });
    });
    it("should isolate collections from each other", async () => {
      const accessor = createPluginStorageAccessor(db, "my-plugin", {
        events: { indexes: ["eventType"] },
        cache: { indexes: ["key"] },
      });
      await accessor.events.put("item1", { eventType: "test" });
      await accessor.cache.put("item1", { key: "test" });
      // Both should exist independently
      expect(await accessor.events.get("item1")).toEqual({ eventType: "test" });
      expect(await accessor.cache.get("item1")).toEqual({ key: "test" });
      // Count should be separate. Cast to the concretely-typed repository
      // (count() is not on the accessor's public surface) instead of `any`,
      // so the where-clause keys stay type-checked.
      expect(
        await (accessor.events as PluginStorageRepository<{ eventType: string }>).count({
          eventType: "test",
        }),
      ).toBe(1);
      expect(
        await (accessor.cache as PluginStorageRepository<{ key: string }>).count({
          key: "test",
        }),
      ).toBe(1);
    });
  });
  describe("deleteAllPluginStorage", () => {
    it("should delete all data for a plugin", async () => {
      const accessor = createPluginStorageAccessor(db, "cleanup-plugin", {
        events: { indexes: ["eventType"] },
        cache: { indexes: ["key"] },
      });
      // Add data
      await accessor.events.put("e1", { eventType: "test" });
      await accessor.events.put("e2", { eventType: "test2" });
      await accessor.cache.put("c1", { key: "test" });
      // Delete all — returns the number of rows removed across collections.
      const deleted = await deleteAllPluginStorage(db, "cleanup-plugin");
      expect(deleted).toBe(3);
      // Verify empty
      expect(await accessor.events.get("e1")).toBeNull();
      expect(await accessor.events.get("e2")).toBeNull();
      expect(await accessor.cache.get("c1")).toBeNull();
    });
    it("should not affect other plugins", async () => {
      const plugin1 = createPluginStorageAccessor(db, "plugin1", {
        data: { indexes: ["key"] },
      });
      const plugin2 = createPluginStorageAccessor(db, "plugin2", {
        data: { indexes: ["key"] },
      });
      await plugin1.data.put("item1", { key: "test" });
      await plugin2.data.put("item1", { key: "test" });
      await deleteAllPluginStorage(db, "plugin1");
      expect(await plugin1.data.get("item1")).toBeNull();
      expect(await plugin2.data.get("item1")).toEqual({ key: "test" });
    });
  });
  describe("deletePluginCollection", () => {
    it("should delete specific collection", async () => {
      const accessor = createPluginStorageAccessor(db, "my-plugin", {
        events: { indexes: ["eventType"] },
        cache: { indexes: ["key"] },
      });
      await accessor.events.put("e1", { eventType: "test" });
      await accessor.cache.put("c1", { key: "test" });
      await deletePluginCollection(db, "my-plugin", "events");
      // Only the targeted collection is cleared; siblings survive.
      expect(await accessor.events.get("e1")).toBeNull();
      expect(await accessor.cache.get("c1")).toEqual({ key: "test" });
    });
  });
  describe("pagination", () => {
    it("should paginate through large datasets", async () => {
      const repo = new PluginStorageRepository<{ index: number }>(
        db,
        "pagination-test",
        "items",
        [],
      );
      // Create 25 items with zero-padded IDs for stable ordering.
      const items = Array.from({ length: 25 }, (_, i) => ({
        id: `item-${String(i).padStart(3, "0")}`,
        data: { index: i },
      }));
      await repo.putMany(items);
      // Paginate with limit of 10 until the cursor is exhausted.
      const pages: Array<Array<{ id: string; data: { index: number } }>> = [];
      let cursor: string | undefined;
      do {
        const result = await repo.query({ limit: 10, cursor });
        pages.push(result.items);
        cursor = result.cursor;
      } while (cursor);
      expect(pages).toHaveLength(3);
      expect(pages[0]).toHaveLength(10);
      expect(pages[1]).toHaveLength(10);
      expect(pages[2]).toHaveLength(5);
      // Verify all items were retrieved exactly once (no dupes, no gaps).
      const allItems = pages.flat();
      expect(allItems).toHaveLength(25);
      expect(new Set(allItems.map((i) => i.id)).size).toBe(25);
    });
  });
  describe("concurrent operations", () => {
    it("should handle concurrent puts", async () => {
      const repo = new PluginStorageRepository<{ value: number }>(
        db,
        "concurrent-test",
        "items",
        [],
      );
      // Concurrent puts
      await Promise.all([
        repo.put("item1", { value: 1 }),
        repo.put("item2", { value: 2 }),
        repo.put("item3", { value: 3 }),
        repo.put("item4", { value: 4 }),
        repo.put("item5", { value: 5 }),
      ]);
      const count = await repo.count();
      expect(count).toBe(5);
    });
  });
});

View File

@@ -0,0 +1,515 @@
import type { Kysely } from "kysely";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { RedirectRepository } from "../../../src/database/repositories/redirect.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../../utils/test-db.js";
describe("RedirectRepository", () => {
let db: Kysely<Database>;
let repo: RedirectRepository;
// Fresh in-memory database and repository per test for isolation.
beforeEach(async () => {
  db = await setupTestDatabase();
  repo = new RedirectRepository(db);
});
afterEach(async () => {
  await teardownTestDatabase(db);
});
// --- CRUD ---------------------------------------------------------------
describe("create", () => {
  it("creates a redirect with defaults", async () => {
    const redirect = await repo.create({
      source: "/old",
      destination: "/new",
    });
    expect(redirect.source).toBe("/old");
    expect(redirect.destination).toBe("/new");
    // Defaults: permanent (301), literal (non-pattern) source, enabled,
    // zero hits, and not flagged as auto-generated.
    expect(redirect.type).toBe(301);
    expect(redirect.isPattern).toBe(false);
    expect(redirect.enabled).toBe(true);
    expect(redirect.hits).toBe(0);
    expect(redirect.lastHitAt).toBeNull();
    expect(redirect.auto).toBe(false);
    expect(redirect.id).toBeTruthy();
  });
  it("creates a redirect with custom values", async () => {
    const redirect = await repo.create({
      source: "/temp",
      destination: "/target",
      type: 302,
      enabled: false,
      groupName: "Temporary",
      auto: true,
    });
    expect(redirect.type).toBe(302);
    expect(redirect.enabled).toBe(false);
    expect(redirect.groupName).toBe("Temporary");
    expect(redirect.auto).toBe(true);
  });
  it("auto-detects pattern sources", async () => {
    // Bracket syntax in the source should flip isPattern on automatically.
    const redirect = await repo.create({
      source: "/old-blog/[...path]",
      destination: "/blog/[...path]",
    });
    expect(redirect.isPattern).toBe(true);
  });
  it("respects explicit isPattern=false override", async () => {
    // A caller can force literal matching even when detection would disagree.
    const redirect = await repo.create({
      source: "/literal-with-brackets",
      destination: "/target",
      isPattern: false,
    });
    expect(redirect.isPattern).toBe(false);
  });
});
describe("findById", () => {
  it("returns null for non-existent id", async () => {
    // Missing rows resolve to null rather than throwing.
    const missing = await repo.findById("nonexistent");
    expect(missing).toBeNull();
  });
  it("finds a redirect by id", async () => {
    const redirect = await repo.create({
      source: "/a",
      destination: "/b",
    });
    const fetched = await repo.findById(redirect.id);
    expect(fetched?.source).toBe("/a");
  });
});
describe("findBySource", () => {
  it("returns null for non-existent source", async () => {
    const missing = await repo.findBySource("/nope");
    expect(missing).toBeNull();
  });
  it("finds a redirect by source", async () => {
    await repo.create({ source: "/old", destination: "/new" });
    const fetched = await repo.findBySource("/old");
    expect(fetched?.destination).toBe("/new");
  });
});
describe("update", () => {
  it("returns null for non-existent id", async () => {
    expect(await repo.update("nonexistent", { destination: "/x" })).toBeNull();
  });
  it("updates destination", async () => {
    const created = await repo.create({
      source: "/a",
      destination: "/b",
    });
    const updated = await repo.update(created.id, { destination: "/c" });
    expect(updated?.destination).toBe("/c");
  });
  it("updates type and enabled", async () => {
    const created = await repo.create({
      source: "/a",
      destination: "/b",
      type: 301,
    });
    const updated = await repo.update(created.id, {
      type: 302,
      enabled: false,
    });
    expect(updated?.type).toBe(302);
    expect(updated?.enabled).toBe(false);
  });
  it("auto-detects isPattern when source changes", async () => {
    const created = await repo.create({
      source: "/literal",
      destination: "/target",
    });
    expect(created.isPattern).toBe(false);
    // Changing the source to a bracketed pattern should re-run detection.
    const updated = await repo.update(created.id, {
      source: "/[slug]",
    });
    expect(updated?.isPattern).toBe(true);
  });
});
describe("delete", () => {
  it("returns false for non-existent id", async () => {
    expect(await repo.delete("nonexistent")).toBe(false);
  });
  it("deletes and returns true", async () => {
    const created = await repo.create({
      source: "/a",
      destination: "/b",
    });
    expect(await repo.delete(created.id)).toBe(true);
    // The row is really gone afterwards.
    expect(await repo.findById(created.id)).toBeNull();
  });
});
// Listing/filtering tests: pagination, free-text search, and the
// enabled/group/auto filters exposed by findMany.
describe("findMany", () => {
  it("returns empty list when no redirects", async () => {
    const result = await repo.findMany({});
    expect(result.items).toEqual([]);
    expect(result.nextCursor).toBeUndefined();
  });
  it("returns all redirects", async () => {
    await repo.create({ source: "/a", destination: "/b" });
    await repo.create({ source: "/c", destination: "/d" });
    const result = await repo.findMany({});
    expect(result.items).toHaveLength(2);
  });
  it("paginates with cursor", async () => {
    for (let i = 0; i < 5; i++) {
      await repo.create({ source: `/s${i}`, destination: `/d${i}` });
    }
    const page1 = await repo.findMany({ limit: 2 });
    expect(page1.items).toHaveLength(2);
    expect(page1.nextCursor).toBeTruthy();
    const page2 = await repo.findMany({ limit: 2, cursor: page1.nextCursor });
    expect(page2.items).toHaveLength(2);
    expect(page2.nextCursor).toBeTruthy();
    // Ensure no overlap
    const page1Ids = new Set(page1.items.map((r) => r.id));
    for (const item of page2.items) {
      expect(page1Ids.has(item.id)).toBe(false);
    }
  });
  it("filters by search term", async () => {
    await repo.create({ source: "/blog/hello", destination: "/new/hello" });
    await repo.create({ source: "/about", destination: "/info" });
    const result = await repo.findMany({ search: "blog" });
    expect(result.items).toHaveLength(1);
    expect(result.items[0]!.source).toBe("/blog/hello");
  });
  it("filters by enabled status", async () => {
    await repo.create({ source: "/a", destination: "/b", enabled: true });
    await repo.create({ source: "/c", destination: "/d", enabled: false });
    const enabled = await repo.findMany({ enabled: true });
    expect(enabled.items).toHaveLength(1);
    expect(enabled.items[0]!.source).toBe("/a");
    const disabled = await repo.findMany({ enabled: false });
    expect(disabled.items).toHaveLength(1);
    expect(disabled.items[0]!.source).toBe("/c");
  });
  it("filters by group", async () => {
    await repo.create({
      source: "/a",
      destination: "/b",
      groupName: "wp-import",
    });
    await repo.create({ source: "/c", destination: "/d" });
    const result = await repo.findMany({ group: "wp-import" });
    expect(result.items).toHaveLength(1);
    expect(result.items[0]!.groupName).toBe("wp-import");
  });
  it("filters by auto flag", async () => {
    await repo.create({ source: "/a", destination: "/b", auto: true });
    await repo.create({ source: "/c", destination: "/d", auto: false });
    const autoOnly = await repo.findMany({ auto: true });
    expect(autoOnly.items).toHaveLength(1);
    expect(autoOnly.items[0]!.auto).toBe(true);
  });
  it("clamps limit to 1-100", async () => {
    for (let i = 0; i < 3; i++) {
      await repo.create({ source: `/s${i}`, destination: `/d${i}` });
    }
    // limit=0 clamps to 1: three rows exist, so exactly one item must come
    // back. (The previous `toBeLessThanOrEqual(1)` also passed on an empty
    // result and would have masked a broken query.)
    const min = await repo.findMany({ limit: 0 });
    expect(min.items).toHaveLength(1);
    // limit=200 should clamp to 100
    const max = await repo.findMany({ limit: 200 });
    expect(max.items).toHaveLength(3); // only 3 exist
  });
});
// --- Matching -----------------------------------------------------------
describe("matchPath", () => {
  it("returns null when no redirects exist", async () => {
    expect(await repo.matchPath("/anything")).toBeNull();
  });
  it("matches exact paths", async () => {
    await repo.create({ source: "/old", destination: "/new" });
    const match = await repo.matchPath("/old");
    expect(match).not.toBeNull();
    expect(match!.resolvedDestination).toBe("/new");
  });
  it("does not match disabled redirects", async () => {
    await repo.create({
      source: "/old",
      destination: "/new",
      enabled: false,
    });
    expect(await repo.matchPath("/old")).toBeNull();
  });
  it("matches pattern redirects", async () => {
    // [...path] is a catch-all segment; the matcher substitutes the captured
    // remainder into the destination.
    await repo.create({
      source: "/old-blog/[...path]",
      destination: "/blog/[...path]",
    });
    const match = await repo.matchPath("/old-blog/2024/01/post");
    expect(match).not.toBeNull();
    expect(match!.resolvedDestination).toBe("/blog/2024/01/post");
  });
  it("prefers exact match over pattern match", async () => {
    await repo.create({
      source: "/blog/[slug]",
      destination: "/articles/[slug]",
    });
    await repo.create({
      source: "/blog/special",
      destination: "/special-page",
    });
    const match = await repo.matchPath("/blog/special");
    expect(match!.resolvedDestination).toBe("/special-page");
  });
  it("matches [param] in single segment", async () => {
    // [slug] captures exactly one path segment.
    await repo.create({
      source: "/category/[slug]",
      destination: "/tags/[slug]",
    });
    const match = await repo.matchPath("/category/typescript");
    expect(match!.resolvedDestination).toBe("/tags/typescript");
    // Should not match multi-segment
    expect(await repo.matchPath("/category/a/b")).toBeNull();
  });
});
// --- Hit tracking -------------------------------------------------------
describe("recordHit", () => {
  it("increments hit count and updates lastHitAt", async () => {
    const redirect = await repo.create({
      source: "/a",
      destination: "/b",
    });
    expect(redirect.hits).toBe(0);
    expect(redirect.lastHitAt).toBeNull();
    await repo.recordHit(redirect.id);
    const updated = await repo.findById(redirect.id);
    expect(updated!.hits).toBe(1);
    expect(updated!.lastHitAt).toBeTruthy();
    // Each subsequent hit keeps incrementing the counter.
    await repo.recordHit(redirect.id);
    const again = await repo.findById(redirect.id);
    expect(again!.hits).toBe(2);
  });
});
// --- Auto-redirects -----------------------------------------------------
describe("createAutoRedirect", () => {
  it("creates a redirect for slug change with url pattern", async () => {
    // {slug} in the URL pattern is replaced with the old/new slug respectively.
    const redirect = await repo.createAutoRedirect(
      "posts",
      "old-title",
      "new-title",
      "id1",
      "/blog/{slug}",
    );
    expect(redirect.source).toBe("/blog/old-title");
    expect(redirect.destination).toBe("/blog/new-title");
    expect(redirect.auto).toBe(true);
    expect(redirect.groupName).toBe("Auto: slug change");
    expect(redirect.type).toBe(301);
  });
  it("uses fallback URL when no url pattern", async () => {
    // Without a pattern the collection slug becomes the path prefix.
    const redirect = await repo.createAutoRedirect("posts", "old-slug", "new-slug", "id1", null);
    expect(redirect.source).toBe("/posts/old-slug");
    expect(redirect.destination).toBe("/posts/new-slug");
  });
  it("collapses existing chains", async () => {
    // First rename: A -> B
    await repo.createAutoRedirect("posts", "title-a", "title-b", "id1", "/blog/{slug}");
    // Second rename: B -> C (should update A's destination to C)
    await repo.createAutoRedirect("posts", "title-b", "title-c", "id1", "/blog/{slug}");
    // Check that the A -> B redirect now points to C
    const aRedirect = await repo.findBySource("/blog/title-a");
    expect(aRedirect!.destination).toBe("/blog/title-c");
    // And B -> C also exists
    const bRedirect = await repo.findBySource("/blog/title-b");
    expect(bRedirect!.destination).toBe("/blog/title-c");
  });
  it("updates existing redirect from same source instead of duplicating", async () => {
    // Create A -> B
    await repo.createAutoRedirect("posts", "a", "b", "id1", "/blog/{slug}");
    // Create A -> C (same source /blog/a, different dest)
    // This calls collapseChains first, which doesn't touch /blog/a since
    // nothing points to /blog/a as destination.
    // Then it finds existing source=/blog/a and updates its destination.
    await repo.createAutoRedirect("posts", "a", "c", "id1", "/blog/{slug}");
    const all = await repo.findMany({});
    // Should only have one redirect from /blog/a
    const fromA = all.items.filter((r) => r.source === "/blog/a");
    expect(fromA).toHaveLength(1);
    expect(fromA[0]!.destination).toBe("/blog/c");
  });
});
// --- 404 log ------------------------------------------------------------
describe("log404", () => {
  it("logs a 404 entry", async () => {
    await repo.log404({ path: "/missing" });
    const result = await repo.find404s({});
    expect(result.items).toHaveLength(1);
    expect(result.items[0]!.path).toBe("/missing");
  });
  it("logs with metadata", async () => {
    // Optional request metadata is persisted alongside the path.
    await repo.log404({
      path: "/missing",
      referrer: "https://google.com",
      userAgent: "Mozilla/5.0",
      ip: "1.2.3.4",
    });
    const result = await repo.find404s({});
    const entry = result.items[0]!;
    expect(entry.referrer).toBe("https://google.com");
    expect(entry.userAgent).toBe("Mozilla/5.0");
    expect(entry.ip).toBe("1.2.3.4");
  });
});
describe("find404s", () => {
  it("filters by search", async () => {
    await repo.log404({ path: "/missing-blog-post" });
    await repo.log404({ path: "/about-us" });
    const result = await repo.find404s({ search: "blog" });
    expect(result.items).toHaveLength(1);
    expect(result.items[0]!.path).toBe("/missing-blog-post");
  });
  it("paginates", async () => {
    for (let i = 0; i < 5; i++) {
      await repo.log404({ path: `/missing-${i}` });
    }
    const page1 = await repo.find404s({ limit: 2 });
    expect(page1.items).toHaveLength(2);
    expect(page1.nextCursor).toBeTruthy();
    const page2 = await repo.find404s({ limit: 2, cursor: page1.nextCursor });
    expect(page2.items).toHaveLength(2);
  });
});
describe("get404Summary", () => {
  it("groups by path and counts", async () => {
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/b" });
    const summary = await repo.get404Summary();
    expect(summary).toHaveLength(2);
    // Ordered by count desc
    expect(summary[0]!.path).toBe("/a");
    expect(summary[0]!.count).toBe(3);
    expect(summary[1]!.path).toBe("/b");
    expect(summary[1]!.count).toBe(1);
  });
  it("includes top referrer", async () => {
    // google.com appears twice vs bing.com once, so it should win.
    await repo.log404({ path: "/x", referrer: "https://google.com" });
    await repo.log404({ path: "/x", referrer: "https://google.com" });
    await repo.log404({ path: "/x", referrer: "https://bing.com" });
    const summary = await repo.get404Summary();
    expect(summary[0]!.topReferrer).toBe("https://google.com");
  });
});
describe("delete404", () => {
  it("deletes a single 404 entry", async () => {
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/b" });
    const all = await repo.find404s({});
    expect(all.items).toHaveLength(2);
    await repo.delete404(all.items[0]!.id);
    const remaining = await repo.find404s({});
    expect(remaining.items).toHaveLength(1);
  });
});
describe("clear404s", () => {
  it("removes all 404 entries", async () => {
    await repo.log404({ path: "/a" });
    await repo.log404({ path: "/b" });
    // clear404s reports how many rows were removed.
    const count = await repo.clear404s();
    expect(count).toBe(2);
    const result = await repo.find404s({});
    expect(result.items).toHaveLength(0);
  });
});
describe("prune404s", () => {
it("removes entries older than cutoff", async () => {
await repo.log404({ path: "/old" });
// All entries were just created, so pruning with a future date should clear them
const count = await repo.prune404s("2099-01-01T00:00:00.000Z");
expect(count).toBe(1);
});
it("keeps entries newer than cutoff", async () => {
await repo.log404({ path: "/new" });
const count = await repo.prune404s("2000-01-01T00:00:00.000Z");
expect(count).toBe(0);
const result = await repo.find404s({});
expect(result.items).toHaveLength(1);
});
});
});

View File

@@ -0,0 +1,579 @@
/**
* Tests for seed --on-conflict modes: skip, update, error
*
* Verifies that applySeed() correctly handles conflicts when records
* already exist in the database.
*/
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Database } from "../../src/database/types.js";
import { applySeed } from "../../src/seed/apply.js";
import type { SeedFile } from "../../src/seed/types.js";
import { setupTestDatabase, teardownTestDatabase } from "../utils/test-db.js";
/**
 * Baseline seed fixture: one "posts" collection (title/body fields), one
 * byline, one redirect, and one section, plus a single published post.
 * Any top-level key can be replaced via `overrides` (shallow merge).
 */
function createTestSeed(overrides?: Partial<SeedFile>): SeedFile {
  const base: SeedFile = {
    version: "1",
    collections: [
      {
        slug: "posts",
        label: "Posts",
        labelSingular: "Post",
        fields: [
          { slug: "title", label: "Title", type: "string" },
          { slug: "body", label: "Body", type: "text" },
        ],
      },
    ],
    bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Doe", bio: "Original bio" }],
    redirects: [{ source: "/old-page", destination: "/new-page", type: 301 }],
    sections: [
      {
        slug: "hero",
        title: "Hero Section",
        description: "Original description",
        content: [{ _type: "block", _key: "1" }],
      },
    ],
    content: {
      posts: [
        {
          id: "post-1",
          slug: "hello-world",
          status: "published",
          data: { title: "Hello World", body: "Original body" },
        },
      ],
    },
  };
  return { ...base, ...overrides };
}
/**
 * Counterpart to createTestSeed(): identical identities (slugs/ids) but with
 * every mutable field changed — used to exercise onConflict: "update".
 */
function createUpdatedSeed(): SeedFile {
  return {
    version: "1",
    collections: [
      {
        slug: "posts",
        label: "Blog Posts",
        labelSingular: "Blog Post",
        fields: [
          { slug: "title", label: "Post Title", type: "string" },
          { slug: "body", label: "Post Body", type: "text" },
        ],
      },
    ],
    bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Smith", bio: "Updated bio" }],
    redirects: [{ source: "/old-page", destination: "/newer-page", type: 302 }],
    sections: [
      {
        slug: "hero",
        title: "Updated Hero",
        description: "Updated description",
        content: [{ _type: "block", _key: "2" }],
      },
    ],
    content: {
      posts: [
        {
          id: "post-1",
          slug: "hello-world",
          status: "published",
          data: { title: "Hello World Updated", body: "Updated body" },
        },
      ],
    },
  };
}
describe("applySeed onConflict modes", () => {
let db: Kysely<Database>;
beforeEach(async () => {
db = await setupTestDatabase();
});
afterEach(async () => {
await teardownTestDatabase(db);
});
describe("onConflict: skip (default)", () => {
it("skips existing collections", async () => {
const seed = createTestSeed();
// First apply
await applySeed(db, seed, { includeContent: true });
// Second apply with default (skip)
const result = await applySeed(db, seed, { includeContent: true });
expect(result.collections.created).toBe(0);
expect(result.collections.skipped).toBe(1);
expect(result.collections.updated).toBe(0);
});
it("skips existing bylines", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const result = await applySeed(db, seed, { includeContent: true });
expect(result.bylines.created).toBe(0);
expect(result.bylines.skipped).toBe(1);
expect(result.bylines.updated).toBe(0);
});
it("skips existing redirects", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const result = await applySeed(db, seed);
expect(result.redirects.created).toBe(0);
expect(result.redirects.skipped).toBe(1);
expect(result.redirects.updated).toBe(0);
});
it("skips existing sections", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const result = await applySeed(db, seed);
expect(result.sections.created).toBe(0);
expect(result.sections.skipped).toBe(1);
expect(result.sections.updated).toBe(0);
});
it("skips existing content", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const result = await applySeed(db, seed, { includeContent: true });
expect(result.content.created).toBe(0);
expect(result.content.skipped).toBe(1);
expect(result.content.updated).toBe(0);
});
it("defaults to skip when onConflict is not specified", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
// No onConflict specified -- should default to skip
const result = await applySeed(db, seed, { includeContent: true });
expect(result.collections.skipped).toBe(1);
expect(result.collections.created).toBe(0);
expect(result.collections.updated).toBe(0);
});
});
describe("onConflict: update", () => {
it("updates existing collections and fields", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.collections.updated).toBe(1);
expect(result.collections.created).toBe(0);
expect(result.fields.updated).toBe(2);
// Verify the collection was actually updated
const row = await db
.selectFrom("_emdash_collections")
.selectAll()
.where("slug", "=", "posts")
.executeTakeFirst();
expect(row?.label).toBe("Blog Posts");
expect(row?.label_singular).toBe("Blog Post");
});
it("updates existing bylines", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.bylines.updated).toBe(1);
expect(result.bylines.created).toBe(0);
// Verify the byline was actually updated
const row = await db
.selectFrom("_emdash_bylines")
.selectAll()
.where("slug", "=", "jane-doe")
.executeTakeFirst();
expect(row?.display_name).toBe("Jane Smith");
expect(row?.bio).toBe("Updated bio");
});
it("updates existing redirects", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
onConflict: "update",
});
expect(result.redirects.updated).toBe(1);
expect(result.redirects.created).toBe(0);
// Verify the redirect was actually updated
const row = await db
.selectFrom("_emdash_redirects")
.selectAll()
.where("source", "=", "/old-page")
.executeTakeFirst();
expect(row?.destination).toBe("/newer-page");
expect(row?.type).toBe(302);
});
it("updates existing sections", async () => {
const seed = createTestSeed();
await applySeed(db, seed);
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
onConflict: "update",
});
expect(result.sections.updated).toBe(1);
expect(result.sections.created).toBe(0);
// Verify the section was actually updated
const row = await db
.selectFrom("_emdash_sections")
.selectAll()
.where("slug", "=", "hero")
.executeTakeFirst();
expect(row?.title).toBe("Updated Hero");
expect(row?.description).toBe("Updated description");
});
it("updates existing content", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
const updatedSeed = createUpdatedSeed();
const result = await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.content.updated).toBe(1);
expect(result.content.created).toBe(0);
// Verify the content was actually updated
const row = await db
.selectFrom("ec_posts" as any)
.selectAll()
.where("slug", "=", "hello-world")
.executeTakeFirstOrThrow();
expect((row as Record<string, unknown>).title).toBe("Hello World Updated");
expect((row as Record<string, unknown>).body).toBe("Updated body");
});
});
describe("onConflict: error", () => {
it("throws on existing collection", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
await expect(
applySeed(db, seed, {
includeContent: true,
onConflict: "error",
}),
).rejects.toThrow('Conflict: collection "posts" already exists');
});
it("throws on existing byline", async () => {
// Seed without collections to get past collections step
const seed = createTestSeed({ collections: [] });
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: byline "jane-doe" already exists',
);
});
it("throws on existing redirect", async () => {
const seed = createTestSeed({
collections: [],
bylines: [],
sections: [],
});
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: redirect "/old-page" already exists',
);
});
it("throws on existing section", async () => {
const seed = createTestSeed({
collections: [],
bylines: [],
redirects: [],
});
await applySeed(db, seed);
await expect(applySeed(db, seed, { onConflict: "error" })).rejects.toThrow(
'Conflict: section "hero" already exists',
);
});
it("throws on existing content", async () => {
// First apply creates collections and content
const seed = createTestSeed({
bylines: [],
redirects: [],
sections: [],
});
await applySeed(db, seed, { includeContent: true });
// Second apply with only content (collections already exist, skip them)
const contentOnlySeed = createTestSeed({
collections: [],
bylines: [],
redirects: [],
sections: [],
});
await expect(
applySeed(db, contentOnlySeed, {
includeContent: true,
onConflict: "error",
}),
).rejects.toThrow('Conflict: content "hello-world" in "posts" already exists');
});
});
describe("mixed scenarios", () => {
it("creates new records alongside existing ones in update mode", async () => {
const seed = createTestSeed();
await applySeed(db, seed, { includeContent: true });
// Add a new content entry to the seed
const extendedSeed = createUpdatedSeed();
const posts = extendedSeed.content!["posts"];
if (!posts) throw new Error("posts missing from seed");
posts.push({
id: "post-2",
slug: "second-post",
status: "published",
data: { title: "Second Post", body: "New content" },
});
const result = await applySeed(db, extendedSeed, {
includeContent: true,
onConflict: "update",
});
expect(result.content.updated).toBe(1);
expect(result.content.created).toBe(1);
});
it("clears taxonomy assignments on content update when seed removes them", async () => {
// Seed with a taxonomy and content that has taxonomy assignments
const seed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
taxonomies: [
{
name: "categories",
label: "Categories",
hierarchical: false,
collections: ["posts"],
terms: [
{ slug: "news", label: "News" },
{ slug: "tech", label: "Tech" },
],
},
],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello" },
taxonomies: { categories: ["news", "tech"] },
},
],
},
};
await applySeed(db, seed, { includeContent: true });
// Verify both terms are attached
const beforeRows = await db
.selectFrom("content_taxonomies")
.selectAll()
.where("collection", "=", "posts")
.execute();
expect(beforeRows).toHaveLength(2);
// Re-apply with only one taxonomy term
const updatedSeed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
taxonomies: [
{
name: "categories",
label: "Categories",
hierarchical: false,
collections: ["posts"],
terms: [
{ slug: "news", label: "News" },
{ slug: "tech", label: "Tech" },
],
},
],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello Updated" },
taxonomies: { categories: ["tech"] },
},
],
},
};
await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
// Should only have "tech" now, not both
const afterRows = await db
.selectFrom("content_taxonomies")
.selectAll()
.where("collection", "=", "posts")
.execute();
expect(afterRows).toHaveLength(1);
});
it("clears byline assignments on content update when seed removes them", async () => {
const seed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Doe" }],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello" },
bylines: [{ byline: "byline-1" }],
},
],
},
};
await applySeed(db, seed, { includeContent: true });
// Verify byline is attached
const beforeRows = await db
.selectFrom("_emdash_content_bylines")
.selectAll()
.where("collection_slug", "=", "posts")
.execute();
expect(beforeRows).toHaveLength(1);
// Re-apply without bylines on the content entry
const updatedSeed: SeedFile = {
version: "1",
collections: [
{
slug: "posts",
label: "Posts",
fields: [{ slug: "title", label: "Title", type: "string" }],
},
],
bylines: [{ id: "byline-1", slug: "jane-doe", displayName: "Jane Doe" }],
content: {
posts: [
{
id: "post-1",
slug: "hello-world",
status: "published",
data: { title: "Hello Updated" },
// No bylines -- should clear existing
},
],
},
};
await applySeed(db, updatedSeed, {
includeContent: true,
onConflict: "update",
});
// Should have no bylines now
const afterRows = await db
.selectFrom("_emdash_content_bylines")
.selectAll()
.where("collection_slug", "=", "posts")
.execute();
expect(afterRows).toHaveLength(0);
});
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,374 @@
/**
* Integration test server helper.
*
* Bootstraps an isolated Astro dev server from a minimal fixture,
* runs setup, seeds test data, and creates auth tokens. Each test
* suite gets a fresh database and server process.
*
* Usage:
*
* const ctx = await createTestServer({ port: 4399 });
* // ctx.client — EmDashClient (devBypass auth)
* // ctx.token — PAT bearer token for CLI tests
* // ctx.baseUrl — http://localhost:4399
* // ctx.cwd — working directory of the running server
* await ctx.cleanup();
*/
import { execFile, spawn } from "node:child_process";
import { existsSync, mkdtempSync, rmSync, symlinkSync, unlinkSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { EmDashClient } from "../../src/client/index.js";
const execAsync = promisify(execFile);
// Test regex patterns
const SESSION_COOKIE_REGEX = /^([^;]+)/;
// ---------------------------------------------------------------------------
// Paths
// ---------------------------------------------------------------------------
const FIXTURE_DIR = resolve(import.meta.dirname, "fixture");
// Borrow node_modules from demos/simple — it has all the deps we need
// and is maintained by pnpm workspace resolution.
const DONOR_NODE_MODULES = resolve(import.meta.dirname, "../../../../demos/simple/node_modules");
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/** Options for createTestServer(). */
export interface TestServerOptions {
  /** Port for the Astro dev server to listen on. */
  port: number;
  /** Server startup timeout in ms (default: 60_000) */
  timeout?: number;
  /** Seed test data after setup (default: true) */
  seed?: boolean;
}
/** Handle to a running integration test server, returned by createTestServer(). */
export interface TestServerContext {
  /** Base URL of the running server */
  baseUrl: string;
  /** Working directory containing the fixture */
  cwd: string;
  /** EmDashClient authenticated via dev-bypass session */
  client: EmDashClient;
  /** PAT bearer token with full scopes (for CLI / raw fetch tests) */
  token: string;
  /** Seeded collection slugs */
  collections: string[];
  /** Seeded content IDs keyed by collection */
  contentIds: Record<string, string[]>;
  /** Session cookie string for raw fetch calls needing session auth */
  sessionCookie: string;
  /** Stop the server and remove the temp directory */
  cleanup: () => Promise<void>;
}
// ---------------------------------------------------------------------------
// Node.js version guard
// ---------------------------------------------------------------------------
/**
 * Astro requires Node.js >= 22.12.0. Call from a `beforeAll` to fail the
 * suite immediately when the environment is misconfigured rather than
 * silently skipping.
 *
 * @throws Error when the running Node.js version is below 22.12.0.
 */
export function assertNodeVersion(): void {
  // process.versions.node is "major.minor.patch". Destructuring defaults
  // replace the previous `as [number, number]` tuple assertion (which was
  // immediately contradicted by `!` non-null assertions); a malformed
  // version string now compares as 0 and fails the check cleanly.
  const [major = 0, minor = 0] = process.versions.node.split(".").map(Number);
  const ok = major > 22 || (major === 22 && minor >= 12);
  if (!ok) {
    throw new Error(
      `Integration tests require Node.js >= 22.12.0 (running ${process.versions.node}). ` +
        `Update your Node version instead of skipping tests.`,
    );
  }
}
// ---------------------------------------------------------------------------
// Build guard
// ---------------------------------------------------------------------------
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../..");
const CLI_BINARY = resolve(import.meta.dirname, "../../dist/cli/index.mjs");
let buildPromise: Promise<void> | null = null;
/**
 * Ensure the workspace is built before starting integration tests.
 * The build runs at most once per process: concurrent and subsequent
 * callers all share the same in-flight promise. Skips entirely when the
 * CLI binary already exists (checked inside doBuild).
 */
export function ensureBuilt(): Promise<void> {
  if (buildPromise === null) {
    buildPromise = doBuild();
  }
  return buildPromise;
}
/**
 * Run `pnpm build` at the workspace root, unless the CLI binary already
 * exists — its presence is treated as proof the workspace is built.
 */
async function doBuild(): Promise<void> {
  if (existsSync(CLI_BINARY)) return;
  console.log("[integration] Built artifacts missing — running pnpm build...");
  // NOTE(review): 120s cap on the whole workspace build — confirm this is
  // enough on cold CI caches.
  await execAsync("pnpm", ["build"], {
    cwd: WORKSPACE_ROOT,
    timeout: 120_000,
  });
  console.log("[integration] Build complete.");
}
// ---------------------------------------------------------------------------
// Server lifecycle
// ---------------------------------------------------------------------------
/**
 * Poll `url` until it answers any HTTP response, failing once `timeoutMs`
 * has elapsed. Connection errors and per-request timeouts are treated as
 * "server not up yet" and retried.
 */
async function waitForServer(url: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const response = await fetch(url, { signal: AbortSignal.timeout(2000) });
      // Any status code at all (even a 500) proves the server is listening.
      if (response.status > 0) return;
    } catch {
      // Connection refused / request timeout — keep polling.
    }
    await new Promise((wake) => setTimeout(wake, 500));
  }
  throw new Error(`Server at ${url} did not start within ${timeoutMs}ms`);
}
/**
 * Create an Astro dev server for integration testing.
 *
 * Runs the fixture in-place to avoid Astro virtual module resolution
 * issues with symlinked temp dirs. Uses a temp directory only for the
 * database file — source files stay at their real paths.
 *
 * @param options - Port (required), startup timeout (default 60s), and
 *   whether to seed sample content (default true).
 * @returns A context with an authenticated client, a PAT token, seeded
 *   content IDs, and a `cleanup()` that stops the server and removes
 *   temporary state.
 * @throws When startup or setup fails; the error message includes the tail
 *   of the captured server output for CI debugging.
 */
export async function createTestServer(options: TestServerOptions): Promise<TestServerContext> {
  const { port, timeout = 60_000, seed = true } = options;
  const baseUrl = `http://localhost:${port}`;
  // --- 0. Ensure workspace is built ---
  await ensureBuilt();
  // --- 1. Run fixture in-place, temp dir only for DB ---
  const workDir = FIXTURE_DIR;
  const tempDataDir = mkdtempSync(join(tmpdir(), "emdash-integration-"));
  const dbPath = join(tempDataDir, "test.db");
  // Ensure node_modules symlink exists in the fixture dir.
  // Multiple test suites may race to create this — handle EEXIST gracefully.
  const fixtureNodeModules = join(FIXTURE_DIR, "node_modules");
  let createdSymlink = false;
  if (!existsSync(fixtureNodeModules)) {
    try {
      symlinkSync(DONOR_NODE_MODULES, fixtureNodeModules);
      createdSymlink = true;
    } catch (err: unknown) {
      if ((err as NodeJS.ErrnoException).code !== "EEXIST") throw err;
    }
  }
  // --- 2. Start dev server ---
  const astroBin = join(fixtureNodeModules, ".bin", "astro");
  const server = spawn(astroBin, ["dev", "--port", String(port)], {
    cwd: workDir,
    env: {
      ...process.env,
      EMDASH_TEST_DB: `file:${dbPath}`,
    },
    stdio: "pipe",
  });
  // Always capture server output. Forward to stderr when DEBUG is set,
  // and always keep a ring buffer of the last 5 KB for error reporting.
  let serverOutput = "";
  const MAX_OUTPUT = 5000;
  function appendOutput(chunk: string): void {
    if (process.env.DEBUG) process.stderr.write(`[integration:${port}] ${chunk}`);
    serverOutput += chunk;
    if (serverOutput.length > MAX_OUTPUT * 2) {
      serverOutput = serverOutput.slice(-MAX_OUTPUT);
    }
  }
  server.stdout?.on("data", (data: Buffer) => appendOutput(data.toString()));
  server.stderr?.on("data", (data: Buffer) => appendOutput(data.toString()));
  // Track for cleanup
  let stopped = false;
  async function cleanup(): Promise<void> {
    if (stopped) return;
    stopped = true;
    server.kill("SIGTERM");
    await new Promise((r) => setTimeout(r, 1000));
    // Force kill if the process hasn't actually exited. BUG FIX: `server.killed`
    // only records that kill() was called — it is already true right after the
    // SIGTERM above, so the old `!server.killed` guard made this branch
    // unreachable. exitCode/signalCode stay null until the process really exits.
    if (server.exitCode === null && server.signalCode === null) {
      server.kill("SIGKILL");
      await new Promise((r) => setTimeout(r, 500));
    }
    // Remove temp data directory
    rmSync(tempDataDir, { recursive: true, force: true });
    // Remove symlink if we created it
    if (createdSymlink && existsSync(fixtureNodeModules)) {
      try {
        unlinkSync(fixtureNodeModules);
      } catch {}
    }
  }
  try {
    // --- 3. Wait for server to be ready ---
    await waitForServer(`${baseUrl}/_emdash/api/setup/dev-bypass`, timeout);
    // --- 4. Run setup + create PAT in one request ---
    // The ?token query param tells the dev-bypass endpoint to also
    // create a PAT with full scopes and return it in the response.
    const setupRes = await fetch(`${baseUrl}/_emdash/api/setup/dev-bypass?token=1`);
    if (!setupRes.ok) {
      const body = await setupRes.text().catch(() => "");
      throw new Error(`Setup bypass failed (${setupRes.status}): ${body}`);
    }
    const setupJson = (await setupRes.json()) as {
      data: { user: { id: string; email: string }; token?: string };
    };
    const setupData = setupJson.data;
    const token = setupData.token;
    if (!token) {
      throw new Error("Setup bypass did not return a PAT token");
    }
    // Extract session cookie for raw fetch calls that need session auth
    const setCookie = setupRes.headers.get("set-cookie");
    let sessionCookie = "";
    if (setCookie) {
      const match = setCookie.match(SESSION_COOKIE_REGEX);
      if (match) sessionCookie = match[1]!;
    }
    // --- 5. Create client authenticated via PAT ---
    const client = new EmDashClient({
      baseUrl,
      token,
    });
    // --- 6. Seed test data ---
    const collections: string[] = [];
    const contentIds: Record<string, string[]> = {};
    if (seed) {
      await seedTestData(client, collections, contentIds);
    }
    return {
      baseUrl,
      cwd: workDir,
      client,
      token,
      collections,
      contentIds,
      sessionCookie,
      cleanup,
    };
  } catch (error) {
    // Include server output in error for CI debugging
    const msg = error instanceof Error ? error.message : String(error);
    await cleanup();
    throw new Error(
      `${msg}\n\nServer output (last ${MAX_OUTPUT} chars):\n${serverOutput.slice(-MAX_OUTPUT)}`,
      {
        cause: error,
      },
    );
  }
}
// ---------------------------------------------------------------------------
// Seed data
// ---------------------------------------------------------------------------
/**
 * Seeds sample content into the test server.
 *
 * Collections and fields are created by the seed file
 * (fixture/.emdash/seed.json) during dev-bypass setup.
 * This function only creates content entries.
 *
 * Content:
 * - posts: 3 items (2 published, 1 draft)
 * - pages: 2 items (1 published, 1 draft)
 */
async function seedTestData(
  client: EmDashClient,
  collections: string[],
  contentIds: Record<string, string[]>,
): Promise<void> {
  collections.push("posts", "pages");
  // Declarative entry specs, created in order below so IDs stay stable.
  const postSpecs = [
    {
      slug: "first-post",
      publish: true,
      data: {
        title: "First Post",
        body: "Hello **world**. This is the first post.",
        excerpt: "The very first post",
      },
    },
    {
      slug: "second-post",
      publish: true,
      data: {
        title: "Second Post",
        body: "A second post with a [link](https://example.com).",
        excerpt: "Another post",
      },
    },
    {
      slug: "draft-post",
      publish: false,
      data: {
        title: "Draft Post",
        body: "This post is still a draft.",
        excerpt: "Not published yet",
      },
    },
  ];
  const pageSpecs = [
    {
      slug: "about",
      publish: true,
      data: { title: "About", body: "# About Us\n\nWe are a **test** fixture." },
    },
    {
      slug: "contact",
      publish: false,
      data: { title: "Contact", body: "Get in touch." },
    },
  ];
  for (const [collection, specs] of [
    ["posts", postSpecs],
    ["pages", pageSpecs],
  ] as const) {
    const ids: string[] = [];
    for (const spec of specs) {
      const entry = await client.create(collection, { data: spec.data, slug: spec.slug });
      ids.push(entry.id);
      if (spec.publish) await client.publish(collection, entry.id);
    }
    contentIds[collection] = ids;
  }
}

View File

@@ -0,0 +1,259 @@
import { execFile, spawn } from "node:child_process";
import { resolve } from "node:path";
import { promisify } from "node:util";
import { describe, expect, it } from "vitest";
import { ensureBuilt } from "../server.js";
/** A demo/template that is booted with `astro dev` and smoke-checked over HTTP. */
interface RuntimeSiteCase {
  name: string;
  dir: string;
  port: number;
  mode: "runtime";
  /** How long to wait for the dev server's first HTTP response, in ms. */
  startupTimeoutMs: number;
  /** Path polled for readiness (default: /_emdash/admin/). */
  waitPath?: string;
  /** Setup endpoint hit before assertions; `null` skips setup entirely. */
  setupPath?: string | null;
  /** Frontend path asserted on (default: /). */
  frontendPath?: string;
  /** Acceptable frontend status codes (default: [200, 302, 307, 308]). */
  frontendStatuses?: number[];
  /** Whether the frontend body must contain `<!DOCTYPE html>` (default: true). */
  requireDoctype?: boolean;
}
/** A site that is only typechecked via `pnpm run typecheck`, never booted. */
interface TypecheckSiteCase {
  name: string;
  dir: string;
  mode: "typecheck";
}
type SiteCase = RuntimeSiteCase | TypecheckSiteCase;
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
const execAsync = promisify(execFile);
// Every demo and template that must boot cleanly. Each site gets a unique
// port so the sequential suites never clash. Timeouts vary per site —
// presumably the Cloudflare-adapter sites are slower to boot; confirm if tuning.
const SITE_MATRIX: SiteCase[] = [
  // Demos
  {
    name: "demos/simple",
    dir: resolve(WORKSPACE_ROOT, "demos/simple"),
    port: 4601,
    mode: "runtime",
    startupTimeoutMs: 60_000,
  },
  {
    name: "demos/cloudflare",
    dir: resolve(WORKSPACE_ROOT, "demos/cloudflare"),
    port: 4602,
    mode: "runtime",
    startupTimeoutMs: 120_000,
  },
  {
    name: "demos/playground",
    dir: resolve(WORKSPACE_ROOT, "demos/playground"),
    port: 4603,
    mode: "runtime",
    startupTimeoutMs: 120_000,
    // Playground serves its UI at /playground rather than /.
    waitPath: "/playground",
    frontendPath: "/playground",
    requireDoctype: false,
  },
  {
    name: "demos/preview",
    dir: resolve(WORKSPACE_ROOT, "demos/preview"),
    port: 4604,
    mode: "runtime",
    startupTimeoutMs: 120_000,
    // No setup endpoint; the bare frontend responds 400 by design here.
    setupPath: null,
    frontendStatuses: [400],
    requireDoctype: false,
  },
  // Postgres demo requires DATABASE_URL — skip when not available
  ...(process.env.DATABASE_URL
    ? [
        {
          name: "demos/postgres",
          dir: resolve(WORKSPACE_ROOT, "demos/postgres"),
          port: 4605,
          mode: "runtime" as const,
          startupTimeoutMs: 90_000,
        },
      ]
    : []),
  {
    name: "demos/plugins-demo",
    dir: resolve(WORKSPACE_ROOT, "demos/plugins-demo"),
    port: 4606,
    mode: "runtime",
    startupTimeoutMs: 90_000,
  },
  // Templates
  {
    name: "templates/blank",
    dir: resolve(WORKSPACE_ROOT, "templates/blank"),
    port: 4611,
    mode: "runtime",
    startupTimeoutMs: 60_000,
  },
  {
    name: "templates/blog",
    dir: resolve(WORKSPACE_ROOT, "templates/blog"),
    port: 4612,
    mode: "runtime",
    startupTimeoutMs: 60_000,
  },
  {
    name: "templates/blog-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/blog-cloudflare"),
    port: 4613,
    mode: "runtime",
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/marketing",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing"),
    port: 4614,
    mode: "runtime",
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/marketing-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/marketing-cloudflare"),
    port: 4615,
    mode: "runtime",
    startupTimeoutMs: 120_000,
  },
  {
    name: "templates/portfolio",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio"),
    port: 4616,
    mode: "runtime",
    startupTimeoutMs: 90_000,
  },
  {
    name: "templates/portfolio-cloudflare",
    dir: resolve(WORKSPACE_ROOT, "templates/portfolio-cloudflare"),
    port: 4617,
    mode: "runtime",
    startupTimeoutMs: 120_000,
  },
];
/**
 * Poll `url` (without following redirects) until any HTTP response arrives,
 * failing once `timeoutMs` has elapsed. Network errors mean "not up yet".
 */
async function waitForServer(url: string, timeoutMs: number): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      const res = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(3000),
      });
      // Any status at all means something is serving the port.
      if (res.status > 0) return;
    } catch {
      // Not listening yet — poll again shortly.
    }
    await new Promise((wake) => setTimeout(wake, 500));
  }
  throw new Error(`Server at ${url} did not start within ${timeoutMs}ms`);
}
/**
 * Fetch `url`, retrying on network failures and 5xx responses.
 * Returns the first response with status < 500; after exhausting all
 * retries, throws the last error seen.
 */
async function fetchWithRetry(url: string, retries = 10, delayMs = 1500): Promise<Response> {
  let lastError: unknown;
  for (let attempt = 0; attempt <= retries; attempt++) {
    // Back off before every retry (never before the first attempt).
    if (attempt > 0) {
      await new Promise((wake) => setTimeout(wake, delayMs));
    }
    try {
      const res = await fetch(url, {
        redirect: "manual",
        signal: AbortSignal.timeout(15_000),
      });
      if (res.status < 500) return res;
      lastError = new Error(`${url} returned ${res.status}`);
    } catch (error) {
      lastError = error;
    }
  }
  throw lastError instanceof Error ? lastError : new Error(`Request failed for ${url}`);
}
// Boot every site in SITE_MATRIX and smoke-test it. Runs sequentially because
// each site binds a dev-server port and spawns child processes; parallel runs
// would contend for ports and build artifacts.
describe.sequential("Site smoke matrix", () => {
  for (const site of SITE_MATRIX) {
    // "typecheck"-mode entries never boot a server; they only run the
    // project's typecheck script in the site directory.
    if (site.mode === "typecheck") {
      it(`${site.name} typechecks`, { timeout: 120_000 }, async () => {
        await execAsync("pnpm", ["run", "typecheck"], {
          cwd: site.dir,
          timeout: 120_000,
        });
      });
      continue;
    }
    // Per-site overrides with defaults matching the standard emdash layout.
    const waitPath = site.waitPath ?? "/_emdash/admin/";
    const setupPath = site.setupPath ?? "/_emdash/api/setup/dev-bypass?redirect=/";
    const frontendPath = site.frontendPath ?? "/";
    const frontendStatuses = site.frontendStatuses ?? [200, 302, 307, 308];
    const requireDoctype = site.requireDoctype ?? true;
    it(
      `${site.name} boots and serves admin + frontend`,
      { timeout: site.startupTimeoutMs + 120_000 },
      async () => {
        await ensureBuilt();
        const baseUrl = `http://localhost:${site.port}`;
        // Start the Astro dev server as a child process; output is piped so
        // failures can include the server log tail.
        const serverProcess = spawn("pnpm", ["exec", "astro", "dev", "--port", String(site.port)], {
          cwd: site.dir,
          env: {
            ...process.env,
            CI: "true",
          },
          stdio: "pipe",
        });
        // Accumulate combined stdout/stderr for diagnostics on failure.
        let output = "";
        serverProcess.stdout?.on("data", (data: Buffer) => {
          output += data.toString();
        });
        serverProcess.stderr?.on("data", (data: Buffer) => {
          output += data.toString();
        });
        try {
          await waitForServer(`${baseUrl}${waitPath}`, site.startupTimeoutMs);
          // Optional setup endpoint (dev bypass); anything below 500 is fine.
          if (setupPath) {
            const setupRes = await fetchWithRetry(`${baseUrl}${setupPath}`);
            expect(setupRes.status).toBeLessThan(500);
          }
          // Admin must respond without a server error.
          const adminRes = await fetchWithRetry(`${baseUrl}/_emdash/admin/`);
          expect(adminRes.status).toBeLessThan(500);
          // Frontend must respond with one of the allowed statuses and,
          // unless disabled, serve a real HTML document.
          const frontendRes = await fetchWithRetry(`${baseUrl}${frontendPath}`);
          expect(frontendStatuses).toContain(frontendRes.status);
          const body = await frontendRes.text();
          if (requireDoctype) {
            expect(body).toContain("<!DOCTYPE html>");
          }
        } catch (error) {
          // Re-throw with the last 3000 chars of server output attached so
          // CI failures are debuggable without re-running locally.
          throw new Error(
            `${site.name} smoke failed: ${error instanceof Error ? error.message : String(error)}\n\n` +
              output.slice(-3000),
            { cause: error },
          );
        } finally {
          // Graceful shutdown first; escalate to SIGKILL if the process is
          // still alive after a short grace period.
          serverProcess.kill("SIGTERM");
          await new Promise((resolveSleep) => setTimeout(resolveSleep, 1200));
          if (!serverProcess.killed) {
            serverProcess.kill("SIGKILL");
            await new Promise((resolveSleep) => setTimeout(resolveSleep, 500));
          }
        }
      },
    );
  }
});

View File

@@ -0,0 +1,401 @@
/**
* Smoke tests for template/demo seed fixtures.
*
* Validates that all seed files are well-formed, can be applied
* to a fresh database, and that the resulting database passes
* doctor checks. Does NOT start a dev server — these are fast,
* programmatic tests that exercise the seed/validate/apply/doctor
* pipeline directly.
*
* Also shells out to the CLI binary for seed --validate and doctor
* commands to ensure the CLI interface works correctly.
*/
import { execFile } from "node:child_process";
import { existsSync, readFileSync, readdirSync, mkdtempSync, rmSync, mkdirSync } from "node:fs";
import { tmpdir } from "node:os";
import { join, resolve } from "node:path";
import { promisify } from "node:util";
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import { createDatabase } from "../../../src/database/connection.js";
import { runMigrations } from "../../../src/database/migrations/runner.js";
import { applySeed } from "../../../src/seed/apply.js";
import type { SeedFile } from "../../../src/seed/types.js";
import { validateSeed } from "../../../src/seed/validate.js";
import { LocalStorage } from "../../../src/storage/local.js";
import { ensureBuilt } from "../server.js";
const exec = promisify(execFile);
const WORKSPACE_ROOT = resolve(import.meta.dirname, "../../../../..");
const CLI_BIN = resolve(import.meta.dirname, "../../../dist/cli/index.mjs");
const VALIDATION_FAILED_RE = /validation failed/i;
// ---------------------------------------------------------------------------
// Discover all templates and demos with seed files
// ---------------------------------------------------------------------------
/** A discovered template/demo directory paired with its parsed seed file. */
interface SiteFixture {
  /** Human-readable name for test output */
  name: string;
  /** Absolute path to the template/theme directory */
  dir: string;
  /** Absolute path to the seed file */
  seedPath: string;
  /** Parsed seed file contents */
  seed: SeedFile;
}
/**
 * Scan the workspace `templates/` and `demos/` directories for site fixtures
 * that ship a seed file.
 *
 * The seed path defaults to `.emdash/seed.json` inside each site directory,
 * but a site may override it via the `emdash.seed` key of its package.json.
 * Directories without a resolvable seed file are skipped silently; a seed
 * file that is not valid JSON throws.
 *
 * @returns One {@link SiteFixture} per discovered template/demo.
 */
function discoverFixtures(): SiteFixture[] {
  // Resolve the seed file location for one site directory, honouring the
  // optional package.json override. Unreadable package.json is tolerated.
  const seedPathFor = (dir: string): string => {
    const fallback = join(dir, ".emdash", "seed.json");
    const pkgPath = join(dir, "package.json");
    if (!existsSync(pkgPath)) return fallback;
    try {
      const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
      const override = pkg.emdash?.seed;
      return override ? join(dir, override) : fallback;
    } catch {
      // Malformed package.json — fall back to the convention path.
      return fallback;
    }
  };

  const roots = [
    { prefix: "templates", path: resolve(WORKSPACE_ROOT, "templates") },
    { prefix: "demos", path: resolve(WORKSPACE_ROOT, "demos") },
  ];

  const found: SiteFixture[] = [];
  for (const { prefix, path: parentDir } of roots) {
    if (!existsSync(parentDir)) continue;
    for (const entry of readdirSync(parentDir)) {
      const dir = join(parentDir, entry);
      const seedPath = seedPathFor(dir);
      if (!existsSync(seedPath)) continue;
      const seed = JSON.parse(readFileSync(seedPath, "utf-8")) as SeedFile;
      found.push({ name: `${prefix}/${entry}`, dir, seedPath, seed });
    }
  }
  return found;
}
const fixtures = discoverFixtures();
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
describe("Seed Fixture Smoke Tests", () => {
  // Temp directories created by the current test; removed in afterEach.
  let tempDirs: string[] = [];
  beforeAll(async () => {
    // Ensure CLI binary is built for CLI-based tests
    await ensureBuilt();
  }, 120_000);
  afterEach(() => {
    // Clean up any temp directories created during tests
    for (const dir of tempDirs) {
      rmSync(dir, { recursive: true, force: true });
    }
    tempDirs = [];
  });
  // Create a fresh temp directory and register it for afterEach cleanup.
  function createTempDir(): string {
    const dir = mkdtempSync(join(tmpdir(), "emdash-smoke-"));
    tempDirs.push(dir);
    return dir;
  }
  // Sanity check: we actually found fixtures to test
  it("discovers at least one template/demo with a seed file", () => {
    expect(fixtures.length).toBeGreaterThanOrEqual(1);
    const names = fixtures.map((f) => f.name);
    // At minimum the blog template should always be present.
    expect(names).toContain("templates/blog");
  });
  // -----------------------------------------------------------------------
  // Per-fixture tests
  // -----------------------------------------------------------------------
  for (const fixture of fixtures) {
    describe(fixture.name, () => {
      // --- Seed file is valid JSON with correct structure ---
      it("has a valid seed.json that parses as JSON", () => {
        expect(fixture.seed).toBeDefined();
        expect(fixture.seed.version).toBe("1");
      });
      // --- Programmatic validation ---
      it("passes programmatic seed validation", () => {
        const result = validateSeed(fixture.seed);
        if (!result.valid) {
          // Include errors in failure message for debuggability
          expect.fail(`Seed validation failed:\n${result.errors.join("\n")}`);
        }
        expect(result.valid).toBe(true);
      });
      // --- CLI --validate ---
      it("passes CLI seed --validate", async () => {
        const { stdout, stderr } = await exec(
          "node",
          [CLI_BIN, "seed", fixture.seedPath, "--validate"],
          {
            cwd: fixture.dir,
            timeout: 15_000,
          },
        );
        // The validate command should succeed (exit 0) — if it throws,
        // the test will fail with the error message
        expect(stdout + stderr).not.toMatch(VALIDATION_FAILED_RE);
      });
      // --- Seed applies to fresh database ---
      it("applies seed to a fresh database without errors", { timeout: 30_000 }, async () => {
        const tempDir = createTempDir();
        const dbPath = join(tempDir, "test.db");
        const uploadsDir = join(tempDir, "uploads");
        mkdirSync(uploadsDir, { recursive: true });
        // Create database and run migrations
        const db = createDatabase({ url: `file:${dbPath}` });
        try {
          const { applied } = await runMigrations(db);
          expect(applied.length).toBeGreaterThan(0);
          // Set up local storage for media resolution
          const storage = new LocalStorage({
            directory: uploadsDir,
            baseUrl: "/_emdash/api/media/file",
          });
          // Apply seed
          const result = await applySeed(db, fixture.seed, {
            includeContent: true,
            onConflict: "skip",
            storage,
            mediaBasePath: join(fixture.dir, ".emdash"),
          });
          // Verify collections were created
          if (fixture.seed.collections && fixture.seed.collections.length > 0) {
            expect(result.collections.created).toBeGreaterThan(0);
          }
          // Verify fields were created
          const totalFields =
            fixture.seed.collections?.reduce((sum, c) => sum + (c.fields?.length ?? 0), 0) ?? 0;
          if (totalFields > 0) {
            expect(result.fields.created).toBeGreaterThan(0);
          }
          // Verify content was created if seed has content
          if (fixture.seed.content) {
            const totalEntries = Object.values(fixture.seed.content).reduce(
              (sum, entries) => sum + (Array.isArray(entries) ? entries.length : 0),
              0,
            );
            if (totalEntries > 0) {
              expect(result.content.created).toBeGreaterThan(0);
            }
          }
          // Verify taxonomy processing completed (some may be pre-seeded by migrations)
          if (fixture.seed.taxonomies && fixture.seed.taxonomies.length > 0) {
            // Taxonomies either created or already existed — just verify no crash
            expect(result.taxonomies.created + result.taxonomies.terms).toBeGreaterThanOrEqual(0);
          }
          // Verify menus if present
          if (fixture.seed.menus && fixture.seed.menus.length > 0) {
            expect(result.menus.created).toBeGreaterThan(0);
          }
        } finally {
          await db.destroy();
        }
      });
      // --- CLI seed apply + doctor ---
      it("passes CLI doctor after seed apply", { timeout: 30_000 }, async () => {
        const tempDir = createTempDir();
        const dbPath = join(tempDir, "test.db");
        // Apply seed via CLI (this also runs migrations)
        await exec("node", [CLI_BIN, "seed", fixture.seedPath, "--database", dbPath], {
          cwd: fixture.dir,
          timeout: 30_000,
        });
        // Run doctor and verify all checks pass
        const { stdout } = await exec("node", [CLI_BIN, "doctor", "--database", dbPath, "--json"], {
          cwd: fixture.dir,
          timeout: 15_000,
        });
        const checks = JSON.parse(stdout) as Array<{
          name: string;
          status: "pass" | "warn" | "fail";
          message: string;
        }>;
        // No failures allowed
        const failures = checks.filter((c) => c.status === "fail");
        if (failures.length > 0) {
          expect.fail(
            `Doctor failures:\n${failures.map((f) => `  ${f.name}: ${f.message}`).join("\n")}`,
          );
        }
        // Database, migrations, and collections should all pass
        const dbCheck = checks.find((c) => c.name === "database");
        expect(dbCheck?.status).toBe("pass");
        const migrationsCheck = checks.find((c) => c.name === "migrations");
        expect(migrationsCheck?.status).toBe("pass");
        const collectionsCheck = checks.find((c) => c.name === "collections");
        expect(collectionsCheck?.status).toBe("pass");
      });
      // --- Idempotent re-apply ---
      it(
        "can re-apply seed with on-conflict=skip without errors",
        { timeout: 30_000 },
        async () => {
          const tempDir = createTempDir();
          const dbPath = join(tempDir, "test.db");
          const uploadsDir = join(tempDir, "uploads");
          mkdirSync(uploadsDir, { recursive: true });
          const db = createDatabase({ url: `file:${dbPath}` });
          try {
            await runMigrations(db);
            const storage = new LocalStorage({
              directory: uploadsDir,
              baseUrl: "/_emdash/api/media/file",
            });
            const seedOpts = {
              includeContent: true,
              onConflict: "skip" as const,
              storage,
              // NOTE(review): the single-apply test above passes this path as
              // `mediaBasePath`, while this test uses `seedDir` — confirm
              // which option name applySeed actually reads; one of the two
              // spellings is likely ignored.
              seedDir: join(fixture.dir, ".emdash"),
            };
            // First apply
            await applySeed(db, fixture.seed, seedOpts);
            // Second apply — should not throw
            const result2 = await applySeed(db, fixture.seed, seedOpts);
            // Everything should be skipped on second apply
            expect(result2.collections.created).toBe(0);
          } finally {
            await db.destroy();
          }
        },
      );
      // --- package.json has emdash.seed pointing to seed file ---
      it("has package.json with emdash.seed pointing to the seed file", () => {
        const pkgPath = join(fixture.dir, "package.json");
        if (!existsSync(pkgPath)) return; // blank template has no seed, already filtered
        const pkg = JSON.parse(readFileSync(pkgPath, "utf-8"));
        // Either emdash.seed is set, or we rely on the .emdash/seed.json convention
        const seedRef = pkg.emdash?.seed;
        if (seedRef) {
          const resolvedSeedPath = resolve(fixture.dir, seedRef);
          expect(existsSync(resolvedSeedPath)).toBe(true);
        } else {
          // Convention: .emdash/seed.json exists (which it does since we're iterating fixtures)
          expect(existsSync(fixture.seedPath)).toBe(true);
        }
      });
    });
  }
  // -----------------------------------------------------------------------
  // Cross-cutting: all templates/demos have required files
  // -----------------------------------------------------------------------
  describe("Required files", () => {
    // Every directory under templates/ and demos/ that has a package.json,
    // regardless of whether it ships a seed file.
    const roots = [
      { prefix: "templates", dir: resolve(WORKSPACE_ROOT, "templates") },
      { prefix: "demos", dir: resolve(WORKSPACE_ROOT, "demos") },
    ].filter((root) => existsSync(root.dir));
    const allDirs = roots
      .flatMap((root) =>
        readdirSync(root.dir).map((entry) => ({
          name: `${root.prefix}/${entry}`,
          dir: join(root.dir, entry),
        })),
      )
      .filter((d) => existsSync(join(d.dir, "package.json")));
    for (const { name, dir } of allDirs) {
      describe(name, () => {
        it("has astro.config.mjs", () => {
          expect(existsSync(join(dir, "astro.config.mjs"))).toBe(true);
        });
        it("has tsconfig.json", () => {
          expect(existsSync(join(dir, "tsconfig.json"))).toBe(true);
        });
        it("has live.config.ts with emdashLoader", () => {
          const liveConfig = join(dir, "src", "live.config.ts");
          expect(existsSync(liveConfig)).toBe(true);
          const content = readFileSync(liveConfig, "utf-8");
          expect(content).toContain("emdashLoader");
          expect(content).toContain("defineLiveCollection");
        });
        it("has typecheck script in package.json", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          expect(pkg.scripts?.typecheck || pkg.scripts?.check).toBeDefined();
        });
        it("uses workspace:* for emdash dependency", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          expect(pkg.dependencies?.emdash).toBe("workspace:*");
        });
        it("uses catalog: for astro dependency", () => {
          const pkg = JSON.parse(readFileSync(join(dir, "package.json"), "utf-8"));
          const astroVersion = pkg.dependencies?.astro;
          expect(astroVersion).toBe("catalog:");
        });
      });
    }
  });
});

View File

@@ -0,0 +1,169 @@
/**
* Integration test for the full preview snapshot auth flow.
*
* Tests the complete chain that would have caught bug #3:
* signPreviewUrl → middleware builds header → snapshot endpoint parses and verifies
*
* The signing side (signPreviewUrl) lives in @emdashcms/cloudflare, but we
* inline the same HMAC logic here to test the format contract without
* cross-package imports.
*/
import { sql } from "kysely";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
generateSnapshot,
parsePreviewSignatureHeader,
verifyPreviewSignature,
} from "../../../src/api/handlers/snapshot.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
const SECRET = "test-preview-secret";
/**
 * Produce a signed preview tuple { source, exp, sig } using the same
 * HMAC-SHA256 construction as @emdashcms/cloudflare's signPreviewUrl().
 * The logic is inlined so the header-format contract is tested without a
 * cross-package dependency.
 *
 * @param source - The preview source URL being signed.
 * @param ttl - Seconds until expiry (negative values yield an already-expired
 *   signature, useful for expiry tests). Defaults to one hour.
 * @returns The source, the absolute unix expiry, and the hex-encoded MAC.
 */
async function signPreview(
  source: string,
  ttl = 3600,
): Promise<{ source: string; exp: number; sig: string }> {
  const exp = Math.floor(Date.now() / 1000) + ttl;
  const textEncoder = new TextEncoder();
  // Import the shared test secret as an HMAC-SHA256 signing key.
  const hmacKey = await crypto.subtle.importKey(
    "raw",
    textEncoder.encode(SECRET),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"],
  );
  // Sign "source:exp" and hex-encode the MAC bytes (lowercase, zero-padded).
  const macBytes = new Uint8Array(
    await crypto.subtle.sign("HMAC", hmacKey, textEncoder.encode(`${source}:${exp}`)),
  );
  let sig = "";
  for (const byte of macBytes) {
    sig += byte.toString(16).padStart(2, "0");
  }
  return { source, exp, sig };
}
/**
 * Serialize signed-preview parts into the X-Preview-Signature header value
 * exactly as the preview middleware does: "source:exp:sig".
 */
function buildSignatureHeader(parts: { source: string; exp: number; sig: string }): string {
  const { source, exp, sig } = parts;
  return [source, exp, sig].join(":");
}
// End-to-end coverage of the sign → header → parse → verify → snapshot chain,
// plus the three rejection paths (tampered sig, wrong secret, expired).
describe("preview snapshot auth flow", () => {
  let db: Kysely<Database>;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
  });
  afterEach(async () => {
    await db.destroy();
  });
  it("end-to-end: signed preview URL → header → snapshot access", async () => {
    // 1. Insert some content so snapshot has data
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('p1', 'test-post', 'published', 'Test', 'Body', datetime('now'), datetime('now'), 1)
    `.execute(db);
    // 2. Sign a preview URL (same logic as @emdashcms/cloudflare signPreviewUrl)
    const signed = await signPreview("https://mysite.com");
    // 3. Build the header the way the preview middleware does
    const headerValue = buildSignatureHeader(signed);
    // 4. Parse the header the way the snapshot endpoint does
    const parsed = parsePreviewSignatureHeader(headerValue);
    expect(parsed).not.toBeNull();
    expect(parsed!.source).toBe("https://mysite.com");
    expect(parsed!.exp).toBe(signed.exp);
    expect(parsed!.sig).toBe(signed.sig);
    // 5. Verify the signature the way the snapshot endpoint does
    const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, parsed!.sig, SECRET);
    expect(valid).toBe(true);
    // 6. Actually generate the snapshot (proves auth would grant access)
    const snapshot = await generateSnapshot(db);
    expect(snapshot.tables.ec_post).toHaveLength(1);
    expect(snapshot.tables.ec_post[0]!.slug).toBe("test-post");
  });
  // A hex string of the correct length but wrong value must fail verification.
  it("rejects tampered signature", async () => {
    const signed = await signPreview("https://mysite.com");
    const headerValue = buildSignatureHeader(signed);
    const parsed = parsePreviewSignatureHeader(headerValue);
    expect(parsed).not.toBeNull();
    // Tamper with the signature
    const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, "a".repeat(64), SECRET);
    expect(valid).toBe(false);
  });
  // A MAC computed with one secret must not verify under another.
  it("rejects wrong secret", async () => {
    const signed = await signPreview("https://mysite.com");
    const headerValue = buildSignatureHeader(signed);
    const parsed = parsePreviewSignatureHeader(headerValue);
    expect(parsed).not.toBeNull();
    const valid = await verifyPreviewSignature(
      parsed!.source,
      parsed!.exp,
      parsed!.sig,
      "wrong-secret",
    );
    expect(valid).toBe(false);
  });
  // Even a correct MAC must be rejected once its exp timestamp has passed.
  it("rejects expired signature", async () => {
    // Sign with TTL of -1 (already expired)
    const signed = await signPreview("https://mysite.com", -1);
    const headerValue = buildSignatureHeader(signed);
    const parsed = parsePreviewSignatureHeader(headerValue);
    expect(parsed).not.toBeNull();
    const valid = await verifyPreviewSignature(parsed!.source, parsed!.exp, parsed!.sig, SECRET);
    expect(valid).toBe(false);
  });
});
describe("parsePreviewSignatureHeader", () => {
  // The source may itself contain ":" (scheme separator, port), so the parser
  // must not naively split on the first colons.
  it("parses source URLs with colons correctly", async () => {
    const signed = await signPreview("https://mysite.com:8080");
    const parsed = parsePreviewSignatureHeader(buildSignatureHeader(signed));
    expect(parsed).not.toBeNull();
    expect(parsed!.source).toBe("https://mysite.com:8080");
    expect(parsed!.exp).toBe(signed.exp);
    expect(parsed!.sig).toBe(signed.sig);
  });
  // Every malformed header shape must yield null rather than throwing.
  const malformedHeaders: ReadonlyArray<readonly [string, string]> = [
    ["rejects empty string", ""],
    ["rejects header with no colons", "noseparators"],
    ["rejects header with sig wrong length", "https://x.com:12345:tooshort"],
    ["rejects header with non-numeric exp", `https://x.com:notanumber:${"a".repeat(64)}`],
  ];
  for (const [label, header] of malformedHeaders) {
    it(label, () => {
      expect(parsePreviewSignatureHeader(header)).toBeNull();
    });
  }
});

View File

@@ -0,0 +1,217 @@
import { sql } from "kysely";
import type { Kysely } from "kysely";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { Snapshot } from "../../../src/api/handlers/snapshot.js";
import { generateSnapshot } from "../../../src/api/handlers/snapshot.js";
import type { Database } from "../../../src/database/types.js";
import { setupTestDatabaseWithCollections } from "../../utils/test-db.js";
// Unit tests for generateSnapshot: content filtering (drafts, soft-deletes),
// table/schema discovery, exclusion of auth tables, and options filtering.
describe("generateSnapshot", () => {
  let db: Kysely<Database>;
  beforeEach(async () => {
    db = await setupTestDatabaseWithCollections();
  });
  afterEach(async () => {
    await db.destroy();
  });
  it("returns empty tables when no content exists", async () => {
    const snapshot = await generateSnapshot(db);
    expect(snapshot.generatedAt).toBeTruthy();
    expect(typeof snapshot.generatedAt).toBe("string");
    // Schema should include ec_post and ec_page (even with no rows)
    expect(snapshot.schema).toHaveProperty("ec_post");
    expect(snapshot.schema).toHaveProperty("ec_page");
    expect(snapshot.schema.ec_post.columns).toContain("id");
    expect(snapshot.schema.ec_post.columns).toContain("title");
    expect(snapshot.schema.ec_post.columns).toContain("slug");
    expect(snapshot.schema.ec_post.columns).toContain("status");
    // System tables with data should appear
    expect(snapshot.schema).toHaveProperty("_emdash_collections");
    expect(snapshot.schema).toHaveProperty("_emdash_fields");
    // _emdash_collections should have 2 rows (post + page)
    expect(snapshot.tables._emdash_collections).toHaveLength(2);
  });
  it("includes published content and excludes drafts by default", async () => {
    // Insert a published post
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('pub1', 'hello-world', 'published', 'Hello World', 'Content here', datetime('now'), datetime('now'), 1)
    `.execute(db);
    // Insert a draft post
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('draft1', 'draft-post', 'draft', 'Draft Post', 'Draft content', datetime('now'), datetime('now'), 1)
    `.execute(db);
    const snapshot = await generateSnapshot(db);
    // Only published content should appear
    expect(snapshot.tables.ec_post).toHaveLength(1);
    expect(snapshot.tables.ec_post[0].slug).toBe("hello-world");
  });
  it("includes drafts when includeDrafts is true", async () => {
    // Insert a published post
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('pub1', 'hello-world', 'published', 'Hello World', 'Content', datetime('now'), datetime('now'), 1)
    `.execute(db);
    // Insert a draft post
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('draft1', 'draft-post', 'draft', 'Draft Post', 'Draft', datetime('now'), datetime('now'), 1)
    `.execute(db);
    const snapshot = await generateSnapshot(db, { includeDrafts: true });
    // Both should appear
    expect(snapshot.tables.ec_post).toHaveLength(2);
  });
  it("excludes soft-deleted content", async () => {
    // Insert a published post
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('pub1', 'live-post', 'published', 'Live', 'Content', datetime('now'), datetime('now'), 1)
    `.execute(db);
    // Insert a soft-deleted post (deleted_at set) — must never be exported
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, deleted_at, version)
      VALUES ('del1', 'deleted-post', 'published', 'Deleted', 'Gone', datetime('now'), datetime('now'), datetime('now'), 1)
    `.execute(db);
    const snapshot = await generateSnapshot(db);
    expect(snapshot.tables.ec_post).toHaveLength(1);
    expect(snapshot.tables.ec_post[0].slug).toBe("live-post");
  });
  it("excludes auth and security tables", async () => {
    const snapshot = await generateSnapshot(db);
    // These should not appear in schema or tables
    expect(snapshot.schema).not.toHaveProperty("users");
    expect(snapshot.schema).not.toHaveProperty("sessions");
    expect(snapshot.schema).not.toHaveProperty("credentials");
    expect(snapshot.schema).not.toHaveProperty("challenges");
    expect(snapshot.schema).not.toHaveProperty("_emdash_api_tokens");
    expect(snapshot.schema).not.toHaveProperty("_emdash_oauth_tokens");
  });
  it("includes system tables needed for rendering", async () => {
    const snapshot = await generateSnapshot(db);
    // These system tables should have schema entries
    expect(snapshot.schema).toHaveProperty("_emdash_collections");
    expect(snapshot.schema).toHaveProperty("_emdash_fields");
    expect(snapshot.schema).toHaveProperty("_emdash_migrations");
    expect(snapshot.schema).toHaveProperty("options");
  });
  it("includes column type info in schema", async () => {
    const snapshot = await generateSnapshot(db);
    const postSchema = snapshot.schema.ec_post;
    expect(postSchema).toBeDefined();
    expect(postSchema.types).toBeDefined();
    // PRAGMA table_info returns types as declared (case-sensitive)
    // Kysely creates tables with lowercase types
    expect(postSchema.types!.id.toLowerCase()).toBe("text");
    expect(postSchema.types!.version.toLowerCase()).toBe("integer");
  });
  it("snapshot shape matches DO expectation", async () => {
    await sql`
      INSERT INTO ec_post (id, slug, status, title, content, created_at, updated_at, version)
      VALUES ('p1', 'test', 'published', 'Test', 'Body', datetime('now'), datetime('now'), 1)
    `.execute(db);
    const snapshot: Snapshot = await generateSnapshot(db);
    // Verify shape matches what EmDashPreviewDB.applySnapshot expects
    expect(snapshot).toHaveProperty("tables");
    expect(snapshot).toHaveProperty("schema");
    expect(snapshot).toHaveProperty("generatedAt");
    expect(typeof snapshot.generatedAt).toBe("string");
    // Tables are Record<string, Record<string, unknown>[]>
    for (const [tableName, rows] of Object.entries(snapshot.tables)) {
      expect(typeof tableName).toBe("string");
      expect(Array.isArray(rows)).toBe(true);
      for (const row of rows) {
        expect(typeof row).toBe("object");
      }
    }
    // Schema has columns and types
    for (const [tableName, info] of Object.entries(snapshot.schema)) {
      expect(typeof tableName).toBe("string");
      expect(Array.isArray(info.columns)).toBe(true);
      if (info.types) {
        expect(typeof info.types).toBe("object");
      }
    }
  });
  it("filters options table to safe rendering prefixes only", async () => {
    // Insert site settings (safe — should be included)
    await sql`INSERT INTO options (name, value) VALUES ('site:title', '"My Site"')`.execute(db);
    await sql`INSERT INTO options (name, value) VALUES ('site:tagline', '"Welcome"')`.execute(db);
    // Insert plugin secrets (unsafe — should be excluded)
    await sql`INSERT INTO options (name, value) VALUES ('plugin:smtp:api_key', '"sk-secret-123"')`.execute(
      db,
    );
    await sql`INSERT INTO options (name, value) VALUES ('plugin:seo:license', '"lic-456"')`.execute(
      db,
    );
    // Insert setup/auth data (unsafe — should be excluded)
    await sql`INSERT INTO options (name, value) VALUES ('emdash:setup_complete', 'true')`.execute(
      db,
    );
    await sql`INSERT INTO options (name, value) VALUES ('emdash:passkey_pending:user1', '{"challenge":"abc"}')`.execute(
      db,
    );
    const snapshot = await generateSnapshot(db);
    const optionsRows = snapshot.tables.options;
    expect(optionsRows).toBeDefined();
    expect(optionsRows).toHaveLength(2);
    const names = optionsRows.map((r) => r.name);
    expect(names).toContain("site:title");
    expect(names).toContain("site:tagline");
    expect(names).not.toContain("plugin:smtp:api_key");
    expect(names).not.toContain("plugin:seo:license");
    expect(names).not.toContain("emdash:setup_complete");
    expect(names).not.toContain("emdash:passkey_pending:user1");
  });
  it("discovers content tables dynamically", async () => {
    // The test setup creates ec_post and ec_page
    const snapshot = await generateSnapshot(db);
    expect(snapshot.schema).toHaveProperty("ec_post");
    expect(snapshot.schema).toHaveProperty("ec_page");
    // Verify column discovery matches what we created
    expect(snapshot.schema.ec_post.columns).toContain("title");
    expect(snapshot.schema.ec_post.columns).toContain("content");
    expect(snapshot.schema.ec_page.columns).toContain("title");
    expect(snapshot.schema.ec_page.columns).toContain("content");
  });
});

View File

@@ -0,0 +1,337 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!-- WordPress WXR fixture for e2e tests -->
<rss version="2.0"
xmlns:excerpt="http://wordpress.org/export/1.2/excerpt/"
xmlns:content="http://purl.org/rss/1.0/modules/content/"
xmlns:wfw="http://wellformedweb.org/CommentAPI/"
xmlns:dc="http://purl.org/dc/elements/1.1/"
xmlns:wp="http://wordpress.org/export/1.2/"
>
<channel>
<title>Test Blog</title>
<link>https://example.com</link>
<description>A test WordPress site</description>
<pubDate>Sun, 19 Jan 2025 12:00:00 +0000</pubDate>
<language>en-US</language>
<wp:wxr_version>1.2</wp:wxr_version>
<wp:base_site_url>https://example.com</wp:base_site_url>
<wp:base_blog_url>https://example.com</wp:base_blog_url>
<wp:author>
<wp:author_id>1</wp:author_id>
<wp:author_login><![CDATA[admin]]></wp:author_login>
<wp:author_email><![CDATA[admin@example.com]]></wp:author_email>
<wp:author_display_name><![CDATA[Site Admin]]></wp:author_display_name>
</wp:author>
<wp:category>
<wp:term_id>2</wp:term_id>
<wp:category_nicename><![CDATA[tutorials]]></wp:category_nicename>
<wp:category_parent></wp:category_parent>
<wp:cat_name><![CDATA[Tutorials]]></wp:cat_name>
</wp:category>
<wp:category>
<wp:term_id>3</wp:term_id>
<wp:category_nicename><![CDATA[news]]></wp:category_nicename>
<wp:category_parent></wp:category_parent>
<wp:cat_name><![CDATA[News]]></wp:cat_name>
</wp:category>
<wp:tag>
<wp:term_id>4</wp:term_id>
<wp:tag_slug><![CDATA[featured]]></wp:tag_slug>
<wp:tag_name><![CDATA[Featured]]></wp:tag_name>
</wp:tag>
<!-- Post 1: Simple Gutenberg content -->
<item>
<title>Hello World</title>
<link>https://example.com/2025/01/hello-world/</link>
<pubDate>Mon, 15 Jan 2025 10:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=1</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Welcome to our new blog! This is a <strong>test post</strong> with some <em>formatting</em>.</p>
<!-- /wp:paragraph -->
<!-- wp:heading -->
<h2>Getting Started</h2>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>Here's how to get started with our platform.</p>
<!-- /wp:paragraph -->
<!-- wp:list -->
<ul>
<li>Step one</li>
<li>Step two</li>
<li>Step three</li>
</ul>
<!-- /wp:list -->]]></content:encoded>
<excerpt:encoded><![CDATA[Welcome to our new blog!]]></excerpt:encoded>
<wp:post_id>1</wp:post_id>
<wp:post_date><![CDATA[2025-01-15 10:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-15 10:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-15 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-15 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[hello-world]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<category domain="category" nicename="tutorials"><![CDATA[Tutorials]]></category>
<category domain="post_tag" nicename="featured"><![CDATA[Featured]]></category>
<wp:postmeta>
<wp:meta_key><![CDATA[_edit_last]]></wp:meta_key>
<wp:meta_value><![CDATA[1]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[_yoast_wpseo_title]]></wp:meta_key>
<wp:meta_value><![CDATA[Hello World - Welcome Post]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[_yoast_wpseo_metadesc]]></wp:meta_key>
<wp:meta_value><![CDATA[Our first blog post welcoming visitors.]]></wp:meta_value>
</wp:postmeta>
<wp:postmeta>
<wp:meta_key><![CDATA[custom_field]]></wp:meta_key>
<wp:meta_value><![CDATA[custom value]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Post 2: With image and quote -->
<item>
<title>Advanced Features</title>
<link>https://example.com/2025/01/advanced-features/</link>
<pubDate>Wed, 17 Jan 2025 14:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=2</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Let's explore some advanced features.</p>
<!-- /wp:paragraph -->
<!-- wp:image {"id":100,"sizeSlug":"large"} -->
<figure class="wp-block-image size-large"><img src="https://example.com/wp-content/uploads/2025/01/hero.jpg" alt="Hero image" class="wp-image-100"/><figcaption>Our hero image</figcaption></figure>
<!-- /wp:image -->
<!-- wp:quote -->
<blockquote class="wp-block-quote"><p>This is an inspiring quote about technology.</p><cite>Famous Person</cite></blockquote>
<!-- /wp:quote -->
<!-- wp:code -->
<pre class="wp-block-code"><code>const hello = "world";
console.log(hello);</code></pre>
<!-- /wp:code -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>2</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 14:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 14:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 14:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 14:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[advanced-features]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<category domain="category" nicename="tutorials"><![CDATA[Tutorials]]></category>
<wp:postmeta>
<wp:meta_key><![CDATA[_thumbnail_id]]></wp:meta_key>
<wp:meta_value><![CDATA[100]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Post 3: Draft post -->
<item>
<title>Work in Progress</title>
<link>https://example.com/?p=3</link>
<pubDate>Thu, 18 Jan 2025 09:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=3</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>This post is still being written.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>3</wp:post_id>
<wp:post_date><![CDATA[2025-01-18 09:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-18 09:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-18 10:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-18 10:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[open]]></wp:ping_status>
<wp:post_name><![CDATA[work-in-progress]]></wp:post_name>
<wp:status><![CDATA[draft]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[post]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Page 1: About page -->
<item>
<title>About Us</title>
<link>https://example.com/about/</link>
<pubDate>Sat, 01 Jan 2025 12:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?page_id=10</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Welcome to our About page. We are a team of passionate developers.</p>
<!-- /wp:paragraph -->
<!-- wp:heading {"level":3} -->
<h3>Our Mission</h3>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>To build great software that helps people.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>10</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-10 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-10 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[about]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[page]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Page 2: Contact page (child of About) -->
<item>
<title>Contact</title>
<link>https://example.com/about/contact/</link>
<pubDate>Sat, 01 Jan 2025 12:30:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?page_id=11</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:paragraph -->
<p>Get in touch with us at <a href="mailto:hello@example.com">hello@example.com</a>.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>11</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:30:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:30:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-01 12:30:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-01 12:30:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[contact]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>10</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[page]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Attachment -->
<item>
<title>hero</title>
<link>https://example.com/hero/</link>
<pubDate>Wed, 17 Jan 2025 13:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/wp-content/uploads/2025/01/hero.jpg</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[Hero image for the site]]></excerpt:encoded>
<wp:post_id>100</wp:post_id>
<wp:post_date><![CDATA[2025-01-17 13:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-17 13:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-17 13:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-17 13:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[open]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[hero]]></wp:post_name>
<wp:status><![CDATA[inherit]]></wp:status>
<wp:post_parent>2</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[attachment]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
<wp:attachment_url><![CDATA[https://example.com/wp-content/uploads/2025/01/hero.jpg]]></wp:attachment_url>
<wp:postmeta>
<wp:meta_key><![CDATA[_wp_attached_file]]></wp:meta_key>
<wp:meta_value><![CDATA[2025/01/hero.jpg]]></wp:meta_value>
</wp:postmeta>
</item>
<!-- Nav menu item (should be skipped) -->
<item>
<title>Home</title>
<link>https://example.com/?p=50</link>
<pubDate>Sat, 01 Jan 2025 12:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=50</guid>
<description></description>
<content:encoded><![CDATA[]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>50</wp:post_id>
<wp:post_date><![CDATA[2025-01-01 12:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-01 12:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-01 12:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[home]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>1</wp:menu_order>
<wp:post_type><![CDATA[nav_menu_item]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
<!-- Reusable Block (wp_block) - should be imported as section -->
<item>
<title>Newsletter CTA</title>
<link>https://example.com/?p=200</link>
<pubDate>Mon, 20 Jan 2025 10:00:00 +0000</pubDate>
<dc:creator><![CDATA[admin]]></dc:creator>
<guid isPermaLink="false">https://example.com/?p=200</guid>
<description></description>
<content:encoded><![CDATA[<!-- wp:heading {"level":3} -->
<h3>Subscribe to Our Newsletter</h3>
<!-- /wp:heading -->
<!-- wp:paragraph -->
<p>Get the latest updates delivered to your inbox.</p>
<!-- /wp:paragraph -->]]></content:encoded>
<excerpt:encoded><![CDATA[]]></excerpt:encoded>
<wp:post_id>200</wp:post_id>
<wp:post_date><![CDATA[2025-01-20 10:00:00]]></wp:post_date>
<wp:post_date_gmt><![CDATA[2025-01-20 10:00:00]]></wp:post_date_gmt>
<wp:post_modified><![CDATA[2025-01-20 10:00:00]]></wp:post_modified>
<wp:post_modified_gmt><![CDATA[2025-01-20 10:00:00]]></wp:post_modified_gmt>
<wp:comment_status><![CDATA[closed]]></wp:comment_status>
<wp:ping_status><![CDATA[closed]]></wp:ping_status>
<wp:post_name><![CDATA[newsletter-cta]]></wp:post_name>
<wp:status><![CDATA[publish]]></wp:status>
<wp:post_parent>0</wp:post_parent>
<wp:menu_order>0</wp:menu_order>
<wp:post_type><![CDATA[wp_block]]></wp:post_type>
<wp:post_password><![CDATA[]]></wp:post_password>
<wp:is_sticky>0</wp:is_sticky>
</item>
</channel>
</rss>

View File

@@ -0,0 +1,508 @@
/**
* E2E tests for WordPress import CLI
*
* Tests the full two-phase import flow:
* - Phase 1: Prepare (analyze WXR, generate config)
* - Phase 2: Execute (import content using config)
*
* Also tests: --dry-run, --resume, --json flags
*/
import { mkdtemp, rm, readFile, writeFile, readdir } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
prepareWordPressImport,
executeWordPressImport,
type MigrationConfig,
type ImportProgress,
} from "../../../src/cli/commands/import/wordpress.js";
const FIXTURE_PATH = join(import.meta.dirname, "fixtures", "sample-export.xml");
describe("WordPress Import Integration", () => {
// Fresh temp directory for every test; removed afterwards so runs stay isolated
// and can execute in parallel without colliding on output paths.
let testDir: string;
beforeEach(async () => {
  testDir = await mkdtemp(join(tmpdir(), "emdash-wp-import-"));
});
afterEach(async () => {
  // force: true so cleanup doesn't fail when a dry-run test created nothing.
  await rm(testDir, { recursive: true, force: true });
});
// Phase 1 only analyzes the WXR export and emits a migration config —
// no content is written. Expected counts below are tied to fixtures/sample-export.xml.
describe("Phase 1: Prepare", () => {
  it("analyzes WXR and generates migration config", async () => {
    const configPath = join(testDir, ".wp-migration.json");
    await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      verbose: false,
      dryRun: false,
      json: false,
    });
    // Check config was created
    const configContent = await readFile(configPath, "utf-8");
    const config: MigrationConfig = JSON.parse(configContent);
    // Verify site info
    expect(config.site.title).toBe("Test Blog");
    expect(config.site.url).toBe("https://example.com");
    // Verify collections discovered (counts match the fixture: 3 posts, 2 pages)
    expect(config.collections.post).toEqual({
      collection: "posts",
      enabled: true,
      count: 3,
    });
    expect(config.collections.page).toEqual({
      collection: "pages",
      enabled: true,
      count: 2,
    });
    // nav_menu_item should be disabled (if it exists in the export)
    if (config.collections.nav_menu_item) {
      expect(config.collections.nav_menu_item.enabled).toBe(false);
    }
    // Verify custom fields discovered; known SEO meta keys get mapped to
    // nested emdash fields rather than being copied verbatim.
    expect(config.fields._yoast_wpseo_title).toEqual({
      field: "seo.title",
      type: "string",
      enabled: true,
      count: 1,
      samples: expect.any(Array),
    });
    expect(config.fields._yoast_wpseo_metadesc?.field).toBe("seo.description");
    expect(config.fields._thumbnail_id?.field).toBe("featuredImage");
    expect(config.fields.custom_field?.enabled).toBe(true);
    // Internal fields should be disabled
    expect(config.fields._edit_last?.enabled).toBe(false);
  });
  it("generates suggested live.config.ts", async () => {
    const configPath = join(testDir, ".wp-migration.json");
    await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      verbose: false,
      dryRun: false,
      json: false,
    });
    const liveConfigPath = join(testDir, "suggested-live.config.ts");
    const liveConfig = await readFile(liveConfigPath, "utf-8");
    // Collections are now created via Admin UI, so this generates helpful comments
    expect(liveConfig).toContain("Suggested EmDash collections");
    expect(liveConfig).toContain("/_emdash/admin/content-types");
    expect(liveConfig).toContain('post → "posts"');
    expect(liveConfig).toContain('page → "pages"');
    expect(liveConfig).toContain("portableText");
  });
  it("dry-run does not create files", async () => {
    const configPath = join(testDir, ".wp-migration.json");
    const result = await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      verbose: false,
      dryRun: true,
      json: false,
    });
    // Result should indicate dry run
    expect(result.dryRun).toBe(true);
    expect(result.files).toContainEqual({
      path: configPath,
      action: "would_create",
    });
    // Files should NOT exist — readFile on a missing path rejects.
    await expect(readFile(configPath)).rejects.toThrow();
    await expect(readFile(join(testDir, "suggested-live.config.ts"))).rejects.toThrow();
  });
  it("returns structured JSON result", async () => {
    const configPath = join(testDir, ".wp-migration.json");
    const result = await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      verbose: false,
      dryRun: false,
      json: true,
    });
    expect(result.success).toBe(true);
    expect(result.phase).toBe("prepare");
    expect(result.summary.postsAnalyzed).toBe(7); // 3 posts + 2 pages + 1 attachment + 1 wp_block (excludes nav_menu_item)
    expect(result.files.length).toBe(2); // migration config + suggested live.config.ts
    expect(result.nextSteps.length).toBeGreaterThan(0);
  });
});
// Phase 2 consumes the config produced by Phase 1 (run in beforeEach) and
// writes JSON documents, a redirects map, and a resumable progress file.
describe("Phase 2: Execute", () => {
  let configPath: string;
  beforeEach(async () => {
    // Run prepare first to create config
    configPath = join(testDir, ".wp-migration.json");
    await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      verbose: false,
      dryRun: false,
      json: false,
    });
  });
  it("imports posts and pages to correct directories", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    // Check posts directory — one JSON document per WordPress post slug.
    const posts = await readdir(join(testDir, "posts"));
    expect(posts).toContain("hello-world.json");
    expect(posts).toContain("advanced-features.json");
    expect(posts).toContain("work-in-progress.json");
    expect(posts.length).toBe(3);
    // Check pages directory
    const pages = await readdir(join(testDir, "pages"));
    expect(pages).toContain("about.json");
    expect(pages).toContain("contact.json");
    expect(pages.length).toBe(2);
  });
  it("converts Gutenberg blocks to Portable Text", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
    const post = JSON.parse(postContent);
    // Check content is Portable Text array
    expect(Array.isArray(post.content)).toBe(true);
    expect(post.content.length).toBeGreaterThan(0);
    // Check for expected block types
    const blockTypes = post.content.map((b: { _type: string }) => b._type);
    expect(blockTypes).toContain("block"); // paragraphs and headings
    // Check paragraph content survives the conversion intact.
    const firstBlock = post.content[0];
    expect(firstBlock._type).toBe("block");
    expect(firstBlock.children[0].text).toContain("Welcome to our new blog");
  });
  it("maps custom fields correctly", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
    const post = JSON.parse(postContent);
    // Check SEO fields (nested) — mapped from _yoast_wpseo_* meta keys.
    expect(post.seo?.title).toBe("Hello World - Welcome Post");
    expect(post.seo?.description).toBe("Our first blog post welcoming visitors.");
    // Check custom field — unmapped meta keys are copied through verbatim.
    expect(post.custom_field).toBe("custom value");
  });
  it("preserves post metadata", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    const postContent = await readFile(join(testDir, "posts", "hello-world.json"), "utf-8");
    const post = JSON.parse(postContent);
    // WordPress "publish" status becomes "published".
    expect(post.title).toBe("Hello World");
    expect(post.status).toBe("published");
    expect(post.author).toBe("admin");
    expect(post.excerpt).toBe("Welcome to our new blog!");
    expect(post.categories).toContain("tutorials");
    expect(post.tags).toContain("featured");
    // Check WordPress metadata preserved under a _wp namespace.
    expect(post._wp.id).toBe(1);
    expect(post._wp.link).toBe("https://example.com/2025/01/hello-world/");
  });
  it("handles draft posts correctly", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    const postContent = await readFile(join(testDir, "posts", "work-in-progress.json"), "utf-8");
    const post = JSON.parse(postContent);
    expect(post.status).toBe("draft");
  });
  it("creates redirects map", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    // _redirects.json maps old WordPress URLs to new emdash paths.
    const redirectsContent = await readFile(join(testDir, "_redirects.json"), "utf-8");
    const redirects = JSON.parse(redirectsContent);
    expect(redirects["https://example.com/2025/01/hello-world/"]).toBe("/posts/hello-world");
    expect(redirects["https://example.com/about/"]).toBe("/pages/about");
  });
  it("dry-run shows what would be created", async () => {
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: true,
      json: false,
      resume: false,
    });
    expect(result.dryRun).toBe(true);
    expect(result.summary.postsImported).toBe(5); // 3 posts + 2 pages
    // Check files would be created
    const wouldCreate = result.files.filter((f) => f.action === "would_create");
    expect(wouldCreate.length).toBeGreaterThan(0);
    // Actual files should NOT exist — readdir on a missing directory rejects.
    await expect(readdir(join(testDir, "posts"))).rejects.toThrow();
  });
  it("creates progress file for resumability", async () => {
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    const progressContent = await readFile(join(testDir, ".wp-migration-progress.json"), "utf-8");
    const progress: ImportProgress = JSON.parse(progressContent);
    expect(progress.importedPosts.length).toBe(5);
    expect(progress.stats.importedPosts).toBe(5);
    expect(progress.stats.totalPosts).toBe(7); // 3 posts + 2 pages + 1 attachment + 1 wp_block (nav_menu_item excluded)
    expect(progress.errors.length).toBe(0);
  });
  it("resume skips already-imported posts", async () => {
    // First import
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    // Second import with resume
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: true,
      resume: true,
    });
    // All should be skipped (resumed)
    expect(result.summary.postsImported).toBe(0);
    expect(result.summary.postsSkipped).toBe(7); // 5 content items + 1 attachment + 1 wp_block
  });
  it("resume imports only new posts", async () => {
    // First import
    await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: false,
      resume: false,
    });
    // Modify progress to simulate partial import
    const progressPath = join(testDir, ".wp-migration-progress.json");
    const progressContent = await readFile(progressPath, "utf-8");
    const progress: ImportProgress = JSON.parse(progressContent);
    // Remove last 2 posts from imported list
    progress.importedPosts = progress.importedPosts.slice(0, 3);
    progress.stats.importedPosts = 3;
    await writeFile(progressPath, JSON.stringify(progress, null, 2));
    // Delete those files too, so resume has genuinely missing work.
    await rm(join(testDir, "pages", "about.json"));
    await rm(join(testDir, "pages", "contact.json"));
    // Resume import
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: true,
      resume: true,
    });
    // Should import only the 2 missing pages
    expect(result.summary.postsImported).toBe(2);
    expect(result.summary.postsSkipped).toBe(5); // 3 + 1 attachment + 1 wp_block
    // Files should exist again
    const pages = await readdir(join(testDir, "pages"));
    expect(pages).toContain("about.json");
    expect(pages).toContain("contact.json");
  });
  it("returns structured JSON result", async () => {
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: true,
      resume: false,
    });
    expect(result.success).toBe(true);
    expect(result.phase).toBe("execute");
    expect(result.summary.postsImported).toBe(5);
    expect(result.summary.errors).toBe(0);
    expect(result.files.length).toBeGreaterThan(0);
    expect(result.files.every((f) => f.action === "created")).toBe(true);
  });
  it("skips disabled post types", async () => {
    // Modify config to disable pages
    const config: MigrationConfig = JSON.parse(await readFile(configPath, "utf-8"));
    config.collections.page.enabled = false;
    await writeFile(configPath, JSON.stringify(config, null, 2));
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: true,
      resume: false,
    });
    // Only posts should be imported
    expect(result.summary.postsImported).toBe(3);
    expect(result.summary.postsSkipped).toBe(4); // 2 pages + 1 attachment + 1 wp_block
    // Pages directory should not exist
    await expect(readdir(join(testDir, "pages"))).rejects.toThrow();
  });
});
// Failure-mode coverage for the execute phase.
describe("Edge Cases", () => {
  it("handles missing config file gracefully", async () => {
    // Executing against a config path that was never written must reject.
    const missingConfig = join(testDir, "nonexistent.json");
    await expect(
      executeWordPressImport(FIXTURE_PATH, {
        outputDir: testDir,
        configPath: missingConfig,
        skipMedia: true,
        verbose: false,
        dryRun: false,
        json: false,
        resume: false,
      }),
    ).rejects.toThrow();
  });
  it("handles empty progress file on resume", async () => {
    // Prepare first so a migration config exists.
    const freshConfigPath = join(testDir, ".wp-migration.json");
    await prepareWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath: freshConfigPath,
      verbose: false,
      dryRun: false,
      json: false,
    });
    // Resuming with no prior progress file should behave as a fresh start.
    const result = await executeWordPressImport(FIXTURE_PATH, {
      outputDir: testDir,
      configPath: freshConfigPath,
      skipMedia: true,
      verbose: false,
      dryRun: false,
      json: true,
      resume: true,
    });
    expect(result.summary.postsImported).toBe(5);
  });
});
});

View File

@@ -0,0 +1,366 @@
/**
* Integration tests using WordPress Theme Unit Test data
*
* Tests the full WordPress migration pipeline against the official
* WordPress Theme Unit Test dataset. The test data is downloaded from
* GitHub on first run and cached locally.
*
* @see https://github.com/WordPress/theme-test-data
*/
import { createReadStream, existsSync } from "node:fs";
import { mkdir, writeFile } from "node:fs/promises";
import { dirname, join } from "node:path";
import { gutenbergToPortableText } from "@emdashcms/gutenberg-to-portable-text";
import { describe, it, expect, beforeAll } from "vitest";
import { parseWxr } from "../../../src/cli/wxr/parser.js";
// Test regex patterns
const PARAGRAPH_WITH_TEXT_REGEX = /<p[^>]*>[^<]+<\/p>/;
// Local cache of the Theme Unit Test WXR; downloaded on first run by ensureTestData.
// NOTE(review): resolved relative to process.cwd() — assumes tests run from the
// package directory; confirm against the workspace layout.
const TEST_DATA_PATH = join(
  process.cwd(),
  "../../examples/wp-theme-unit-test/themeunittestdata.wordpress.xml",
);
// Canonical upstream source for the dataset.
const TEST_DATA_URL =
  "https://raw.githubusercontent.com/WordPress/theme-test-data/master/themeunittestdata.wordpress.xml";
/**
* Download the WordPress theme unit test data if it doesn't exist locally.
*/
/**
 * Fetch the WordPress theme unit test data into the local cache.
 *
 * No-op when a cached copy already exists at TEST_DATA_PATH; otherwise
 * downloads from TEST_DATA_URL and writes it to disk, creating the
 * parent directory as needed. Throws on a non-2xx HTTP response.
 */
async function ensureTestData(): Promise<void> {
  if (existsSync(TEST_DATA_PATH)) {
    return;
  }
  console.log(`Downloading WordPress theme unit test data from ${TEST_DATA_URL}...`);
  const res = await fetch(TEST_DATA_URL);
  if (!res.ok) {
    throw new Error(`Failed to download test data: ${res.status} ${res.statusText}`);
  }
  const xml = await res.text();
  await mkdir(dirname(TEST_DATA_PATH), { recursive: true });
  await writeFile(TEST_DATA_PATH, xml, "utf-8");
  console.log(`Downloaded to ${TEST_DATA_PATH}`);
}
describe("WordPress Theme Unit Test Migration", () => {
// Parsed WXR shared by all tests below; populated once in beforeAll since
// parsing the full Theme Unit Test export is comparatively expensive.
let wxrData: Awaited<ReturnType<typeof parseWxr>>;
beforeAll(async () => {
  await ensureTestData();
  const stream = createReadStream(TEST_DATA_PATH, { encoding: "utf-8" });
  wxrData = await parseWxr(stream);
});
// Structural parsing checks against the official dataset. Exact values
// (titles, slugs, author names) come from the upstream theme-test-data export.
describe("WXR Parsing", () => {
  it("parses site metadata", () => {
    expect(wxrData.site.title).toBe("Theme Unit Test Data");
    expect(wxrData.site.link).toBe("https://wpthemetestdata.wordpress.com");
    expect(wxrData.site.language).toBe("en");
  });
  it("parses all posts", () => {
    // Theme Unit Test has many posts covering different scenarios;
    // lower bound rather than exact count so upstream additions don't break us.
    expect(wxrData.posts.length).toBeGreaterThan(50);
  });
  it("parses all pages", () => {
    const pages = wxrData.posts.filter((p) => p.postType === "page");
    expect(pages.length).toBeGreaterThan(10);
  });
  it("parses categories with hierarchy", () => {
    expect(wxrData.categories.length).toBeGreaterThan(20);
    // Check for parent-child relationships — parent is referenced by nicename.
    const parentCategory = wxrData.categories.find((c) => c.nicename === "parent-category");
    expect(parentCategory).toBeDefined();
    const childCategory = wxrData.categories.find((c) => c.nicename === "child-category-01");
    expect(childCategory).toBeDefined();
    expect(childCategory?.parent).toBe("parent-category");
  });
  it("parses tags", () => {
    expect(wxrData.tags.length).toBeGreaterThan(50);
    // Check for specific tags
    const wpTag = wxrData.tags.find((t) => t.slug === "wordpress");
    expect(wpTag).toBeDefined();
    expect(wpTag?.name).toBe("WordPress");
  });
  it("parses authors", () => {
    expect(wxrData.authors.length).toBeGreaterThanOrEqual(1);
    const author = wxrData.authors.find((a) => a.login === "themereviewteam");
    expect(author).toBeDefined();
    expect(author?.displayName).toBe("Theme Reviewer");
  });
  it("parses attachments", () => {
    expect(wxrData.attachments.length).toBeGreaterThan(0);
  });
  it("parses post categories and tags", () => {
    // Find a post with both categories and tags
    const postsWithTaxonomies = wxrData.posts.filter(
      (p) => p.categories.length > 0 || p.tags.length > 0,
    );
    expect(postsWithTaxonomies.length).toBeGreaterThan(0);
  });
});
describe("Gutenberg Block Conversion", () => {
// Block-conversion checks. Several tests guard with `if (post)` because the
// upstream dataset isn't guaranteed to contain every block type — those tests
// become no-ops rather than failures when the block is absent.
it("converts paragraph blocks", () => {
  const post = wxrData.posts.find((p) => p.content?.includes("wp:paragraph"));
  expect(post).toBeDefined();
  const result = gutenbergToPortableText(post!.content || "");
  expect(result.length).toBeGreaterThan(0);
  const block = result.find((b) => b._type === "block");
  expect(block).toBeDefined();
});
it("converts heading blocks with different levels", () => {
  const post = wxrData.posts.find((p) => p.title === "WP 6.1 Font size scale");
  expect(post).toBeDefined();
  const result = gutenbergToPortableText(post!.content || "");
  // Should have h2 headings — heading levels map to Portable Text block styles.
  const headings = result.filter(
    (b) => b._type === "block" && (b as any).style?.startsWith("h"),
  );
  expect(headings.length).toBeGreaterThan(0);
});
it("converts list blocks", () => {
  // Find a post with list content
  const post = wxrData.posts.find((p) => p.content?.includes("wp:list"));
  if (post) {
    const result = gutenbergToPortableText(post.content || "");
    const listItems = result.filter((b) => b._type === "block" && (b as any).listItem);
    expect(listItems.length).toBeGreaterThan(0);
  }
});
it("converts image blocks", () => {
  const post = wxrData.posts.find((p) => p.content?.includes("wp:image"));
  if (post) {
    const result = gutenbergToPortableText(post.content || "");
    const images = result.filter((b) => b._type === "image");
    expect(images.length).toBeGreaterThan(0);
  }
});
it("converts quote blocks", () => {
  const post = wxrData.posts.find((p) => p.content?.includes("wp:quote"));
  if (post) {
    const result = gutenbergToPortableText(post.content || "");
    // Quotes become blocks with the "blockquote" style.
    const quotes = result.filter(
      (b) => b._type === "block" && (b as any).style === "blockquote",
    );
    expect(quotes.length).toBeGreaterThan(0);
  }
});
it("converts code blocks", () => {
  const post = wxrData.posts.find((p) => p.content?.includes("wp:code"));
  if (post) {
    const result = gutenbergToPortableText(post.content || "");
    const codeBlocks = result.filter((b) => b._type === "code");
    expect(codeBlocks.length).toBeGreaterThan(0);
  }
});
it("converts group blocks by flattening", () => {
  const post = wxrData.posts.find((p) => p.content?.includes("wp:group"));
  expect(post).toBeDefined();
  const result = gutenbergToPortableText(post!.content || "");
  // Groups should be flattened - no group type in output
  const groups = result.filter((b) => b._type === "group");
  expect(groups.length).toBe(0);
  // But their content should still be present
  expect(result.length).toBeGreaterThan(0);
});
it("handles classic editor content", () => {
  // Find a post in the "Classic" category
  const classicPost = wxrData.posts.find((p) => p.categories.includes("classic"));
  if (classicPost && classicPost.content) {
    // Classic content doesn't have wp: comments — it's raw HTML from the
    // pre-Gutenberg editor, which the converter must also accept.
    const hasGutenbergBlocks = classicPost.content.includes("<!-- wp:");
    if (!hasGutenbergBlocks && classicPost.content.trim()) {
      const result = gutenbergToPortableText(classicPost.content);
      expect(result.length).toBeGreaterThan(0);
    }
  }
});
it("preserves inline formatting", () => {
  const post = wxrData.posts.find(
    (p) => p.content?.includes("<strong>") || p.content?.includes("<em>"),
  );
  if (post) {
    const result = gutenbergToPortableText(post.content || "");
    const blocksWithMarks = result.filter(
      (b) => b._type === "block" && (b as any).children?.some((c: any) => c.marks?.length > 0),
    );
    // Should have some formatted text
    // NOTE(review): >= 0 can never fail — consider tightening to > 0; confirm
    // the found post's formatting actually sits inside converted blocks first.
    expect(blocksWithMarks.length).toBeGreaterThanOrEqual(0);
  }
});
it("handles empty content gracefully", () => {
const result = gutenbergToPortableText("");
expect(result).toEqual([]);
});
it("handles malformed blocks gracefully", () => {
// Test with incomplete block markers
const malformed = "<!-- wp:paragraph --><p>Test<!-- /wp:paragraph";
const result = gutenbergToPortableText(malformed);
// Should not throw, may produce partial output or fallback
expect(Array.isArray(result)).toBe(true);
});
});
describe("Edge Cases", () => {
it("handles posts with special characters in title", () => {
// Find posts with special characters
const specialPosts = wxrData.posts.filter(
(p) => p.title?.includes("&") || p.title?.includes("<") || p.title?.includes('"'),
);
// Should parse without errors
expect(specialPosts).toBeDefined();
});
it("handles posts with very long content", () => {
// Find the longest post
const longestPost = wxrData.posts.reduce((longest, current) => {
const currentLength = current.content?.length || 0;
const longestLength = longest?.content?.length || 0;
return currentLength > longestLength ? current : longest;
}, wxrData.posts[0]);
if (longestPost?.content) {
const result = gutenbergToPortableText(longestPost.content);
expect(result.length).toBeGreaterThan(0);
}
});
it("handles deeply nested blocks", () => {
// Find posts with nested structures (columns, groups)
const nestedPost = wxrData.posts.find(
(p) => p.content?.includes("wp:columns") || p.content?.includes("wp:group"),
);
if (nestedPost) {
const result = gutenbergToPortableText(nestedPost.content || "");
expect(Array.isArray(result)).toBe(true);
}
});
it("handles posts with embeds", () => {
const embedPost = wxrData.posts.find((p) => p.content?.includes("wp:embed"));
if (embedPost) {
const result = gutenbergToPortableText(embedPost.content || "");
const embeds = result.filter((b) => b._type === "embed");
expect(embeds.length).toBeGreaterThanOrEqual(0);
}
});
});
describe("Content Integrity", () => {
it("preserves all text content through conversion", () => {
// Take a sample of posts and verify text isn't lost
const samplePosts = wxrData.posts.slice(0, 10);
for (const post of samplePosts) {
if (!post.content) continue;
const result = gutenbergToPortableText(post.content);
// Extract all text from result
const extractedText = result
.map((block) => {
if (block._type === "block" && (block as any).children) {
return (block as any).children.map((c: any) => c.text || "").join("");
}
if (block._type === "code") {
return (block as any).code || "";
}
return "";
})
.join(" ")
.trim();
// If there was content, we should have extracted some text
// (unless it was all images/embeds)
if (post.content.includes("<p>") || post.content.includes("wp:paragraph")) {
// Only check if there was actual text content
const hasTextContent = PARAGRAPH_WITH_TEXT_REGEX.test(post.content);
if (hasTextContent) {
expect(extractedText.length).toBeGreaterThan(0);
}
}
}
});
});
describe("Statistics", () => {
it("reports conversion statistics", () => {
let totalPosts = 0;
let successfulConversions = 0;
let failedConversions = 0;
let totalBlocks = 0;
const blockTypes = new Map<string, number>();
for (const post of wxrData.posts) {
totalPosts++;
try {
const result = gutenbergToPortableText(post.content || "");
successfulConversions++;
totalBlocks += result.length;
for (const block of result) {
const type = block._type;
blockTypes.set(type, (blockTypes.get(type) || 0) + 1);
}
} catch {
failedConversions++;
}
}
// Log statistics (visible in test output with --reporter=verbose)
console.log("\n=== WordPress Migration Statistics ===");
console.log(`Total posts: ${totalPosts}`);
console.log(`Successful: ${successfulConversions}`);
console.log(`Failed: ${failedConversions}`);
console.log(`Total blocks generated: ${totalBlocks}`);
console.log("\nBlock types:");
for (const [type, count] of blockTypes.entries()) {
console.log(` ${type}: ${count}`);
}
console.log("=====================================\n");
// All conversions should succeed
expect(failedConversions).toBe(0);
expect(successfulConversions).toBe(totalPosts);
});
});
});