first commit

This commit is contained in:
Matt Kane
2026-04-01 10:44:22 +01:00
commit 43fcb9a131
1789 changed files with 395041 additions and 0 deletions

89
packages/core/NOTES.md Normal file
View File

@@ -0,0 +1,89 @@
# emdash
The core EmDash CMS package - an Astro-native, agent-portable reimplementation of WordPress.
## Installation
```shell
npm install emdash
```
## Features
- **Content Management** - Collections, fields, Live Collections integration
- **Media Library** - Upload via signed URLs, S3-compatible storage
- **Full-Text Search** - FTS5 with Porter stemming, per-collection config
- **Navigation Menus** - Hierarchical menus with URL resolution
- **Taxonomies** - Categories, tags, custom taxonomies
- **Widget Areas** - Content, menu, and component widgets
- **Sections** - Reusable content blocks
- **Plugin System** - Hooks, storage, settings, admin pages
- **WordPress Import** - WXR, REST API, WordPress.com
## Quick Start
```typescript
// astro.config.mjs
import { defineConfig } from "astro/config";
import emdash, { local } from "emdash/astro";
import { sqlite } from "emdash/db";
export default defineConfig({
integrations: [
emdash({
database: sqlite({ url: "file:./data.db" }),
storage: local({
directory: "./uploads",
baseUrl: "/_emdash/api/media/file",
}),
}),
],
});
```
```typescript
// src/live.config.ts
import { defineLiveCollection } from "astro:content";
import { emdashLoader } from "emdash/runtime";
export const collections = {
_emdash: defineLiveCollection({ loader: emdashLoader() }),
};
```
## API
```typescript
import {
getEmDashCollection,
getEmDashEntry,
getSiteSettings,
getMenu,
getTaxonomyTerms,
getWidgetArea,
search,
} from "emdash";
// Content
const { entries } = await getEmDashCollection("posts");
const { entry } = await getEmDashEntry("posts", "hello-world");
// Site settings
const settings = await getSiteSettings();
// Navigation
const menu = await getMenu("primary");
// Taxonomies
const categories = await getTaxonomyTerms("categories");
// Widgets
const sidebar = await getWidgetArea("sidebar");
// Search
const results = await search("hello world", { collections: ["posts"] });
```
## Documentation
See the [documentation site](https://docs.emdashcms.com) for guides, API reference, and plugin development.

46
packages/core/locals.d.ts vendored Normal file
View File

@@ -0,0 +1,46 @@
/**
 * EmDash Astro type declarations
 *
 * Augments App.Locals with EmDash types.
 * Referenced via triple-slash directive in the generated emdash-env.d.ts.
 */
import type { User } from "@emdashcms/auth";
import type { EmDashHandlers, EmDashManifest } from "./dist/types.d.mts";

declare global {
  namespace App {
    interface Locals {
      /**
       * EmDash API handlers - available on /_emdash/* routes
       */
      emdash: EmDashHandlers;
      /**
       * EmDash manifest - the serialized admin configuration
       */
      emdashManifest: EmDashManifest;
      /**
       * Authenticated user - set by auth middleware when a valid session exists
       */
      user?: User;
      /**
       * Per-session Durable Object database for playground mode.
       *
       * Set by the playground middleware (@emdashcms/cloudflare). Read by
       * the runtime middleware and request-context middleware to set the
       * database in ALS for the current request.
       *
       * This exists because Vite SSR loads two copies of request-context.ts
       * (dist for integration middleware, source for the loader). locals
       * bridges the DB across that module boundary.
       *
       * Typed `unknown` because the concrete DB type lives in the
       * Cloudflare package and must not be imported here.
       */
      __playgroundDb?: unknown;
    }
  }
}

// Empty export forces this file to be treated as a module, which is
// required for the `declare global` augmentation above to take effect.
export {};

235
packages/core/package.json Normal file
View File

@@ -0,0 +1,235 @@
{
"name": "emdash",
"version": "0.0.0",
"description": "Astro-native CMS with WordPress migration support",
"type": "module",
"main": "dist/index.mjs",
"bin": {
"emdash": "./dist/cli/index.mjs",
"em": "./dist/cli/index.mjs"
},
"files": [
"dist",
"src",
"locals.d.ts"
],
"exports": {
".": {
"types": "./dist/index.d.mts",
"default": "./dist/index.mjs"
},
"./astro": {
"types": "./dist/astro/index.d.mts",
"default": "./dist/astro/index.mjs"
},
"./middleware": {
"types": "./dist/astro/middleware.d.mts",
"default": "./dist/astro/middleware.mjs"
},
"./middleware/setup": {
"types": "./dist/astro/middleware/setup.d.mts",
"default": "./dist/astro/middleware/setup.mjs"
},
"./middleware/auth": {
"types": "./dist/astro/middleware/auth.d.mts",
"default": "./dist/astro/middleware/auth.mjs"
},
"./middleware/redirect": {
"types": "./dist/astro/middleware/redirect.d.mts",
"default": "./dist/astro/middleware/redirect.mjs"
},
"./ui": "./src/ui.ts",
"./ui/search": "./src/components/LiveSearch.astro",
"./cli": {
"types": "./dist/cli/index.d.mts",
"default": "./dist/cli/index.mjs"
},
"./routes/*": "./src/astro/routes/*",
"./db": {
"types": "./dist/db/index.d.mts",
"default": "./dist/db/index.mjs"
},
"./db/sqlite": {
"types": "./dist/db/sqlite.d.mts",
"default": "./dist/db/sqlite.mjs"
},
"./db/libsql": {
"types": "./dist/db/libsql.d.mts",
"default": "./dist/db/libsql.mjs"
},
"./db/postgres": {
"types": "./dist/db/postgres.d.mts",
"default": "./dist/db/postgres.mjs"
},
"./storage/local": {
"types": "./dist/storage/local.d.mts",
"default": "./dist/storage/local.mjs"
},
"./storage/s3": {
"types": "./dist/storage/s3.d.mts",
"default": "./dist/storage/s3.mjs"
},
"./media": {
"types": "./dist/media/index.d.mts",
"default": "./dist/media/index.mjs"
},
"./media/local-runtime": {
"types": "./dist/media/local-runtime.d.mts",
"default": "./dist/media/local-runtime.mjs"
},
"./runtime": {
"types": "./dist/runtime.d.mts",
"default": "./dist/runtime.mjs"
},
"./request-context": {
"types": "./dist/request-context.d.mts",
"default": "./dist/request-context.mjs"
},
"./seed": {
"types": "./dist/seed/index.d.mts",
"default": "./dist/seed/index.mjs"
},
"./middleware/request-context": {
"types": "./dist/astro/middleware/request-context.d.mts",
"default": "./dist/astro/middleware/request-context.mjs"
},
"./locals": {
"types": "./locals.d.ts"
},
"./client": {
"types": "./dist/client/index.d.mts",
"default": "./dist/client/index.mjs"
},
"./client/cf-access": {
"types": "./dist/client/cf-access.d.mts",
"default": "./dist/client/cf-access.mjs"
},
"./seo": {
"types": "./dist/seo/index.d.mts",
"default": "./dist/seo/index.mjs"
},
"./page": {
"types": "./dist/page/index.d.mts",
"default": "./dist/page/index.mjs"
},
"./plugin-utils": {
"types": "./dist/plugin-utils.d.mts",
"default": "./dist/plugin-utils.mjs"
},
"./plugins/adapt-sandbox-entry": {
"types": "./dist/plugins/adapt-sandbox-entry.d.mts",
"default": "./dist/plugins/adapt-sandbox-entry.mjs"
}
},
"imports": {
"#api/schemas.js": "./src/api/schemas/index.js",
"#api/*": "./src/api/*",
"#db/*": "./src/database/*",
"#auth/*": "./src/auth/*",
"#schema/*": "./src/schema/*",
"#search/*": "./src/search/*",
"#sections/*": "./src/sections/*",
"#menus/*": "./src/menus/*",
"#widgets/*": "./src/widgets/*",
"#import/*": "./src/import/*",
"#utils/*": "./src/utils/*",
"#preview/*": "./src/preview/*",
"#seed/*": "./src/seed/*",
"#settings/*": "./src/settings/*",
"#seo/*": "./src/seo/*",
"#plugins/*": "./src/plugins/*",
"#media/*": "./src/media/*",
"#mcp/*": "./src/mcp/*",
"#comments/*": "./src/comments/*",
"#types": "./src/astro/types.js"
},
"scripts": {
"build": "tsdown",
"dev": "tsdown --watch",
"prepublishOnly": "node --run build",
"typecheck": "tsgo --noEmit",
"check": "publint && attw --pack --ignore-rules=cjs-resolves-to-esm --ignore-rules=no-resolution --ignore-rules=internal-resolution-error",
"test": "vitest"
},
"dependencies": {
"@emdashcms/admin": "workspace:*",
"@emdashcms/auth": "workspace:*",
"@emdashcms/gutenberg-to-portable-text": "workspace:*",
"@floating-ui/react": "^0.27.16",
"@modelcontextprotocol/sdk": "^1.26.0",
"@portabletext/toolkit": "^5.0.1",
"@tiptap/core": "catalog:",
"@tiptap/extension-focus": "catalog:",
"@tiptap/extension-image": "catalog:",
"@tiptap/extension-link": "catalog:",
"@tiptap/extension-placeholder": "catalog:",
"@tiptap/extension-text-align": "catalog:",
"@tiptap/extension-typography": "catalog:",
"@tiptap/extension-underline": "catalog:",
"@tiptap/react": "catalog:",
"@tiptap/starter-kit": "catalog:",
"@tiptap/suggestion": "catalog:",
"@unpic/placeholder": "^0.1.2",
"arctic": "^3.7.0",
"astro-portabletext": "^0.11.0",
"better-sqlite3": "catalog:",
"blurhash": "^2.0.5",
"citty": "^0.1.6",
"consola": "^3.4.2",
"croner": "^10.0.1",
"image-size": "^2.0.2",
"jose": "^6.1.3",
"jpeg-js": "^0.4.4",
"kysely": "^0.27.0",
"mime": "^4.1.0",
"modern-tar": "^0.7.5",
"picocolors": "^1.1.1",
"sanitize-html": "^2.17.1",
"sax": "^1.4.1",
"ulidx": "^2.4.1",
"upng-js": "^2.1.0",
"zod": "^4.3.5"
},
"optionalDependencies": {
"@libsql/kysely-libsql": "^0.4.0",
"pg": "^8.0.0"
},
"peerDependencies": {
"@astrojs/react": ">=5.0.0-beta.0",
"@tanstack/react-query": ">=5.0.0",
"@tanstack/react-router": ">=1.100.0",
"astro": ">=6.0.0-beta.0",
"react": ">=18.0.0",
"react-dom": ">=18.0.0"
},
"devDependencies": {
"@apidevtools/swagger-parser": "^12.1.0",
"@arethetypeswrong/cli": "catalog:",
"@emdashcms/blocks": "workspace:*",
"@types/better-sqlite3": "^7.6.12",
"@types/pg": "^8.16.0",
"@types/sanitize-html": "^2.16.0",
"@types/sax": "^1.2.7",
"@vitest/ui": "^4.0.17",
"publint": "catalog:",
"tsdown": "catalog:",
"typescript": "catalog:",
"vite": "^6.0.0",
"vitest": "catalog:",
"zod-openapi": "^5.4.6"
},
"repository": {
"type": "git",
"url": "git+https://github.com/cloudflare/emdash.git",
"directory": "packages/core"
},
"homepage": "https://github.com/cloudflare/emdash",
"keywords": [
"astro",
"cms",
"content",
"wordpress"
],
"author": "Matt Kane",
"license": "MIT"
}

View File

@@ -0,0 +1,63 @@
/**
* Authorization helpers for API routes
*
* Thin wrappers around @emdashcms/auth RBAC that return HTTP responses.
* Auth middleware handles authentication; these handle authorization.
*/
import type { Permission, RoleLevel } from "@emdashcms/auth";
import { hasPermission, canActOnOwn } from "@emdashcms/auth";
import { apiError } from "./error.js";
/**
 * Minimal user shape these helpers need: just the identity and role used
 * by the RBAC checks (`hasPermission` / `canActOnOwn`).
 */
interface UserLike {
  // Unique user id — compared against resource owner ids.
  id: string;
  // Role level consumed by the RBAC permission checks.
  role: RoleLevel;
}
/**
 * Check if user has a permission. Returns a 401/403 Response if not, or null if authorized.
 *
 * Usage:
 * ```ts
 * const denied = requirePerm(user, "schema:manage");
 * if (denied) return denied;
 * ```
 */
export function requirePerm(
  user: UserLike | null | undefined,
  permission: Permission,
): Response | null {
  // Missing user is an authentication failure, not an authorization one.
  if (user == null) {
    return apiError("UNAUTHORIZED", "Authentication required", 401);
  }
  // Authenticated but lacking the permission → 403.
  return hasPermission(user, permission)
    ? null
    : apiError("FORBIDDEN", "Insufficient permissions", 403);
}
/**
 * Check if user can act on a resource, considering ownership.
 * Returns a 401/403 Response if not, or null if authorized.
 *
 * Usage:
 * ```ts
 * const denied = requireOwnerPerm(user, item.authorId, "content:edit_own", "content:edit_any");
 * if (denied) return denied;
 * ```
 */
export function requireOwnerPerm(
  user: UserLike | null | undefined,
  ownerId: string,
  ownPermission: Permission,
  anyPermission: Permission,
): Response | null {
  // Missing user is an authentication failure, not an authorization one.
  if (user == null) {
    return apiError("UNAUTHORIZED", "Authentication required", 401);
  }
  // Ownership-aware RBAC: "own" permission suffices for the owner,
  // otherwise the "any" permission is required.
  return canActOnOwn(user, ownerId, ownPermission, anyPermission)
    ? null
    : apiError("FORBIDDEN", "Insufficient permissions", 403);
}

View File

@@ -0,0 +1,48 @@
/**
* CSRF protection utilities.
*
* Two mechanisms:
* 1. Custom header check (X-EmDash-Request: 1) — used for authenticated API routes.
* Browsers block cross-origin custom headers, so presence proves same-origin.
* 2. Origin check — used for public API routes that skip auth. Compares the Origin
* header against the request origin. Same approach as Astro's `checkOrigin`.
*/
import { apiError } from "./error.js";
/**
 * Origin-based CSRF check for public API routes that skip auth.
 *
 * State-changing requests (POST/PUT/DELETE) to public endpoints must either:
 * 1. Include the X-EmDash-Request: 1 header (custom header blocked cross-origin), OR
 * 2. Have an Origin header matching the request origin
 *
 * This prevents cross-origin form submissions (which can't set custom headers)
 * and cross-origin fetch (blocked by CORS unless allowed). Same-origin requests
 * always include a matching Origin header.
 *
 * Returns a 403 Response if the check fails, or null if allowed.
 */
export function checkPublicCsrf(request: Request, url: URL): Response | null {
  // Custom header proves same-origin: browsers refuse to attach custom
  // headers to cross-origin requests without an explicit CORS grant.
  if (request.headers.get("X-EmDash-Request") === "1") {
    return null;
  }

  const origin = request.headers.get("Origin");

  // No Origin header → non-browser client (curl, server-to-server).
  // CSRF is a browser-specific attack vector (it rides on ambient cookies),
  // so these are allowed through.
  if (origin === null) {
    return null;
  }

  // Origin present: accept only if it matches the request's own origin.
  let sameOrigin = false;
  try {
    sameOrigin = new URL(origin).origin === url.origin;
  } catch {
    // Malformed Origin header — treat as cross-origin and reject below.
  }
  return sameOrigin
    ? null
    : apiError("CSRF_REJECTED", "Cross-origin request blocked", 403);
}

View File

@@ -0,0 +1,99 @@
/**
* Standardized API error responses.
*
* All API routes should use these utilities instead of inline
* `new Response(JSON.stringify({ error: ... }), ...)` patterns.
*/
import { mapErrorStatus } from "./errors.js";
import type { ApiResult } from "./types.js";
// Re-export everything from errors.ts so existing `import { mapErrorStatus } from "./error.js"` still works
export * from "./errors.js";
/**
 * Standard cache headers for all API responses.
 *
 * Cache-Control: private, no-store -- prevents CDN/proxy caching of authenticated data.
 * no-store already tells caches not to store the response, so Vary is unnecessary.
 */
const API_CACHE_HEADERS: HeadersInit = {
  "Cache-Control": "private, no-store",
};

/**
 * Create a standardized error response.
 *
 * Always returns `{ error: { code, message } }` with correct Content-Type.
 * Use this for all error responses in API routes.
 */
export function apiError(code: string, message: string, status: number): Response {
  const body = { error: { code, message } };
  return Response.json(body, { status, headers: API_CACHE_HEADERS });
}

/**
 * Create a standardized success response.
 *
 * Always returns `{ data: T }` with the given status code (200 by default).
 * Use this for all success responses in API routes.
 */
export function apiSuccess<T>(data: T, status = 200): Response {
  const envelope = { data };
  return Response.json(envelope, { status, headers: API_CACHE_HEADERS });
}
/**
 * Handle an unknown error in a catch block.
 *
 * - Logs the full error server-side
 * - Returns a generic message to the client (never leaks error.message)
 * - Use `fallbackMessage` for the public-facing message
 * - Use `fallbackCode` for the error code
 */
export function handleError(
  error: unknown,
  fallbackMessage: string,
  fallbackCode: string,
): Response {
  // Full details go to the server log only; the client sees the fallback.
  const logPrefix = `[${fallbackCode}]`;
  console.error(logPrefix, error);
  return apiError(fallbackCode, fallbackMessage, 500);
}
/**
 * Standard initialization check.
 *
 * Returns an error response if EmDash is not initialized, or null if OK.
 * Usage: `const err = requireInit(emdash); if (err) return err;`
 */
export function requireInit(emdash: unknown): Response | null {
  // "Initialized" means a non-null object value (note: typeof null === "object").
  const initialized = typeof emdash === "object" && emdash !== null;
  return initialized
    ? null
    : apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
/**
 * Standard database check.
 *
 * Returns an error response if the database is not available, or null if OK.
 * Usage: `const err = requireDb(emdash?.db); if (err) return err;`
 */
export function requireDb(db: unknown): Response | null {
  // Any truthy value counts as an available database handle.
  if (db) {
    return null;
  }
  return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
/**
 * Convert an ApiResult into an HTTP Response.
 *
 * Collapses the handler-to-response boilerplate:
 * - Success: returns `apiSuccess(result.data, successStatus)`
 * - Error: returns `apiError(code, message, mapErrorStatus(code))`
 */
export function unwrapResult<T>(result: ApiResult<T>, successStatus = 200): Response {
  if (result.success) {
    return apiSuccess(result.data, successStatus);
  }
  // Status is derived from the error code's standard mapping.
  const { code, message } = result.error;
  return apiError(code, message, mapErrorStatus(code));
}

View File

@@ -0,0 +1,445 @@
/**
* Typed error codes and status mapping for the EmDash REST API.
*
* All handler-level and route-level error codes are defined here.
* Routes and handlers should import error codes from this module
* instead of using ad-hoc strings.
*/
// Single source of truth for API error codes. Values equal their keys so the
// codes survive serialization unchanged; mapErrorStatus (below) maps them to
// HTTP statuses, with unlisted `*_ERROR` codes defaulting to 500.
export const ErrorCode = {
  // Shared (used across domains)
  NOT_FOUND: "NOT_FOUND",
  VALIDATION_ERROR: "VALIDATION_ERROR",
  INVALID_INPUT: "INVALID_INPUT",
  INVALID_JSON: "INVALID_JSON",
  CONFLICT: "CONFLICT",
  NOT_CONFIGURED: "NOT_CONFIGURED",
  UNAUTHORIZED: "UNAUTHORIZED",
  FORBIDDEN: "FORBIDDEN",
  RATE_LIMITED: "RATE_LIMITED",
  NOT_AUTHENTICATED: "NOT_AUTHENTICATED",
  NOT_IMPLEMENTED: "NOT_IMPLEMENTED",
  NOT_SUPPORTED: "NOT_SUPPORTED",
  MISSING_PARAM: "MISSING_PARAM",
  CSRF_REJECTED: "CSRF_REJECTED",
  // Content
  CONTENT_CREATE_ERROR: "CONTENT_CREATE_ERROR",
  CONTENT_UPDATE_ERROR: "CONTENT_UPDATE_ERROR",
  CONTENT_DELETE_ERROR: "CONTENT_DELETE_ERROR",
  CONTENT_LIST_ERROR: "CONTENT_LIST_ERROR",
  CONTENT_GET_ERROR: "CONTENT_GET_ERROR",
  CONTENT_DUPLICATE_ERROR: "CONTENT_DUPLICATE_ERROR",
  CONTENT_RESTORE_ERROR: "CONTENT_RESTORE_ERROR",
  CONTENT_PUBLISH_ERROR: "CONTENT_PUBLISH_ERROR",
  CONTENT_UNPUBLISH_ERROR: "CONTENT_UNPUBLISH_ERROR",
  CONTENT_SCHEDULE_ERROR: "CONTENT_SCHEDULE_ERROR",
  CONTENT_UNSCHEDULE_ERROR: "CONTENT_UNSCHEDULE_ERROR",
  CONTENT_DISCARD_DRAFT_ERROR: "CONTENT_DISCARD_DRAFT_ERROR",
  CONTENT_COMPARE_ERROR: "CONTENT_COMPARE_ERROR",
  CONTENT_TRANSLATIONS_ERROR: "CONTENT_TRANSLATIONS_ERROR",
  CONTENT_COUNT_ERROR: "CONTENT_COUNT_ERROR",
  // Revisions
  REVISION_LIST_ERROR: "REVISION_LIST_ERROR",
  REVISION_GET_ERROR: "REVISION_GET_ERROR",
  REVISION_RESTORE_ERROR: "REVISION_RESTORE_ERROR",
  INVALID_REVISION: "INVALID_REVISION",
  // Schema
  SCHEMA_LIST_ERROR: "SCHEMA_LIST_ERROR",
  SCHEMA_GET_ERROR: "SCHEMA_GET_ERROR",
  SCHEMA_CREATE_ERROR: "SCHEMA_CREATE_ERROR",
  SCHEMA_UPDATE_ERROR: "SCHEMA_UPDATE_ERROR",
  SCHEMA_DELETE_ERROR: "SCHEMA_DELETE_ERROR",
  SCHEMA_EXPORT_ERROR: "SCHEMA_EXPORT_ERROR",
  SCHEMA_FIELD_LIST_ERROR: "SCHEMA_FIELD_LIST_ERROR",
  SCHEMA_FIELD_GET_ERROR: "SCHEMA_FIELD_GET_ERROR",
  SCHEMA_FIELD_CREATE_ERROR: "SCHEMA_FIELD_CREATE_ERROR",
  SCHEMA_FIELD_UPDATE_ERROR: "SCHEMA_FIELD_UPDATE_ERROR",
  SCHEMA_FIELD_DELETE_ERROR: "SCHEMA_FIELD_DELETE_ERROR",
  SCHEMA_FIELD_REORDER_ERROR: "SCHEMA_FIELD_REORDER_ERROR",
  ORPHAN_LIST_ERROR: "ORPHAN_LIST_ERROR",
  ORPHAN_REGISTER_ERROR: "ORPHAN_REGISTER_ERROR",
  COLLECTION_EXISTS: "COLLECTION_EXISTS",
  COLLECTION_NOT_FOUND: "COLLECTION_NOT_FOUND",
  TABLE_NOT_FOUND: "TABLE_NOT_FOUND",
  FIELD_EXISTS: "FIELD_EXISTS",
  RESERVED_SLUG: "RESERVED_SLUG",
  INVALID_SLUG: "INVALID_SLUG",
  CREATE_FAILED: "CREATE_FAILED",
  UPDATE_FAILED: "UPDATE_FAILED",
  REGISTER_FAILED: "REGISTER_FAILED",
  // Media
  MEDIA_LIST_ERROR: "MEDIA_LIST_ERROR",
  MEDIA_GET_ERROR: "MEDIA_GET_ERROR",
  MEDIA_CREATE_ERROR: "MEDIA_CREATE_ERROR",
  MEDIA_UPDATE_ERROR: "MEDIA_UPDATE_ERROR",
  MEDIA_DELETE_ERROR: "MEDIA_DELETE_ERROR",
  NO_STORAGE: "NO_STORAGE",
  NO_FILE: "NO_FILE",
  INVALID_TYPE: "INVALID_TYPE",
  UPLOAD_ERROR: "UPLOAD_ERROR",
  UPLOAD_URL_ERROR: "UPLOAD_URL_ERROR",
  CONFIRM_ERROR: "CONFIRM_ERROR",
  CONFIRM_FAILED: "CONFIRM_FAILED",
  FILE_NOT_FOUND: "FILE_NOT_FOUND",
  INVALID_STATE: "INVALID_STATE",
  FILE_SERVE_ERROR: "FILE_SERVE_ERROR",
  STORAGE_NOT_CONFIGURED: "STORAGE_NOT_CONFIGURED",
  PROVIDER_LIST_ERROR: "PROVIDER_LIST_ERROR",
  PROVIDER_UPLOAD_ERROR: "PROVIDER_UPLOAD_ERROR",
  PROVIDER_GET_ERROR: "PROVIDER_GET_ERROR",
  PROVIDER_DELETE_ERROR: "PROVIDER_DELETE_ERROR",
  // Comments
  COMMENT_LIST_ERROR: "COMMENT_LIST_ERROR",
  COMMENT_GET_ERROR: "COMMENT_GET_ERROR",
  COMMENT_STATUS_ERROR: "COMMENT_STATUS_ERROR",
  COMMENT_DELETE_ERROR: "COMMENT_DELETE_ERROR",
  COMMENT_BULK_ERROR: "COMMENT_BULK_ERROR",
  COMMENT_INBOX_ERROR: "COMMENT_INBOX_ERROR",
  COMMENT_COUNTS_ERROR: "COMMENT_COUNTS_ERROR",
  COMMENT_CREATE_ERROR: "COMMENT_CREATE_ERROR",
  COMMENTS_DISABLED: "COMMENTS_DISABLED",
  COMMENTS_CLOSED: "COMMENTS_CLOSED",
  COMMENT_REJECTED: "COMMENT_REJECTED",
  // Auth
  ACCOUNT_DISABLED: "ACCOUNT_DISABLED",
  ADMIN_EXISTS: "ADMIN_EXISTS",
  SETUP_COMPLETE: "SETUP_COMPLETE",
  CREDENTIAL_EXISTS: "CREDENTIAL_EXISTS",
  CHALLENGE_EXPIRED: "CHALLENGE_EXPIRED",
  PASSKEY_REGISTER_ERROR: "PASSKEY_REGISTER_ERROR",
  PASSKEY_REGISTER_OPTIONS_ERROR: "PASSKEY_REGISTER_OPTIONS_ERROR",
  PASSKEY_OPTIONS_ERROR: "PASSKEY_OPTIONS_ERROR",
  PASSKEY_VERIFY_ERROR: "PASSKEY_VERIFY_ERROR",
  PASSKEY_LIST_ERROR: "PASSKEY_LIST_ERROR",
  PASSKEY_RENAME_ERROR: "PASSKEY_RENAME_ERROR",
  PASSKEY_DELETE_ERROR: "PASSKEY_DELETE_ERROR",
  PASSKEY_LIMIT: "PASSKEY_LIMIT",
  LAST_PASSKEY: "LAST_PASSKEY",
  LOGOUT_ERROR: "LOGOUT_ERROR",
  SELF_ROLE_CHANGE: "SELF_ROLE_CHANGE",
  EMAIL_IN_USE: "EMAIL_IN_USE",
  EMAIL_NOT_CONFIGURED: "EMAIL_NOT_CONFIGURED",
  USER_EXISTS: "USER_EXISTS",
  INVALID_TOKEN: "INVALID_TOKEN",
  TOKEN_EXPIRED: "TOKEN_EXPIRED",
  DOMAIN_NOT_ALLOWED: "DOMAIN_NOT_ALLOWED",
  INVITE_CREATE_ERROR: "INVITE_CREATE_ERROR",
  INVITE_VALIDATE_ERROR: "INVITE_VALIDATE_ERROR",
  INVITE_COMPLETE_ERROR: "INVITE_COMPLETE_ERROR",
  SIGNUP_VERIFY_ERROR: "SIGNUP_VERIFY_ERROR",
  SIGNUP_COMPLETE_ERROR: "SIGNUP_COMPLETE_ERROR",
  RECOVERY_SEND_ERROR: "RECOVERY_SEND_ERROR",
  USER_LIST_ERROR: "USER_LIST_ERROR",
  USER_DETAIL_ERROR: "USER_DETAIL_ERROR",
  USER_UPDATE_ERROR: "USER_UPDATE_ERROR",
  USER_DISABLE_ERROR: "USER_DISABLE_ERROR",
  USER_ENABLE_ERROR: "USER_ENABLE_ERROR",
  // OAuth (internal codes -- distinct from RFC OAuthErrorCode)
  UNSUPPORTED_RESPONSE_TYPE: "UNSUPPORTED_RESPONSE_TYPE",
  INVALID_REDIRECT_URI: "INVALID_REDIRECT_URI",
  INVALID_CLIENT: "INVALID_CLIENT",
  INVALID_SCOPE: "INVALID_SCOPE",
  AUTHORIZATION_ERROR: "AUTHORIZATION_ERROR",
  INVALID_GRANT: "INVALID_GRANT",
  UNSUPPORTED_GRANT_TYPE: "UNSUPPORTED_GRANT_TYPE",
  INVALID_CODE: "INVALID_CODE",
  EXPIRED_CODE: "EXPIRED_CODE",
  INSUFFICIENT_ROLE: "INSUFFICIENT_ROLE",
  TOKEN_EXCHANGE_ERROR: "TOKEN_EXCHANGE_ERROR",
  TOKEN_REFRESH_ERROR: "TOKEN_REFRESH_ERROR",
  TOKEN_REVOKE_ERROR: "TOKEN_REVOKE_ERROR",
  TOKEN_CREATE_ERROR: "TOKEN_CREATE_ERROR",
  TOKEN_LIST_ERROR: "TOKEN_LIST_ERROR",
  TOKEN_ERROR: "TOKEN_ERROR",
  DEVICE_CODE_ERROR: "DEVICE_CODE_ERROR",
  AUTHORIZE_ERROR: "AUTHORIZE_ERROR",
  CLIENT_LIST_ERROR: "CLIENT_LIST_ERROR",
  CLIENT_GET_ERROR: "CLIENT_GET_ERROR",
  CLIENT_CREATE_ERROR: "CLIENT_CREATE_ERROR",
  CLIENT_UPDATE_ERROR: "CLIENT_UPDATE_ERROR",
  CLIENT_DELETE_ERROR: "CLIENT_DELETE_ERROR",
  // Allowed domains
  DOMAIN_LIST_ERROR: "DOMAIN_LIST_ERROR",
  DOMAIN_CREATE_ERROR: "DOMAIN_CREATE_ERROR",
  DOMAIN_UPDATE_ERROR: "DOMAIN_UPDATE_ERROR",
  DOMAIN_DELETE_ERROR: "DOMAIN_DELETE_ERROR",
  // Plugins / Marketplace
  PLUGIN_LIST_ERROR: "PLUGIN_LIST_ERROR",
  PLUGIN_GET_ERROR: "PLUGIN_GET_ERROR",
  PLUGIN_ENABLE_ERROR: "PLUGIN_ENABLE_ERROR",
  PLUGIN_DISABLE_ERROR: "PLUGIN_DISABLE_ERROR",
  PLUGIN_ID_CONFLICT: "PLUGIN_ID_CONFLICT",
  MARKETPLACE_NOT_CONFIGURED: "MARKETPLACE_NOT_CONFIGURED",
  MARKETPLACE_UNAVAILABLE: "MARKETPLACE_UNAVAILABLE",
  MARKETPLACE_ERROR: "MARKETPLACE_ERROR",
  SANDBOX_NOT_AVAILABLE: "SANDBOX_NOT_AVAILABLE",
  ALREADY_INSTALLED: "ALREADY_INSTALLED",
  ALREADY_UP_TO_DATE: "ALREADY_UP_TO_DATE",
  NO_VERSION: "NO_VERSION",
  MANIFEST_MISMATCH: "MANIFEST_MISMATCH",
  MANIFEST_VERSION_MISMATCH: "MANIFEST_VERSION_MISMATCH",
  AUDIT_FAILED: "AUDIT_FAILED",
  CHECKSUM_MISMATCH: "CHECKSUM_MISMATCH",
  INVALID_BUNDLE: "INVALID_BUNDLE",
  BUNDLE_EXTRACT_FAILED: "BUNDLE_EXTRACT_FAILED",
  BUNDLE_DOWNLOAD_FAILED: "BUNDLE_DOWNLOAD_FAILED",
  CAPABILITY_ESCALATION: "CAPABILITY_ESCALATION",
  ROUTE_VISIBILITY_ESCALATION: "ROUTE_VISIBILITY_ESCALATION",
  INSTALL_FAILED: "INSTALL_FAILED",
  UNINSTALL_FAILED: "UNINSTALL_FAILED",
  SEARCH_FAILED: "SEARCH_FAILED",
  GET_PLUGIN_FAILED: "GET_PLUGIN_FAILED",
  GET_THEME_FAILED: "GET_THEME_FAILED",
  THEME_SEARCH_FAILED: "THEME_SEARCH_FAILED",
  UPDATE_CHECK_FAILED: "UPDATE_CHECK_FAILED",
  EXCLUSIVE_HOOKS_LIST_ERROR: "EXCLUSIVE_HOOKS_LIST_ERROR",
  EXCLUSIVE_HOOK_SET_ERROR: "EXCLUSIVE_HOOK_SET_ERROR",
  // Menus
  MENU_LIST_ERROR: "MENU_LIST_ERROR",
  MENU_CREATE_ERROR: "MENU_CREATE_ERROR",
  MENU_GET_ERROR: "MENU_GET_ERROR",
  MENU_UPDATE_ERROR: "MENU_UPDATE_ERROR",
  MENU_DELETE_ERROR: "MENU_DELETE_ERROR",
  MENU_ITEM_CREATE_ERROR: "MENU_ITEM_CREATE_ERROR",
  MENU_ITEM_UPDATE_ERROR: "MENU_ITEM_UPDATE_ERROR",
  MENU_ITEM_DELETE_ERROR: "MENU_ITEM_DELETE_ERROR",
  MENU_REORDER_ERROR: "MENU_REORDER_ERROR",
  // Taxonomies
  TAXONOMY_LIST_ERROR: "TAXONOMY_LIST_ERROR",
  TAXONOMY_CREATE_ERROR: "TAXONOMY_CREATE_ERROR",
  TERM_LIST_ERROR: "TERM_LIST_ERROR",
  TERM_CREATE_ERROR: "TERM_CREATE_ERROR",
  TERM_GET_ERROR: "TERM_GET_ERROR",
  TERM_UPDATE_ERROR: "TERM_UPDATE_ERROR",
  TERM_DELETE_ERROR: "TERM_DELETE_ERROR",
  TERMS_GET_ERROR: "TERMS_GET_ERROR",
  TERMS_SET_ERROR: "TERMS_SET_ERROR",
  // Sections
  SECTION_LIST_ERROR: "SECTION_LIST_ERROR",
  SECTION_CREATE_ERROR: "SECTION_CREATE_ERROR",
  SECTION_GET_ERROR: "SECTION_GET_ERROR",
  SECTION_UPDATE_ERROR: "SECTION_UPDATE_ERROR",
  SECTION_DELETE_ERROR: "SECTION_DELETE_ERROR",
  // Redirects
  REDIRECT_LIST_ERROR: "REDIRECT_LIST_ERROR",
  REDIRECT_CREATE_ERROR: "REDIRECT_CREATE_ERROR",
  REDIRECT_GET_ERROR: "REDIRECT_GET_ERROR",
  REDIRECT_UPDATE_ERROR: "REDIRECT_UPDATE_ERROR",
  REDIRECT_DELETE_ERROR: "REDIRECT_DELETE_ERROR",
  NOT_FOUND_LIST_ERROR: "NOT_FOUND_LIST_ERROR",
  NOT_FOUND_SUMMARY_ERROR: "NOT_FOUND_SUMMARY_ERROR",
  NOT_FOUND_CLEAR_ERROR: "NOT_FOUND_CLEAR_ERROR",
  NOT_FOUND_PRUNE_ERROR: "NOT_FOUND_PRUNE_ERROR",
  // Widgets
  WIDGET_AREA_LIST_ERROR: "WIDGET_AREA_LIST_ERROR",
  WIDGET_AREA_CREATE_ERROR: "WIDGET_AREA_CREATE_ERROR",
  WIDGET_AREA_GET_ERROR: "WIDGET_AREA_GET_ERROR",
  WIDGET_AREA_DELETE_ERROR: "WIDGET_AREA_DELETE_ERROR",
  WIDGET_CREATE_ERROR: "WIDGET_CREATE_ERROR",
  WIDGET_UPDATE_ERROR: "WIDGET_UPDATE_ERROR",
  WIDGET_DELETE_ERROR: "WIDGET_DELETE_ERROR",
  WIDGET_REORDER_ERROR: "WIDGET_REORDER_ERROR",
  WIDGET_COMPONENTS_ERROR: "WIDGET_COMPONENTS_ERROR",
  // Setup
  ALREADY_CONFIGURED: "ALREADY_CONFIGURED",
  INVALID_SEED: "INVALID_SEED",
  INVALID_REDIRECT: "INVALID_REDIRECT",
  SETUP_ERROR: "SETUP_ERROR",
  SETUP_STATUS_ERROR: "SETUP_STATUS_ERROR",
  SETUP_ADMIN_ERROR: "SETUP_ADMIN_ERROR",
  SETUP_VERIFY_ERROR: "SETUP_VERIFY_ERROR",
  DEV_BYPASS_ERROR: "DEV_BYPASS_ERROR",
  DEV_RESET_ERROR: "DEV_RESET_ERROR",
  MIGRATION_ERROR: "MIGRATION_ERROR",
  SEED_ERROR: "SEED_ERROR",
  // Settings
  SETTINGS_READ_ERROR: "SETTINGS_READ_ERROR",
  SETTINGS_UPDATE_ERROR: "SETTINGS_UPDATE_ERROR",
  EMAIL_SETTINGS_READ_ERROR: "EMAIL_SETTINGS_READ_ERROR",
  EMAIL_TEST_ERROR: "EMAIL_TEST_ERROR",
  // Search
  SEARCH_ERROR: "SEARCH_ERROR",
  STATS_ERROR: "STATS_ERROR",
  SUGGESTION_ERROR: "SUGGESTION_ERROR",
  REBUILD_ERROR: "REBUILD_ERROR",
  // Import
  WXR_ANALYZE_ERROR: "WXR_ANALYZE_ERROR",
  WXR_PREPARE_ERROR: "WXR_PREPARE_ERROR",
  WXR_IMPORT_ERROR: "WXR_IMPORT_ERROR",
  IMPORT_ERROR: "IMPORT_ERROR",
  REWRITE_ERROR: "REWRITE_ERROR",
  WP_PLUGIN_ANALYZE_ERROR: "WP_PLUGIN_ANALYZE_ERROR",
  WP_PLUGIN_IMPORT_ERROR: "WP_PLUGIN_IMPORT_ERROR",
  SSRF_BLOCKED: "SSRF_BLOCKED",
  PROBE_ERROR: "PROBE_ERROR",
  // Dashboard
  DASHBOARD_ERROR: "DASHBOARD_ERROR",
  DASHBOARD_STATS_ERROR: "DASHBOARD_STATS_ERROR",
  // Misc
  SNAPSHOT_ERROR: "SNAPSHOT_ERROR",
  TYPEGEN_ERROR: "TYPEGEN_ERROR",
  SITEMAP_ERROR: "SITEMAP_ERROR",
  NO_DB: "NO_DB",
  INVALID_REQUEST: "INVALID_REQUEST",
  UNKNOWN_ACTION: "UNKNOWN_ACTION",
} as const;

// Union of all error-code string literals (value side of the const object).
export type ErrorCode = (typeof ErrorCode)[keyof typeof ErrorCode];
/**
 * OAuth RFC 6749 error codes.
 *
 * These MUST be lowercase per the RFC spec. Used only by OAuth token endpoints.
 * Separate from ErrorCode to prevent mixing conventions.
 */
export const OAuthErrorCode = {
  INVALID_GRANT: "invalid_grant",
  UNSUPPORTED_GRANT_TYPE: "unsupported_grant_type",
  EXPIRED_TOKEN: "expired_token",
  ACCESS_DENIED: "access_denied",
  AUTHORIZATION_PENDING: "authorization_pending",
} as const;

// Union of the lowercase RFC error-code string literals.
export type OAuthErrorCode = (typeof OAuthErrorCode)[keyof typeof OAuthErrorCode];
/**
 * Explicit code → HTTP status overrides, grouped by status.
 *
 * Codes absent from this table fall through to mapErrorStatus's heuristic:
 * `*_ERROR` → 500, everything else → 400.
 */
const EXPLICIT_STATUS: Readonly<Record<string, number>> = {
  // 400 Bad Request
  [ErrorCode.VALIDATION_ERROR]: 400,
  [ErrorCode.INVALID_INPUT]: 400,
  [ErrorCode.INVALID_JSON]: 400,
  [ErrorCode.MISSING_PARAM]: 400,
  [ErrorCode.INVALID_REQUEST]: 400,
  [ErrorCode.NOT_SUPPORTED]: 400,
  [ErrorCode.INVALID_SLUG]: 400,
  [ErrorCode.RESERVED_SLUG]: 400,
  [ErrorCode.INVALID_TYPE]: 400,
  [ErrorCode.NO_FILE]: 400,
  [ErrorCode.INVALID_STATE]: 400,
  [ErrorCode.INVALID_SEED]: 400,
  [ErrorCode.INVALID_REDIRECT]: 400,
  [ErrorCode.INVALID_TOKEN]: 400,
  [ErrorCode.INVALID_REVISION]: 400,
  [ErrorCode.INVALID_CODE]: 400,
  [ErrorCode.CHALLENGE_EXPIRED]: 400,
  [ErrorCode.EXPIRED_CODE]: 400,
  [ErrorCode.LAST_PASSKEY]: 400,
  [ErrorCode.PASSKEY_LIMIT]: 400,
  [ErrorCode.ADMIN_EXISTS]: 400,
  [ErrorCode.SETUP_COMPLETE]: 400,
  [ErrorCode.SELF_ROLE_CHANGE]: 400,
  [ErrorCode.SSRF_BLOCKED]: 400,
  [ErrorCode.UNKNOWN_ACTION]: 400,
  // 401 Unauthorized
  [ErrorCode.UNAUTHORIZED]: 401,
  [ErrorCode.NOT_AUTHENTICATED]: 401,
  // 403 Forbidden
  [ErrorCode.FORBIDDEN]: 403,
  [ErrorCode.CSRF_REJECTED]: 403,
  [ErrorCode.ACCOUNT_DISABLED]: 403,
  [ErrorCode.COMMENTS_DISABLED]: 403,
  [ErrorCode.COMMENTS_CLOSED]: 403,
  [ErrorCode.COMMENT_REJECTED]: 403,
  [ErrorCode.DOMAIN_NOT_ALLOWED]: 403,
  [ErrorCode.INSUFFICIENT_ROLE]: 403,
  [ErrorCode.CAPABILITY_ESCALATION]: 403,
  [ErrorCode.ROUTE_VISIBILITY_ESCALATION]: 403,
  [ErrorCode.AUDIT_FAILED]: 403,
  // 404 Not Found
  [ErrorCode.NOT_FOUND]: 404,
  [ErrorCode.TABLE_NOT_FOUND]: 404,
  [ErrorCode.COLLECTION_NOT_FOUND]: 404,
  [ErrorCode.FILE_NOT_FOUND]: 404,
  [ErrorCode.NO_VERSION]: 404,
  // 409 Conflict
  [ErrorCode.CONFLICT]: 409,
  [ErrorCode.COLLECTION_EXISTS]: 409,
  [ErrorCode.FIELD_EXISTS]: 409,
  [ErrorCode.CREDENTIAL_EXISTS]: 409,
  [ErrorCode.EMAIL_IN_USE]: 409,
  [ErrorCode.USER_EXISTS]: 409,
  [ErrorCode.PLUGIN_ID_CONFLICT]: 409,
  [ErrorCode.ALREADY_INSTALLED]: 409,
  [ErrorCode.ALREADY_CONFIGURED]: 409,
  [ErrorCode.ALREADY_UP_TO_DATE]: 409,
  // 410 Gone
  [ErrorCode.TOKEN_EXPIRED]: 410,
  // 422 Unprocessable Entity
  [ErrorCode.CHECKSUM_MISMATCH]: 422,
  [ErrorCode.INVALID_BUNDLE]: 422,
  [ErrorCode.BUNDLE_EXTRACT_FAILED]: 422,
  // 429 Too Many Requests
  [ErrorCode.RATE_LIMITED]: 429,
  // 500 Internal Server Error
  [ErrorCode.NOT_CONFIGURED]: 500,
  [ErrorCode.NO_STORAGE]: 500,
  [ErrorCode.NO_DB]: 500,
  [ErrorCode.STORAGE_NOT_CONFIGURED]: 500,
  [ErrorCode.EMAIL_NOT_CONFIGURED]: 500,
  // 501 Not Implemented
  [ErrorCode.NOT_IMPLEMENTED]: 501,
  // 502 Bad Gateway
  [ErrorCode.BUNDLE_DOWNLOAD_FAILED]: 502,
  // 503 Service Unavailable
  [ErrorCode.MARKETPLACE_UNAVAILABLE]: 503,
  [ErrorCode.MARKETPLACE_NOT_CONFIGURED]: 503,
  [ErrorCode.SANDBOX_NOT_AVAILABLE]: 503,
};

/**
 * Map a handler error code to an HTTP status code.
 *
 * Shared codes have explicit mappings. Domain-specific `*_ERROR` codes
 * (used in catch blocks via handleError) default to 500. Everything else
 * defaults to 400 (client error).
 */
export function mapErrorStatus(code: string | undefined): number {
  if (code !== undefined) {
    const explicit = EXPLICIT_STATUS[code];
    if (explicit !== undefined) return explicit;
  }
  // Domain-specific *_ERROR codes are catch-block codes -- always 500.
  // WARNING: If adding a new code that ends in _ERROR but represents a
  // client error (4xx), add it to EXPLICIT_STATUS above or it will
  // be incorrectly mapped to 500.
  return code !== undefined && code.endsWith("_ERROR") ? 500 : 400;
}

View File

@@ -0,0 +1,9 @@
/**
 * HTML-escape a string to prevent XSS when interpolated into HTML/JS.
 *
 * Escapes the five significant characters: & < > " '
 */
export function escapeHtml(str: string): string {
  // Order matters: "&" is escaped first so entities produced by the later
  // replacements are not themselves re-escaped.
  const entityMap: ReadonlyArray<readonly [string, string]> = [
    ["&", "&amp;"],
    ["<", "&lt;"],
    [">", "&gt;"],
    ['"', "&quot;"],
    ["'", "&#x27;"],
  ];
  let escaped = str;
  for (const [char, entity] of entityMap) {
    escaped = escaped.split(char).join(entity);
  }
  return escaped;
}

View File

@@ -0,0 +1,240 @@
/**
* API token management handlers.
*
* Creates, lists, and revokes Personal Access Tokens (PATs).
* Token format: ec_pat_<base64url>
* Only the SHA-256 hash is stored — raw token shown once at creation.
*/
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import { hashApiToken, generatePrefixedToken } from "../../auth/api-tokens.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
export interface ApiTokenInfo {
  /** Token record id (generated with `ulid()` at creation). */
  id: string;
  /** Human-readable label supplied at creation time. */
  name: string;
  /** Leading token characters returned by generatePrefixedToken — presumably safe to display; confirm against the auth module. */
  prefix: string;
  /** Scope strings granted to this token (stored JSON-encoded in the DB). */
  scopes: string[];
  /** Id of the user the token belongs to. */
  userId: string;
  /** Expiry timestamp as supplied at creation, or null when none was set. */
  expiresAt: string | null;
  /** Last-use timestamp, null until first use. NOTE(review): not updated in this module — presumably maintained by the auth layer. */
  lastUsedAt: string | null;
  /** Creation timestamp (ISO 8601 string). */
  createdAt: string;
}

export interface ApiTokenCreateResult {
  /** The raw token — shown once, never stored */
  token: string;
  /** Token metadata */
  info: ApiTokenInfo;
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/**
 * Create a new API token for a user.
 *
 * Generates a prefixed token (ec_pat_...), stores only its SHA-256 hash,
 * and returns the raw token exactly once alongside its metadata.
 *
 * @param db - Database handle
 * @param userId - Owner of the new token
 * @param input - Token name, scopes, and optional ISO expiry datetime
 */
export async function handleApiTokenCreate(
  db: Kysely<Database>,
  userId: string,
  input: {
    name: string;
    scopes: string[];
    expiresAt?: string;
  },
): Promise<ApiResult<ApiTokenCreateResult>> {
  try {
    const id = ulid();
    const { raw, hash, prefix } = generatePrefixedToken("ec_pat_");
    await db
      .insertInto("_emdash_api_tokens")
      .values({
        id,
        name: input.name,
        token_hash: hash,
        prefix,
        user_id: userId,
        scopes: JSON.stringify(input.scopes),
        expires_at: input.expiresAt ?? null,
      })
      .execute();
    const info: ApiTokenInfo = {
      id,
      name: input.name,
      prefix,
      scopes: input.scopes,
      userId,
      expiresAt: input.expiresAt ?? null,
      lastUsedAt: null,
      // Approximation: created_at is not set in the INSERT (presumably a DB
      // default) — this client-side timestamp may differ by milliseconds.
      createdAt: new Date().toISOString(),
    };
    return { success: true, data: { token: raw, info } };
  } catch (error) {
    // Log the underlying cause (consistent with the comment/dashboard
    // handlers) instead of silently swallowing it.
    console.error("API token create error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_CREATE_ERROR",
        message: "Failed to create API token",
      },
    };
  }
}
/**
 * List all API tokens for a user (never returns the raw token or hash).
 *
 * @param db - Database handle
 * @param userId - Owner whose tokens are returned, newest first
 */
export async function handleApiTokenList(
  db: Kysely<Database>,
  userId: string,
): Promise<ApiResult<{ items: ApiTokenInfo[] }>> {
  try {
    const rows = await db
      .selectFrom("_emdash_api_tokens")
      .select([
        "id",
        "name",
        "prefix",
        "scopes",
        "user_id",
        "expires_at",
        "last_used_at",
        "created_at",
      ])
      .where("user_id", "=", userId)
      .orderBy("created_at", "desc")
      .execute();
    // Map snake_case rows → camelCase API shape. `scopes` was written by
    // handleApiTokenCreate as a JSON array, so the parse is trusted here.
    const items: ApiTokenInfo[] = rows.map((row) => ({
      id: row.id,
      name: row.name,
      prefix: row.prefix,
      scopes: JSON.parse(row.scopes) as string[],
      userId: row.user_id,
      expiresAt: row.expires_at,
      lastUsedAt: row.last_used_at,
      createdAt: row.created_at,
    }));
    return { success: true, data: { items } };
  } catch (error) {
    // Log the underlying cause (consistent with the comment/dashboard
    // handlers) instead of silently swallowing it.
    console.error("API token list error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_LIST_ERROR",
        message: "Failed to list API tokens",
      },
    };
  }
}
/**
 * Revoke (delete) an API token.
 *
 * Scoped to the owning user — a token ID belonging to another user
 * reports NOT_FOUND rather than leaking its existence.
 */
export async function handleApiTokenRevoke(
  db: Kysely<Database>,
  tokenId: string,
  userId: string,
): Promise<ApiResult<{ revoked: boolean }>> {
  try {
    const result = await db
      .deleteFrom("_emdash_api_tokens")
      .where("id", "=", tokenId)
      .where("user_id", "=", userId)
      .executeTakeFirst();
    // numDeletedRows is a bigint (Kysely DeleteResult)
    if (result.numDeletedRows === 0n) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Token not found" },
      };
    }
    return { success: true, data: { revoked: true } };
  } catch (error) {
    // Log the underlying cause (consistent with the comment/dashboard
    // handlers) instead of silently swallowing it.
    console.error("API token revoke error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_REVOKE_ERROR",
        message: "Failed to revoke API token",
      },
    };
  }
}
/**
 * Resolve a raw API token (ec_pat_...) to a user ID and scopes.
 * Updates last_used_at on successful lookup.
 * Returns null if the token is invalid or expired.
 */
export async function resolveApiToken(
  db: Kysely<Database>,
  rawToken: string,
): Promise<{ userId: string; scopes: string[] } | null> {
  const tokenRow = await db
    .selectFrom("_emdash_api_tokens")
    .select(["id", "user_id", "scopes", "expires_at"])
    .where("token_hash", "=", hashApiToken(rawToken))
    .executeTakeFirst();
  if (!tokenRow) return null;
  // Reject tokens whose expiry (when set) is in the past.
  if (tokenRow.expires_at && Date.parse(tokenRow.expires_at) < Date.now()) {
    return null;
  }
  // Best-effort bookkeeping: record usage without blocking the request.
  void db
    .updateTable("_emdash_api_tokens")
    .set({ last_used_at: new Date().toISOString() })
    .where("id", "=", tokenRow.id)
    .execute()
    .catch(() => {}); // Non-critical, swallow errors
  return {
    userId: tokenRow.user_id,
    scopes: JSON.parse(tokenRow.scopes) as string[],
  };
}
/**
 * Resolve an OAuth access token (ec_oat_...) to a user ID and scopes.
 * Returns null if the token is invalid or expired.
 */
export async function resolveOAuthToken(
  db: Kysely<Database>,
  rawToken: string,
): Promise<{ userId: string; scopes: string[] } | null> {
  const tokenRow = await db
    .selectFrom("_emdash_oauth_tokens")
    .select(["user_id", "scopes", "expires_at", "token_type"])
    .where("token_hash", "=", hashApiToken(rawToken))
    .where("token_type", "=", "access")
    .executeTakeFirst();
  if (!tokenRow) return null;
  // Access tokens always carry an expiry — reject once past it.
  if (Date.parse(tokenRow.expires_at) < Date.now()) return null;
  return {
    userId: tokenRow.user_id,
    scopes: JSON.parse(tokenRow.scopes) as string[],
  };
}

View File

@@ -0,0 +1,314 @@
/**
* Comment handlers — business logic for comment API routes.
*
* Standalone functions that return ApiResult<T>. Routes are thin wrappers.
*/
import type { Kysely } from "kysely";
import { CommentRepository } from "../../database/repositories/comment.js";
import type { Comment, CommentStatus, PublicComment } from "../../database/repositories/comment.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
// ---------------------------------------------------------------------------
// Public: List approved comments for content
// ---------------------------------------------------------------------------
/**
 * List approved comments for a piece of content.
 *
 * Threaded mode fetches all approved comments (capped at 500) and
 * assembles them into a tree so children aren't orphaned on later pages;
 * flat mode is cursor-paginated.
 */
export async function handleCommentList(
  db: Kysely<Database>,
  collection: string,
  contentId: string,
  options: { limit?: number; cursor?: string; threaded?: boolean } = {},
): Promise<ApiResult<{ items: PublicComment[]; nextCursor?: string; total: number }>> {
  try {
    const repo = new CommentRepository(db);
    // Total approved count is reported in both modes.
    const total = await repo.countByContent(collection, contentId, "approved");
    if (options.threaded) {
      // Cap the threaded fetch so a huge thread can't blow up the response.
      const MAX_THREADED = 500;
      const fetched = await repo.findByContent(collection, contentId, {
        status: "approved",
        limit: MAX_THREADED,
      });
      const tree = CommentRepository.assembleThreads(fetched.items);
      return {
        success: true,
        data: {
          items: tree.map((c) => CommentRepository.toPublicComment(c)),
          // Threaded responses are returned all at once — no cursor.
          nextCursor: undefined,
          total,
        },
      };
    }
    const page = await repo.findByContent(collection, contentId, {
      status: "approved",
      limit: options.limit,
      cursor: options.cursor,
    });
    return {
      success: true,
      data: {
        items: page.items.map((c) => CommentRepository.toPublicComment(c)),
        nextCursor: page.nextCursor,
        total,
      },
    };
  } catch (error) {
    console.error("Comment list error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_LIST_ERROR",
        message: "Failed to list comments",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Moderation inbox
// ---------------------------------------------------------------------------
/**
 * Moderation inbox: list comments by status (default "pending"),
 * optionally filtered by collection and search term, cursor-paginated.
 */
export async function handleCommentInbox(
  db: Kysely<Database>,
  options: {
    status?: CommentStatus;
    collection?: string;
    search?: string;
    limit?: number;
    cursor?: string;
  } = {},
): Promise<ApiResult<{ items: Comment[]; nextCursor?: string }>> {
  try {
    const { collection, search, limit, cursor } = options;
    const page = await new CommentRepository(db).findByStatus(options.status ?? "pending", {
      collection,
      search,
      limit,
      cursor,
    });
    return {
      success: true,
      data: { items: page.items, nextCursor: page.nextCursor },
    };
  } catch (error) {
    console.error("Comment inbox error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_INBOX_ERROR",
        message: "Failed to list comments",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Status counts for inbox badges
// ---------------------------------------------------------------------------
/** Per-status comment counts, used for the inbox badge numbers. */
export async function handleCommentCounts(
  db: Kysely<Database>,
): Promise<ApiResult<Record<CommentStatus, number>>> {
  try {
    return {
      success: true,
      data: await new CommentRepository(db).countByStatus(),
    };
  } catch (error) {
    console.error("Comment counts error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_COUNTS_ERROR",
        message: "Failed to get comment counts",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Get single comment detail
// ---------------------------------------------------------------------------
/** Fetch a single comment by ID (admin detail view). */
export async function handleCommentGet(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<Comment>> {
  try {
    const found = await new CommentRepository(db).findById(id);
    if (found) {
      return { success: true, data: found };
    }
    return {
      success: false,
      error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
    };
  } catch (error) {
    console.error("Comment get error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_GET_ERROR",
        message: "Failed to get comment",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Change comment status
// ---------------------------------------------------------------------------
/** Transition a comment to a new moderation status. */
export async function handleCommentStatusChange(
  db: Kysely<Database>,
  id: string,
  status: CommentStatus,
): Promise<ApiResult<Comment>> {
  try {
    const changed = await new CommentRepository(db).updateStatus(id, status);
    if (!changed) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
      };
    }
    return { success: true, data: changed };
  } catch (error) {
    console.error("Comment status change error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_STATUS_ERROR",
        message: "Failed to update comment status",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Hard delete comment
// ---------------------------------------------------------------------------
/** Permanently delete a comment (hard delete, no trash step). */
export async function handleCommentDelete(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const removed = await new CommentRepository(db).delete(id);
    return removed
      ? { success: true, data: { deleted: true } }
      : {
          success: false,
          error: { code: "NOT_FOUND", message: `Comment not found: ${id}` },
        };
  } catch (error) {
    console.error("Comment delete error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_DELETE_ERROR",
        message: "Failed to delete comment",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Admin: Bulk operations
// ---------------------------------------------------------------------------
/**
 * Perform a bulk moderation action on a set of comments.
 *
 * "delete" hard-deletes; the other actions map to a status update.
 *
 * @returns The number of comments affected.
 */
export async function handleCommentBulk(
  db: Kysely<Database>,
  ids: string[],
  action: "approve" | "spam" | "trash" | "delete",
): Promise<ApiResult<{ affected: number }>> {
  try {
    const repo = new CommentRepository(db);
    let affected: number;
    if (action === "delete") {
      affected = await repo.bulkDelete(ids);
    } else {
      // Key the map by the narrowed action union: a Record<string, ...>
      // lookup is `CommentStatus | undefined` under noUncheckedIndexedAccess
      // and would not type-check against bulkUpdateStatus. This form keeps
      // the lookup total and lets the compiler catch a missing action.
      const statusMap: Record<"approve" | "spam" | "trash", CommentStatus> = {
        approve: "approved",
        spam: "spam",
        trash: "trash",
      };
      affected = await repo.bulkUpdateStatus(ids, statusMap[action]);
    }
    return { success: true, data: { affected } };
  } catch (error) {
    console.error("Comment bulk error:", error);
    return {
      success: false,
      error: {
        code: "COMMENT_BULK_ERROR",
        message: "Failed to perform bulk operation",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Anti-spam: Rate limiting
// ---------------------------------------------------------------------------
/**
 * Check if an IP has exceeded the comment rate limit.
 * Uses ip_hash in the comments table — no separate counter storage.
 *
 * @returns true when the IP already has `maxPerWindow` or more comments
 * within the last `windowMinutes` minutes.
 */
export async function checkRateLimit(
  db: Kysely<Database>,
  ipHash: string,
  maxPerWindow: number = 5,
  windowMinutes: number = 10,
): Promise<boolean> {
  const windowStart = new Date(Date.now() - windowMinutes * 60_000).toISOString();
  // Count this IP's comments newer than the window start.
  const row = await db
    .selectFrom("_emdash_comments")
    .select((eb) => eb.fn.count("id").as("count"))
    .where("ip_hash", "=", ipHash)
    .where("created_at", ">", windowStart)
    .executeTakeFirst();
  const recentCount = Number(row?.count ?? 0);
  return recentCount >= maxPerWindow;
}
/**
 * Hash an IP address for storage (never store cleartext IPs).
 *
 * Uses full SHA-256 with an application salt to prevent rainbow-table
 * recovery of IPs. The caller should pass a site-specific secret;
 * falls back to a static salt if none is provided.
 */
export async function hashIp(ip: string, salt: string = "emdash-ip-salt"): Promise<string> {
  const payload = new TextEncoder().encode(`ip:${salt}:${ip}`);
  const digest = new Uint8Array(await crypto.subtle.digest("SHA-256", payload));
  // Lowercase hex, two characters per byte (64 chars total).
  let hex = "";
  for (const byte of digest) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,205 @@
/**
* Dashboard stats handler
*
* Returns summary data for the admin dashboard in a single request:
* collection content counts, media count, user count, and recent
* content across all collections.
*/
import { sql, type Kysely } from "kysely";
import { ContentRepository } from "../../database/repositories/content.js";
import { MediaRepository } from "../../database/repositories/media.js";
import { UserRepository } from "../../database/repositories/user.js";
import type { Database } from "../../database/types.js";
import { validateIdentifier } from "../../database/validate.js";
import type { ApiResult } from "../types.js";
/** Per-collection content counts shown on the dashboard. */
export interface CollectionStats {
  /** Collection slug (system-table key) */
  slug: string;
  /** Human-readable collection label */
  label: string;
  /** Count with no status filter */
  total: number;
  published: number;
  draft: number;
}
/** A recently-updated content entry, normalized across collections. */
export interface RecentItem {
  id: string;
  /** Slug of the collection the item belongs to */
  collection: string;
  /** Human-readable collection label */
  collectionLabel: string;
  /** Title field if the collection defines one; falls back to slug, then id */
  title: string;
  slug: string | null;
  status: string;
  /** ISO datetime of the last update (sort key for recency) */
  updatedAt: string;
  authorId: string | null;
}
/** Payload returned by handleDashboardStats. */
export interface DashboardStats {
  collections: CollectionStats[];
  mediaCount: number;
  userCount: number;
  /** The 10 most recently updated items across all collections */
  recentItems: RecentItem[];
}
/**
 * Fetch dashboard statistics.
 *
 * Queries are intentionally lightweight — counts use indexed columns,
 * and recent items are capped at 10.
 */
export async function handleDashboardStats(
  db: Kysely<Database>,
): Promise<ApiResult<DashboardStats>> {
  try {
    // Collections come from the system table, ordered for stable output.
    const collections = await db
      .selectFrom("_emdash_collections")
      .select(["slug", "label"])
      .orderBy("slug", "asc")
      .execute();
    // Per-collection total/published/draft counts, all in parallel.
    const contentRepo = new ContentRepository(db);
    const collectionStats: CollectionStats[] = await Promise.all(
      collections.map(async ({ slug, label }): Promise<CollectionStats> => {
        const [total, published, draft] = await Promise.all([
          contentRepo.count(slug),
          contentRepo.count(slug, { status: "published" }),
          contentRepo.count(slug, { status: "draft" }),
        ]);
        return { slug, label, total, published, draft };
      }),
    );
    // Library-wide media and user counts.
    const [mediaCount, userCount] = await Promise.all([
      new MediaRepository(db).count(),
      new UserRepository(db).count(),
    ]);
    // Last 10 updated items across all collections, any status.
    const recentItems = await fetchRecentItems(db, collections);
    return {
      success: true,
      data: {
        collections: collectionStats,
        mediaCount,
        userCount,
        recentItems,
      },
    };
  } catch (error) {
    console.error("Dashboard stats error:", error);
    return {
      success: false,
      error: {
        code: "DASHBOARD_STATS_ERROR",
        message: "Failed to load dashboard statistics",
      },
    };
  }
}
/** Raw row shape from the UNION ALL query — all snake_case. */
interface RecentItemRow {
  id: string;
  /** Collection slug, injected as a SQL literal into each UNION branch */
  collection: string;
  /** Collection display label, also injected as a SQL literal */
  collection_label: string;
  /** COALESCE(title, slug, id) — always non-null */
  title: string;
  slug: string | null;
  status: string;
  /** Sort key for the cross-collection recency ordering */
  updated_at: string;
  author_id: string | null;
}
/**
 * Fetch the 10 most recently updated items across all collections.
 *
 * Uses UNION ALL over each ec_* table. The query is safe because
 * collection slugs come from the system table and are validated.
 *
 * `title` is not a standard column — it's a user-defined field. We query
 * `_emdash_fields` to discover which collections have one and fall back
 * to `slug` (which is always present) otherwise.
 */
async function fetchRecentItems(
  db: Kysely<Database>,
  collections: Array<{ slug: string; label: string }>,
): Promise<RecentItem[]> {
  if (collections.length === 0) return [];
  // Discover which collections have a "title" column
  const titleFields = await db
    .selectFrom("_emdash_fields as f")
    .innerJoin("_emdash_collections as c", "c.id", "f.collection_id")
    .select(["c.slug as collection_slug"])
    .where("f.slug", "=", "title")
    .execute();
  const collectionsWithTitle = new Set(titleFields.map((r) => r.collection_slug));
  // Build a UNION ALL query across all content tables.
  // Each branch is wrapped in SELECT * FROM (...) so the inner
  // ORDER BY + LIMIT is valid SQLite (bare ORDER BY inside UNION
  // branches is a syntax error in SQLite).
  const subQueries = collections.map((col) => {
    validateIdentifier(col.slug);
    const table = `ec_${col.slug}`;
    const hasTitle = collectionsWithTitle.has(col.slug);
    // Use title column if it exists, otherwise fall back to slug → id.
    // All output uses snake_case to avoid SQLite quoting issues on D1.
    const titleExpr = hasTitle ? sql`COALESCE(title, slug, id)` : sql`COALESCE(slug, id)`;
    return sql<RecentItemRow>`
      SELECT * FROM (
        SELECT
          id,
          ${sql.lit(col.slug)} AS collection,
          ${sql.lit(col.label)} AS collection_label,
          ${titleExpr} AS title,
          slug,
          status,
          updated_at,
          author_id
        FROM ${sql.ref(table)}
        WHERE deleted_at IS NULL
        ORDER BY updated_at DESC
        LIMIT 10
      )
    `;
  });
  // Combine with UNION ALL. Destructuring gives the compiler a checked
  // narrowing under noUncheckedIndexedAccess, replacing the previous
  // non-null assertions (whose eslint-disable directives were malformed —
  // "typescript-eslint(rule)" is not valid disable syntax — and so
  // suppressed nothing).
  const [firstQuery, ...restQueries] = subQueries;
  if (!firstQuery) return []; // unreachable: collections is non-empty here
  let combined = firstQuery;
  for (const branch of restQueries) {
    combined = sql<RecentItemRow>`${combined} UNION ALL ${branch}`;
  }
  // Final sort + limit across all branches
  const result = await sql<RecentItemRow>`
    SELECT * FROM (${combined})
    ORDER BY updated_at DESC
    LIMIT 10
  `.execute(db);
  // Map snake_case DB rows → camelCase API shape
  return result.rows.map((row) => ({
    id: row.id,
    collection: row.collection,
    collectionLabel: row.collection_label,
    title: row.title,
    slug: row.slug,
    status: row.status,
    updatedAt: row.updated_at,
    authorId: row.author_id,
  }));
}

View File

@@ -0,0 +1,687 @@
/**
* OAuth Device Flow handlers (RFC 8628).
*
* EmDash acts as an OAuth 2.0 authorization server. The CLI requests
* a device code, displays a URL + user code, and polls for a token.
* The user opens a browser, logs in, enters the code, and the CLI gets
* an access + refresh token pair.
*
* Uses arctic for code generation and @emdashcms/auth for token utilities.
*/
import { clampScopes } from "@emdashcms/auth";
import type { RoleLevel } from "@emdashcms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import {
generatePrefixedToken,
hashApiToken,
TOKEN_PREFIXES,
VALID_SCOPES,
} from "../../auth/api-tokens.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
import { lookupOAuthClient } from "./oauth-clients.js";
import { lookupUserRoleAndStatus } from "./oauth-user-lookup.js";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** Device codes expire after 15 minutes */
const DEVICE_CODE_TTL_SECONDS = 15 * 60;
/** Default polling interval in seconds (returned to the client as `interval`) */
const DEFAULT_INTERVAL = 5;
/** RFC 8628 §3.5: interval increase on slow_down */
const SLOW_DOWN_INCREMENT = 5;
/** Maximum slow_down interval cap (seconds) — local policy, not RFC-mandated */
const MAX_SLOW_DOWN_INTERVAL = 60;
/** Access token TTL: 1 hour */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;
/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;
/** Default scopes granted for CLI login when the client requests none */
const DEFAULT_SCOPES = [
  "content:read",
  "content:write",
  "media:read",
  "media:write",
  "schema:read",
] as const;
/** Pattern to normalize user codes (strip hyphens) */
const HYPHEN_PATTERN = /-/g;
/** Characters for user codes (uppercase, no ambiguous chars like 0/O, 1/I) */
const USER_CODE_CHARS = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/** Response body for POST /oauth/device/code (RFC 8628 §3.2 field names). */
export interface DeviceCodeResponse {
  /** Opaque code the CLI polls with — never shown to the user */
  device_code: string;
  /** Short human-readable code (XXXX-XXXX) the user enters in the browser */
  user_code: string;
  /** URL the user should open to enter the user code */
  verification_uri: string;
  /** Seconds until the device code expires */
  expires_in: number;
  /** Minimum seconds the CLI must wait between polls */
  interval: number;
}
/** Successful token response (RFC 6749 §5.1 shape). */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  /** Access-token lifetime in seconds */
  expires_in: number;
  /** Space-separated granted scopes */
  scope: string;
}
// RFC 8628 error codes
export type DeviceFlowError =
  | "authorization_pending"
  | "slow_down"
  | "expired_token"
  | "access_denied";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Generate a short human-readable user code (XXXX-XXXX) */
function generateUserCode(): string {
  // 32-character alphabet divides 256 evenly, so the modulo is unbiased.
  const randomBytes = crypto.getRandomValues(new Uint8Array(8));
  let code = "";
  for (const byte of randomBytes) {
    code += USER_CODE_CHARS[byte % USER_CODE_CHARS.length];
  }
  return `${code.slice(0, 4)}-${code.slice(4)}`;
}
/** Get an ISO datetime string offset from now */
function expiresAt(seconds: number): string {
  const atMillis = Date.now() + seconds * 1000;
  return new Date(atMillis).toISOString();
}
/** Validate and normalize scopes. Returns validated scope list. */
function normalizeScopes(requested?: string[]): string[] {
  // No request (or empty request) falls back to the CLI defaults.
  if (!requested?.length) {
    return [...DEFAULT_SCOPES];
  }
  const allowed = new Set<string>(VALID_SCOPES);
  return requested.filter((scope) => allowed.has(scope));
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/**
* POST /oauth/device/code
*
* Issue a device code + user code. The CLI displays the user code
* and tells the user to open the verification URI.
*/
export async function handleDeviceCodeRequest(
db: Kysely<Database>,
input: {
client_id?: string;
scope?: string;
},
verificationUri: string,
): Promise<ApiResult<DeviceCodeResponse>> {
try {
// Parse and validate scopes
const requestedScopes = input.scope ? input.scope.split(" ").filter(Boolean) : [];
const scopes = normalizeScopes(requestedScopes);
if (scopes.length === 0) {
return {
success: false,
error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
};
}
const deviceCode = generateCodeVerifier();
const userCode = generateUserCode();
const expires = expiresAt(DEVICE_CODE_TTL_SECONDS);
await db
.insertInto("_emdash_device_codes")
.values({
device_code: deviceCode,
user_code: userCode,
scopes: JSON.stringify(scopes),
status: "pending",
expires_at: expires,
interval: DEFAULT_INTERVAL,
})
.execute();
return {
success: true,
data: {
device_code: deviceCode,
user_code: userCode,
verification_uri: verificationUri,
expires_in: DEVICE_CODE_TTL_SECONDS,
interval: DEFAULT_INTERVAL,
},
};
} catch {
return {
success: false,
error: {
code: "DEVICE_CODE_ERROR",
message: "Failed to create device code",
},
};
}
}
/**
 * POST /oauth/device/token
 *
 * CLI polls this endpoint with the device_code. Returns:
 * - 200 with tokens if authorized
 * - 400 with error "authorization_pending" while waiting
 * - 400 with error "slow_down" if polling too fast
 * - 400 with error "expired_token" if the code expired
 * - 400 with error "access_denied" if the user denied
 *
 * Device codes are single-use: expiry, denial, and a successful exchange
 * all delete the row. NOTE(review): the lookup → token insert → delete
 * sequence is not wrapped in a transaction; confirm concurrent polls of
 * the same device code cannot double-issue tokens.
 */
export async function handleDeviceTokenExchange(
  db: Kysely<Database>,
  input: {
    device_code: string;
    grant_type: string;
  },
): Promise<
  ApiResult<TokenResponse> & { deviceFlowError?: DeviceFlowError; deviceFlowInterval?: number }
> {
  try {
    // Validate grant_type — must be the fixed device-code URN (RFC 8628 §3.4)
    if (input.grant_type !== "urn:ietf:params:oauth:grant-type:device_code") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }
    // Look up the device code
    const row = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("device_code", "=", input.device_code)
      .executeTakeFirst();
    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code" },
      };
    }
    const now = new Date();
    // Check expiry before any status handling — an expired code is dead
    // regardless of whether the user approved it.
    if (new Date(row.expires_at) < now) {
      // Clean up expired code
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();
      return {
        success: false,
        deviceFlowError: "expired_token",
        error: { code: "expired_token", message: "The device code has expired" },
      };
    }
    // Check status
    if (row.status === "denied") {
      // Clean up denied code — denial is terminal
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", input.device_code)
        .execute();
      return {
        success: false,
        deviceFlowError: "access_denied",
        error: { code: "access_denied", message: "The user denied the request" },
      };
    }
    if (row.status === "pending") {
      // RFC 8628 §3.5: slow_down enforcement during polling phase.
      // Only applies while waiting for authorization — once authorized,
      // the final exchange proceeds without throttling.
      if (row.last_polled_at) {
        const lastPolled = new Date(row.last_polled_at);
        const elapsedSeconds = (now.getTime() - lastPolled.getTime()) / 1000;
        if (elapsedSeconds < row.interval) {
          // Too fast — increase interval by 5s per RFC 8628 §3.5, capped at 60s
          const newInterval = Math.min(row.interval + SLOW_DOWN_INCREMENT, MAX_SLOW_DOWN_INTERVAL);
          await db
            .updateTable("_emdash_device_codes")
            .set({
              interval: newInterval,
              last_polled_at: now.toISOString(),
            })
            .where("device_code", "=", input.device_code)
            .execute();
          return {
            success: false,
            deviceFlowError: "slow_down",
            deviceFlowInterval: newInterval,
            error: { code: "slow_down", message: "Polling too fast" },
          };
        }
      }
      // Update last_polled_at for future slow_down checks
      await db
        .updateTable("_emdash_device_codes")
        .set({ last_polled_at: now.toISOString() })
        .where("device_code", "=", input.device_code)
        .execute();
      return {
        success: false,
        deviceFlowError: "authorization_pending",
        error: { code: "authorization_pending", message: "Authorization pending" },
      };
    }
    // Defensive: anything other than an authorized row with a user attached
    // is an invalid state at this point.
    if (row.status !== "authorized" || !row.user_id) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid device code state" },
      };
    }
    // Authorized! Generate tokens. Scopes were clamped to the user's role
    // by handleDeviceAuthorize before being stored.
    const scopes = JSON.parse(row.scopes) as string[];
    // Generate access token
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);
    // Generate refresh token
    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);
    // Store both tokens. The access token row links back to its refresh
    // token via refresh_token_hash so refresh can revoke/replace it later.
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "cli",
        expires_at: accessExpires,
        refresh_token_hash: refreshToken.hash,
      })
      .execute();
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: refreshToken.hash,
        token_type: "refresh",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "cli",
        expires_at: refreshExpires,
        refresh_token_hash: null,
      })
      .execute();
    // Consume the device code (delete it) — single-use
    await db
      .deleteFrom("_emdash_device_codes")
      .where("device_code", "=", input.device_code)
      .execute();
    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange device code",
      },
    };
  }
}
/**
 * POST /oauth/device/authorize
 *
 * The user submits the user_code after logging in via the browser.
 * This authorizes the device code, allowing the CLI to exchange it for tokens.
 *
 * Scopes are clamped to the user's role at this point. The stored scopes
 * are replaced with the intersection of requested scopes and the scopes
 * the user's role permits. This prevents scope escalation.
 */
export async function handleDeviceAuthorize(
  db: Kysely<Database>,
  userId: string,
  userRole: RoleLevel,
  input: {
    user_code: string;
    action?: "approve" | "deny";
  },
): Promise<ApiResult<{ authorized: boolean }>> {
  try {
    // Compare in normalized form (hyphens stripped, uppercased) so the
    // user may type the code with or without the hyphen.
    const normalize = (code: string): string => code.replace(HYPHEN_PATTERN, "").toUpperCase();
    const submitted = normalize(input.user_code);
    // Codes are stored with a hyphen, so fetch pending codes and match in JS.
    const pendingRows = await db
      .selectFrom("_emdash_device_codes")
      .selectAll()
      .where("status", "=", "pending")
      .execute();
    const match = pendingRows.find((candidate) => normalize(candidate.user_code) === submitted);
    if (!match) {
      return {
        success: false,
        error: { code: "INVALID_CODE", message: "Invalid or expired code" },
      };
    }
    if (new Date(match.expires_at) < new Date()) {
      // Expired — remove the stale row before reporting.
      await db
        .deleteFrom("_emdash_device_codes")
        .where("device_code", "=", match.device_code)
        .execute();
      return {
        success: false,
        error: { code: "EXPIRED_CODE", message: "This code has expired" },
      };
    }
    if ((input.action ?? "approve") === "deny") {
      await db
        .updateTable("_emdash_device_codes")
        .set({ status: "denied" })
        .where("device_code", "=", match.device_code)
        .execute();
      return { success: true, data: { authorized: false } };
    }
    // Clamp requested scopes to those the user's role permits:
    // effective_scopes = requested_scopes ∩ scopesForRole(user.role)
    const effectiveScopes = clampScopes(JSON.parse(match.scopes) as string[], userRole);
    if (effectiveScopes.length === 0) {
      return {
        success: false,
        error: {
          code: "INSUFFICIENT_ROLE",
          message: "Your role does not permit any of the requested scopes",
        },
      };
    }
    // Approve: record the authorizing user and the clamped scope set.
    await db
      .updateTable("_emdash_device_codes")
      .set({
        status: "authorized",
        user_id: userId,
        scopes: JSON.stringify(effectiveScopes),
      })
      .where("device_code", "=", match.device_code)
      .execute();
    return { success: true, data: { authorized: true } };
  } catch {
    return {
      success: false,
      error: {
        code: "AUTHORIZE_ERROR",
        message: "Failed to authorize device",
      },
    };
  }
}
/**
 * POST /oauth/token/refresh
 *
 * Exchange a refresh token for a new access token.
 * The refresh token itself is not rotated (per spec: optional rotation).
 *
 * On every refresh the user's existence, enabled status, current role,
 * and (for registered clients) the client's scope allowlist are
 * revalidated before a new access token is minted.
 */
export async function handleTokenRefresh(
  db: Kysely<Database>,
  input: {
    refresh_token: string;
    grant_type: string;
  },
): Promise<ApiResult<TokenResponse>> {
  try {
    if (input.grant_type !== "refresh_token") {
      return {
        success: false,
        error: { code: "UNSUPPORTED_GRANT_TYPE", message: "Invalid grant_type" },
      };
    }
    // Cheap format check before hashing — avoids a pointless DB lookup.
    if (!input.refresh_token.startsWith(TOKEN_PREFIXES.OAUTH_REFRESH)) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token format" },
      };
    }
    const refreshHash = hashApiToken(input.refresh_token);
    const row = await db
      .selectFrom("_emdash_oauth_tokens")
      .selectAll()
      .where("token_hash", "=", refreshHash)
      .where("token_type", "=", "refresh")
      .executeTakeFirst();
    if (!row) {
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Invalid refresh token" },
      };
    }
    // Check expiry
    if (new Date(row.expires_at) < new Date()) {
      // Clean up expired refresh token and its access tokens
      // (access tokens link back via refresh_token_hash)
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "Refresh token expired" },
      };
    }
    // SEC-42: Revalidate user role before issuing new access token.
    // SEC-43: Reject refresh if user is disabled or deleted.
    const userInfo = await lookupUserRoleAndStatus(db, row.user_id);
    if (!userInfo) {
      // User no longer exists — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User not found" },
      };
    }
    if (userInfo.disabled) {
      // User is disabled — revoke all their tokens
      await db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", row.user_id).execute();
      return {
        success: false,
        error: { code: "INVALID_GRANT", message: "User account is disabled" },
      };
    }
    // Revalidate stored scopes against the user's current role.
    // A demoted user's refresh token may carry stale elevated scopes.
    const storedScopes = JSON.parse(row.scopes) as string[];
    let scopes = clampScopes(storedScopes, userInfo.role);
    // SEC-41: Intersect with the client's registered scopes (if any).
    // Same check as the approval path — a client registered with limited
    // scopes should never receive elevated scopes on refresh, even if the
    // user's role would allow them.
    if (row.client_id) {
      const client = await lookupOAuthClient(db, row.client_id);
      if (client?.scopes?.length) {
        scopes = scopes.filter((s: string) => client.scopes!.includes(s));
      }
    }
    if (scopes.length === 0) {
      // User's role no longer supports any of the token's scopes — revoke
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", refreshHash).execute();
      await db
        .deleteFrom("_emdash_oauth_tokens")
        .where("refresh_token_hash", "=", refreshHash)
        .execute();
      return {
        success: false,
        error: {
          code: "INVALID_GRANT",
          message: "User role no longer supports any of the token's scopes",
        },
      };
    }
    // Delete old access tokens for this refresh token, so at most one
    // live access token exists per refresh token.
    await db
      .deleteFrom("_emdash_oauth_tokens")
      .where("refresh_token_hash", "=", refreshHash)
      .where("token_type", "=", "access")
      .execute();
    // Generate new access token
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: row.client_type,
        expires_at: accessExpires,
        refresh_token_hash: refreshHash,
      })
      .execute();
    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: input.refresh_token, // Return same refresh token
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_REFRESH_ERROR",
        message: "Failed to refresh token",
      },
    };
  }
}
/**
* POST /oauth/token/revoke
*
* Revoke an access or refresh token. If a refresh token is revoked,
* also revoke all associated access tokens.
*
* Per RFC 7009, this endpoint always returns 200 (even for invalid tokens).
*/
export async function handleTokenRevoke(
  db: Kysely<Database>,
  input: {
    token: string;
  },
): Promise<ApiResult<{ revoked: boolean }>> {
  try {
    const tokenHash = hashApiToken(input.token);
    // Find the token so we know whether it is an access or a refresh token.
    const existing = await db
      .selectFrom("_emdash_oauth_tokens")
      .select(["token_hash", "token_type", "refresh_token_hash"])
      .where("token_hash", "=", tokenHash)
      .executeTakeFirst();
    if (existing) {
      if (existing.token_type === "refresh") {
        // A refresh token takes every access token derived from it with it.
        await db
          .deleteFrom("_emdash_oauth_tokens")
          .where("refresh_token_hash", "=", tokenHash)
          .execute();
      }
      await db.deleteFrom("_emdash_oauth_tokens").where("token_hash", "=", tokenHash).execute();
    }
    // RFC 7009 §2.2: respond with success even when the token was unknown.
    return { success: true, data: { revoked: true } };
  } catch {
    return {
      success: false,
      error: {
        code: "TOKEN_REVOKE_ERROR",
        message: "Failed to revoke token",
      },
    };
  }
}

View File

@@ -0,0 +1,163 @@
/**
 * API handler implementations for EmDash REST endpoints.
 *
 * This is a barrel module: it re-exports every handler (and its
 * request/response types) from the per-domain modules below so that routes
 * can import everything from a single path. No logic lives here.
 */
// Content handlers
export {
  handleContentList,
  handleContentGet,
  handleContentGetIncludingTrashed,
  handleContentCreate,
  handleContentUpdate,
  handleContentDuplicate,
  handleContentDelete,
  handleContentRestore,
  handleContentPermanentDelete,
  handleContentListTrashed,
  handleContentCountTrashed,
  handleContentSchedule,
  handleContentUnschedule,
  handleContentPublish,
  handleContentUnpublish,
  handleContentCountScheduled,
  handleContentDiscardDraft,
  handleContentCompare,
  handleContentTranslations,
  type TrashedContentItem,
} from "./content.js";
// Dashboard stats
export {
  handleDashboardStats,
  type CollectionStats,
  type DashboardStats,
  type RecentItem,
} from "./dashboard.js";
// Manifest generation
export { generateManifest } from "./manifest.js";
// Revision handlers
export {
  handleRevisionList,
  handleRevisionGet,
  handleRevisionRestore,
  type RevisionListResponse,
  type RevisionResponse,
} from "./revision.js";
// Media handlers
export {
  handleMediaList,
  handleMediaGet,
  handleMediaCreate,
  handleMediaUpdate,
  handleMediaDelete,
  type MediaListResponse,
  type MediaResponse,
} from "./media.js";
// Schema handlers
export {
  handleSchemaCollectionList,
  handleSchemaCollectionGet,
  handleSchemaCollectionCreate,
  handleSchemaCollectionUpdate,
  handleSchemaCollectionDelete,
  handleSchemaFieldList,
  handleSchemaFieldGet,
  handleSchemaFieldCreate,
  handleSchemaFieldUpdate,
  handleSchemaFieldDelete,
  handleSchemaFieldReorder,
  handleOrphanedTableList,
  handleOrphanedTableRegister,
  type CollectionListResponse,
  type CollectionResponse,
  type CollectionWithFieldsResponse,
  type FieldListResponse,
  type FieldResponse,
  type OrphanedTable,
  type OrphanedTableListResponse,
} from "./schema.js";
// SEO handlers
export { handleSitemapData, type SitemapContentEntry, type SitemapDataResponse } from "./seo.js";
// Plugin handlers
export {
  handlePluginList,
  handlePluginGet,
  handlePluginEnable,
  handlePluginDisable,
  type PluginInfo,
  type PluginListResponse,
  type PluginResponse,
} from "./plugins.js";
// Menu handlers
export {
  handleMenuList,
  handleMenuCreate,
  handleMenuGet,
  handleMenuUpdate,
  handleMenuDelete,
  handleMenuItemCreate,
  handleMenuItemUpdate,
  handleMenuItemDelete,
  handleMenuItemReorder,
  type MenuListItem,
  type MenuWithItems,
  type CreateMenuItemInput,
  type UpdateMenuItemInput,
  type ReorderItem,
} from "./menus.js";
// Section handlers
export {
  handleSectionList,
  handleSectionCreate,
  handleSectionGet,
  handleSectionUpdate,
  handleSectionDelete,
  type SectionListResponse,
} from "./sections.js";
// Settings handlers
export { handleSettingsGet, handleSettingsUpdate } from "./settings.js";
// Taxonomy handlers
export {
  handleTaxonomyList,
  handleTermList,
  handleTermCreate,
  handleTermGet,
  handleTermUpdate,
  handleTermDelete,
  type TaxonomyDef,
  type TaxonomyListResponse,
  type TermData,
  type TermWithCount,
  type TermListResponse,
  type TermResponse,
  type TermGetResponse,
} from "./taxonomies.js";
// Marketplace handlers (install/update/uninstall plus marketplace proxy endpoints)
export {
  handleMarketplaceInstall,
  handleMarketplaceUpdate,
  handleMarketplaceUninstall,
  handleMarketplaceUpdateCheck,
  handleMarketplaceSearch,
  handleMarketplaceGetPlugin,
  handleThemeSearch,
  handleThemeGetDetail,
  loadBundleFromR2,
  type MarketplaceInstallResult,
  type MarketplaceUpdateResult,
  type MarketplaceUpdateCheck,
  type MarketplaceUninstallResult,
} from "./marketplace.js";

View File

@@ -0,0 +1,158 @@
/**
* Manifest generation handlers
*/
import { hashString } from "../../utils/hash.js";
import type { ManifestResponse, FieldDescriptor } from "../types.js";
/** Pattern to add spaces before capital letters */
const CAMEL_CASE_PATTERN = /([A-Z])/g;
/** Pattern matching the first character of a string (used to capitalize labels) */
const FIRST_CHAR_PATTERN = /^./;
// Collection definition shape for manifest generation
interface CollectionDefinition {
  // Zod-like schema: some Zod versions expose the object shape behind a
  // thunk on _def, others as a plain .shape property — support both.
  schema: {
    _def?: { shape?: () => Record<string, unknown> };
    shape?: Record<string, unknown>;
  };
  // Admin-UI metadata for the collection.
  admin: {
    label: string;
    labelSingular?: string;
    supports?: string[];
  };
}
// All collections known to the site, keyed by collection name.
type CollectionMap = Record<string, CollectionDefinition>;
/**
* Generate admin manifest from collections
*/
/**
 * Generate the admin manifest from the configured collections.
 *
 * @param collections - Collection definitions keyed by collection name
 * @param plugins - Plugin admin-page/widget registrations to embed verbatim
 * @returns Manifest with a content hash used for client cache invalidation
 */
export async function generateManifest(
  collections: CollectionMap,
  plugins: Record<
    string,
    {
      adminPages?: Array<{ path: string; component: string }>;
      widgets?: string[];
    }
  > = {},
): Promise<ManifestResponse> {
  const manifestCollections: ManifestResponse["collections"] = {};
  for (const [name, definition] of Object.entries(collections)) {
    const { label, labelSingular, supports } = definition.admin;
    manifestCollections[name] = {
      label,
      // Fall back to the plural label when no singular form is configured.
      labelSingular: labelSingular || label,
      supports: supports || [],
      fields: extractFieldDescriptors(definition.schema),
    };
  }
  // Hash the serialized collections so clients can detect manifest changes.
  const hash = await hashString(JSON.stringify(manifestCollections));
  return { version: "0.1.0", hash, collections: manifestCollections, plugins };
}
/**
* Extract field descriptors from Zod schema
* Note: This is a simplified implementation that handles common types
*/
/**
 * Extract field descriptors from a Zod object schema.
 * Handles both shape-as-thunk (_def.shape()) and shape-as-property layouts.
 */
function extractFieldDescriptors(schema: {
  _def?: { shape?: () => Record<string, unknown> };
  shape?: Record<string, unknown>;
}): Record<string, FieldDescriptor> {
  const shape =
    typeof schema._def?.shape === "function" ? schema._def.shape() : schema.shape || {};
  return Object.fromEntries(
    Object.entries(shape).map(([fieldName, fieldSchema]) => [
      fieldName,
      extractFieldType(fieldName, fieldSchema),
    ]),
  );
}
/**
* Extract field type from Zod schema
*/
/** Type guard: check if a value is a non-null object */
function isObject(value: unknown): value is Record<string, unknown> {
  // typeof null === "object", so exclude null explicitly.
  return value !== null && typeof value === "object";
}
/**
 * Map a single Zod field schema to an admin FieldDescriptor.
 * Custom EmDash markers (portable text / image / reference) win over the
 * underlying Zod type; unknown or unrecognized schemas fall back to string.
 */
function extractFieldType(name: string, schema: unknown): FieldDescriptor {
  const label = formatLabel(name);
  if (!isObject(schema)) {
    return { kind: "string", label };
  }
  // Custom field markers take precedence over the Zod type name.
  if (schema.isPortableText) return { kind: "portableText", label };
  if (schema.isImage) return { kind: "image", label };
  if (schema.isReference) return { kind: "reference", label };
  const def = isObject(schema._def) ? schema._def : undefined;
  const typeName = typeof def?.typeName === "string" ? def.typeName : undefined;
  if (typeName === "ZodEnum") {
    const rawValues = Array.isArray(def?.values) ? def.values : [];
    return {
      kind: "select",
      label,
      options: rawValues
        .filter((v): v is string => typeof v === "string")
        .map((v) => ({
          value: v,
          label: v.charAt(0).toUpperCase() + v.slice(1),
        })),
    };
  }
  if (typeName === "ZodOptional" || typeName === "ZodDefault") {
    // Unwrap to the inner schema; fall back to string when it is absent.
    return def?.innerType ? extractFieldType(name, def.innerType) : { kind: "string", label };
  }
  // Simple one-to-one mappings from Zod type name to descriptor kind.
  const simpleKinds: Record<string, FieldDescriptor["kind"]> = {
    ZodString: "string",
    ZodNumber: "number",
    ZodBoolean: "boolean",
    ZodDate: "datetime",
    ZodArray: "array",
    ZodObject: "object",
  };
  return { kind: simpleKinds[typeName ?? ""] ?? "string", label };
}
/**
* Format field name as label
*/
/**
 * Turn a camelCase field name into a human-readable label,
 * e.g. "authorName" -> "Author Name".
 */
function formatLabel(name: string): string {
  // Insert a space before each capital, capitalize the first character,
  // then trim the leading space introduced for names starting with a capital.
  const spaced = name.replace(/([A-Z])/g, " $1");
  const capitalized = spaced.replace(/^./, (first) => first.toUpperCase());
  return capitalized.trim();
}

View File

@@ -0,0 +1,930 @@
/**
* Marketplace plugin handlers
*
* Business logic for installing, updating, uninstalling, and checking
* updates for marketplace plugins. Routes are thin wrappers around these.
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import { validatePluginIdentifier } from "../../database/validate.js";
import { pluginManifestSchema } from "../../plugins/manifest-schema.js";
import { normalizeManifestRoute } from "../../plugins/manifest-schema.js";
import {
createMarketplaceClient,
MarketplaceError,
MarketplaceUnavailableError,
type MarketplaceClient,
type MarketplacePluginDetail,
type MarketplaceSearchOpts,
type MarketplaceThemeSearchOpts,
type MarketplaceVersionSummary,
type PluginBundle,
} from "../../plugins/marketplace.js";
import type { SandboxRunner } from "../../plugins/sandbox/types.js";
import { PluginStateRepository } from "../../plugins/state.js";
import type { PluginManifest } from "../../plugins/types.js";
import { EmDashStorageError } from "../../storage/types.js";
import type { Storage } from "../../storage/types.js";
import type { ApiResult } from "../types.js";
// ── Types ──────────────────────────────────────────────────────────
/** Result of a successful marketplace plugin installation. */
export interface MarketplaceInstallResult {
  pluginId: string;
  // The exact version that was installed.
  version: string;
  // Capabilities declared by the installed bundle's manifest.
  capabilities: string[];
}
/** Result of a successful marketplace plugin update. */
export interface MarketplaceUpdateResult {
  pluginId: string;
  oldVersion: string;
  newVersion: string;
  // Capabilities gained/lost relative to the previously installed bundle.
  capabilityChanges: {
    added: string[];
    removed: string[];
  };
  // Present only when the update exposed routes that were previously private.
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}
/** One entry in the "updates available" preview for an installed plugin. */
export interface MarketplaceUpdateCheck {
  pluginId: string;
  // Currently installed version.
  installed: string;
  // Latest version published on the marketplace.
  latest: string;
  hasUpdate: boolean;
  hasCapabilityChanges: boolean;
  capabilityChanges?: {
    added: string[];
    removed: string[];
  };
  // Route visibility diffs are not computed during the preview check
  // (too expensive); enforcement happens at update time.
  hasRouteVisibilityChanges: boolean;
  routeVisibilityChanges?: {
    newlyPublic: string[];
  };
}
/** Result of uninstalling a marketplace plugin. */
export interface MarketplaceUninstallResult {
  pluginId: string;
  // True when the plugin's persisted storage rows were also deleted.
  dataDeleted: boolean;
}
// ── Helpers ────────────────────────────────────────────────────────
/**
 * Semver-like version strings: a leading alphanumeric, then alphanumerics
 * plus dots, underscores, pluses, and hyphens (e.g. 1.0.0, 1.0.0-beta.1).
 */
const VERSION_PATTERN = /^[a-z0-9][a-z0-9._+-]*$/i;
/**
 * Reject any version string that could not have come from the marketplace.
 * Versions are embedded in storage keys, so ".." is rejected explicitly to
 * rule out path traversal.
 * @throws Error when the version is malformed
 */
function validateVersion(version: string): void {
  const acceptable = !version.includes("..") && VERSION_PATTERN.test(version);
  if (!acceptable) {
    throw new Error("Invalid version format");
  }
}
/** Build a marketplace client, or return null when no URL is configured. */
function getClient(marketplaceUrl: string | undefined): MarketplaceClient | null {
  return marketplaceUrl ? createMarketplaceClient(marketplaceUrl) : null;
}
/**
 * Compute the capability delta between two manifests.
 * @returns capabilities present only in newCaps (added) and only in oldCaps (removed)
 */
function diffCapabilities(
  oldCaps: string[],
  newCaps: string[],
): { added: string[]; removed: string[] } {
  const inOld = new Set(oldCaps);
  const inNew = new Set(newCaps);
  const added: string[] = [];
  for (const cap of newCaps) {
    if (!inOld.has(cap)) added.push(cap);
  }
  const removed: string[] = [];
  for (const cap of oldCaps) {
    if (!inNew.has(cap)) removed.push(cap);
  }
  return { added, removed };
}
/**
 * Diff route visibility between two manifests.
 * Returns routes that changed from private to public (newly exposed) —
 * a security-sensitive change, since public routes skip authentication.
 */
function diffRouteVisibility(
  oldManifest: PluginManifest | undefined,
  newManifest: PluginManifest,
): { newlyPublic: string[] } {
  // Collect the names of routes that were already public in the old manifest.
  const wasPublic = new Set<string>();
  for (const entry of oldManifest?.routes ?? []) {
    const route = normalizeManifestRoute(entry);
    if (route.public === true) {
      wasPublic.add(route.name);
    }
  }
  // Anything public in the new manifest that wasn't public before is new exposure.
  const newlyPublic = newManifest.routes
    .map((entry) => normalizeManifestRoute(entry))
    .filter((route) => route.public === true && !wasPublic.has(route.name))
    .map((route) => route.name);
  return { newlyPublic };
}
/**
 * Resolve metadata for a specific plugin version.
 * Uses the already-fetched detail payload when it carries the requested
 * version; otherwise queries the plugin's full version history.
 * @returns the version summary, or null when the version does not exist
 */
async function resolveVersionMetadata(
  client: MarketplaceClient,
  pluginId: string,
  pluginDetail: MarketplacePluginDetail,
  version: string,
): Promise<MarketplaceVersionSummary | null> {
  const latest = pluginDetail.latestVersion;
  if (latest?.version === version) {
    // Fast path: no extra network round-trip needed.
    return {
      version: latest.version,
      minEmDashVersion: latest.minEmDashVersion,
      bundleSize: latest.bundleSize,
      checksum: latest.checksum,
      changelog: latest.changelog,
      capabilities: latest.capabilities,
      status: latest.status,
      auditVerdict: latest.audit?.verdict ?? null,
      imageAuditVerdict: latest.imageAudit?.verdict ?? null,
      publishedAt: latest.publishedAt,
    };
  }
  const allVersions = await client.getVersions(pluginId);
  return allVersions.find((candidate) => candidate.version === version) ?? null;
}
/**
 * Verify that a downloaded bundle actually is the plugin/version we asked for.
 * @returns an error ApiResult on mismatch, or null when the identity checks pass
 */
function validateBundleIdentity(
  bundle: PluginBundle,
  pluginId: string,
  version: string,
): ApiResult<never> | null {
  const { id: manifestId, version: manifestVersion } = bundle.manifest;
  if (manifestId !== pluginId) {
    return {
      success: false,
      error: {
        code: "MANIFEST_MISMATCH",
        message: `Bundle manifest ID (${manifestId}) does not match requested plugin (${pluginId})`,
      },
    };
  }
  if (manifestVersion !== version) {
    return {
      success: false,
      error: {
        code: "MANIFEST_VERSION_MISMATCH",
        message: `Bundle manifest version (${manifestVersion}) does not match requested version (${version})`,
      },
    };
  }
  return null;
}
/**
 * Store a plugin bundle's files in site-local R2 storage under
 * marketplace/<pluginId>/<version>/. Identifiers are validated first since
 * they become storage-key path segments.
 */
async function storeBundleInR2(
  storage: Storage,
  pluginId: string,
  version: string,
  bundle: PluginBundle,
): Promise<void> {
  validatePluginIdentifier(pluginId, "plugin ID");
  validateVersion(version);
  const prefix = `marketplace/${pluginId}/${version}`;
  const putFile = (file: string, text: string, contentType: string) =>
    storage.upload({
      key: `${prefix}/${file}`,
      body: new TextEncoder().encode(text),
      contentType,
    });
  await putFile("manifest.json", JSON.stringify(bundle.manifest), "application/json");
  await putFile("backend.js", bundle.backendCode, "application/javascript");
  // Admin UI code is optional; only store it when the bundle ships one.
  if (bundle.adminCode) {
    await putFile("admin.js", bundle.adminCode, "application/javascript");
  }
}
/** Drain a ReadableStream of bytes and decode it as UTF-8 text. */
async function streamToText(stream: ReadableStream<Uint8Array>): Promise<string> {
  const reader = stream.getReader();
  // Streaming decode so multi-byte sequences split across chunks stay intact.
  const decoder = new TextDecoder();
  let text = "";
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    text += decoder.decode(value, { stream: true });
  }
  // Flush any bytes buffered at the end of the stream.
  return text + decoder.decode();
}
/**
 * Load a plugin bundle from site-local R2 storage.
 *
 * Reads manifest.json and backend.js (required) plus admin.js (optional)
 * from marketplace/<pluginId>/<version>/ and validates the manifest against
 * the plugin manifest schema.
 *
 * @returns the parsed bundle, or null when any required file is missing,
 *   unreadable, or the manifest fails schema validation
 * @throws Error when pluginId/version are malformed (path-segment validation)
 */
export async function loadBundleFromR2(
  storage: Storage,
  pluginId: string,
  version: string,
): Promise<{ manifest: PluginManifest; backendCode: string; adminCode?: string } | null> {
  // Both values become storage-key path segments; validate before use.
  validatePluginIdentifier(pluginId, "plugin ID");
  validateVersion(version);
  const prefix = `marketplace/${pluginId}/${version}`;
  try {
    const manifestResult = await storage.download(`${prefix}/manifest.json`);
    const backendResult = await storage.download(`${prefix}/backend.js`);
    const manifestText = await streamToText(manifestResult.body);
    const backendCode = await streamToText(backendResult.body);
    const parsed: unknown = JSON.parse(manifestText);
    const result = pluginManifestSchema.safeParse(parsed);
    if (!result.success) return null;
    // Elements are validated as unknown[] by Zod; cast to PluginManifest
    // for the Element[] type (Block Kit validation happens at render time).
    // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Zod types elements as unknown[]; Element type validated at render time
    const manifest = result.data as unknown as PluginManifest;
    // Try to load admin code (optional)
    let adminCode: string | undefined;
    try {
      const adminResult = await storage.download(`${prefix}/admin.js`);
      adminCode = await streamToText(adminResult.body);
    } catch {
      // admin.js is optional
    }
    return { manifest, backendCode, adminCode };
  } catch {
    // Missing/corrupt bundle files are reported as "no bundle" rather than
    // surfacing a storage error to callers.
    return null;
  }
}
/**
 * Best-effort removal of a plugin bundle's files from site-local R2 storage.
 * Missing files are ignored so a partially-stored bundle can still be cleaned up.
 */
async function deleteBundleFromR2(
  storage: Storage,
  pluginId: string,
  version: string,
): Promise<void> {
  // Validate both path segments before building storage keys from them.
  validatePluginIdentifier(pluginId, "plugin ID");
  validateVersion(version);
  const prefix = `marketplace/${pluginId}/${version}`;
  for (const file of ["manifest.json", "backend.js", "admin.js"]) {
    try {
      await storage.delete(`${prefix}/${file}`);
    } catch {
      // Ignore missing files — deletion is best-effort.
    }
  }
}
// ── Install ────────────────────────────────────────────────────────
/**
 * Install a plugin from the marketplace.
 *
 * Requires a configured marketplace URL, storage, and an available sandbox
 * runner. Rejects plugins that failed (or have inconclusive) security audits,
 * verifies the bundle checksum and manifest identity, stores the bundle in
 * site-local storage, and records plugin state as active.
 *
 * @param db - Site database handle
 * @param storage - Site-local bundle storage (required for installation)
 * @param sandboxRunner - Sandbox that will execute the untrusted plugin code
 * @param marketplaceUrl - Base URL of the marketplace (undefined = not configured)
 * @param pluginId - Marketplace plugin identifier
 * @param opts.version - Specific version to install; defaults to the latest
 * @param opts.configuredPluginIds - IDs of trusted locally-configured plugins,
 *   used to block ID shadowing
 */
export async function handleMarketplaceInstall(
  db: Kysely<Database>,
  storage: Storage | null,
  sandboxRunner: SandboxRunner | null,
  marketplaceUrl: string | undefined,
  pluginId: string,
  opts?: { version?: string; configuredPluginIds?: Set<string> },
): Promise<ApiResult<MarketplaceInstallResult>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: {
        code: "MARKETPLACE_NOT_CONFIGURED",
        message: "Marketplace is not configured",
      },
    };
  }
  if (!storage) {
    return {
      success: false,
      error: {
        code: "STORAGE_NOT_CONFIGURED",
        message: "Storage is required for marketplace plugin installation",
      },
    };
  }
  if (!sandboxRunner || !sandboxRunner.isAvailable()) {
    return {
      success: false,
      error: {
        code: "SANDBOX_NOT_AVAILABLE",
        message: "Sandbox runner is required for marketplace plugins",
      },
    };
  }
  try {
    // Check if already installed
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    if (existing && existing.source === "marketplace") {
      return {
        success: false,
        error: {
          code: "ALREADY_INSTALLED",
          message: `Plugin ${pluginId} is already installed`,
        },
      };
    }
    // Block installation if a configured (trusted) plugin with the same ID exists.
    // Without this check, the sandboxed plugin could shadow the trusted plugin's
    // route handlers while auth decisions are made against the trusted plugin's metadata.
    if (opts?.configuredPluginIds?.has(pluginId)) {
      return {
        success: false,
        error: {
          code: "PLUGIN_ID_CONFLICT",
          message: `Cannot install marketplace plugin "${pluginId}" — a configured plugin with the same ID already exists`,
        },
      };
    }
    // Fetch plugin detail from marketplace
    const pluginDetail = await client.getPlugin(pluginId);
    const version = opts?.version ?? pluginDetail.latestVersion?.version;
    if (!version) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `No published versions found for plugin ${pluginId}`,
        },
      };
    }
    const versionMetadata = await resolveVersionMetadata(client, pluginId, pluginDetail, version);
    if (!versionMetadata) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `Version ${version} was not found for plugin ${pluginId}`,
        },
      };
    }
    // Block installation of plugins that haven't passed audit.
    // Both "fail" (explicitly malicious) and "warn" (audit error or
    // inconclusive) are non-installable — only "pass" or null (no audit
    // ran) are allowed through.
    if (versionMetadata.auditVerdict === "fail" || versionMetadata.auditVerdict === "warn") {
      return {
        success: false,
        error: {
          code: "AUDIT_FAILED",
          message:
            versionMetadata.auditVerdict === "fail"
              ? "Plugin failed security audit and cannot be installed"
              : "Plugin audit was inconclusive and cannot be installed until reviewed",
        },
      };
    }
    // Download and extract bundle
    const bundle = await client.downloadBundle(pluginId, version);
    // Verify checksum matches marketplace-published checksum
    if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
      return {
        success: false,
        error: {
          code: "CHECKSUM_MISMATCH",
          message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
        },
      };
    }
    // Make sure the bundle's manifest claims the plugin/version we requested.
    const bundleIdentityError = validateBundleIdentity(bundle, pluginId, version);
    if (bundleIdentityError) return bundleIdentityError;
    // Store bundle in site-local R2
    await storeBundleInR2(storage, pluginId, version, bundle);
    // Write plugin state
    await stateRepo.upsert(pluginId, version, "active", {
      source: "marketplace",
      marketplaceVersion: version,
      displayName: pluginDetail.name,
      description: pluginDetail.description ?? undefined,
    });
    // Fire-and-forget install stat
    client.reportInstall(pluginId, version).catch(() => {
      // Intentional: never fails the install
    });
    return {
      success: true,
      data: {
        pluginId,
        version,
        capabilities: bundle.manifest.capabilities,
      },
    };
  } catch (err) {
    // Map known error classes to stable API error codes.
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: {
          code: "MARKETPLACE_UNAVAILABLE",
          message: "Plugin marketplace is currently unavailable",
        },
      };
    }
    if (err instanceof MarketplaceError) {
      return {
        success: false,
        error: {
          code: err.code ?? "MARKETPLACE_ERROR",
          message: err.message,
        },
      };
    }
    if (err instanceof EmDashStorageError) {
      return {
        success: false,
        error: {
          code: err.code ?? "STORAGE_ERROR",
          message: "Storage error while installing plugin",
        },
      };
    }
    // Preserve a string `code` from unknown error shapes (but keep the
    // message generic so internal details don't leak to the client).
    if (err && typeof err === "object" && "code" in err) {
      const code = (err as { code?: unknown }).code;
      if (typeof code === "string" && code.trim()) {
        return {
          success: false,
          error: {
            code,
            message: "Failed to install plugin from marketplace",
          },
        };
      }
    }
    console.error("Failed to install marketplace plugin:", err);
    return {
      success: false,
      error: {
        code: "INSTALL_FAILED",
        message: "Failed to install plugin from marketplace",
      },
    };
  }
}
// ── Update ─────────────────────────────────────────────────────────
/**
 * Update an installed marketplace plugin to a new version.
 *
 * Verifies checksum and manifest identity, then requires explicit
 * confirmation when the update adds capabilities (CAPABILITY_ESCALATION) or
 * turns private routes public (ROUTE_VISIBILITY_ESCALATION). On success the
 * new bundle replaces the old one and the old bundle is deleted best-effort.
 *
 * @param db - Site database handle
 * @param storage - Site-local bundle storage (required)
 * @param sandboxRunner - Sandbox runner (required to execute plugin code)
 * @param marketplaceUrl - Base URL of the marketplace
 * @param pluginId - Installed marketplace plugin identifier
 * @param opts.version - Target version; defaults to the latest
 * @param opts.confirmCapabilityChanges - Caller acknowledges new capabilities
 * @param opts.confirmRouteVisibilityChanges - Caller acknowledges newly public routes
 */
export async function handleMarketplaceUpdate(
  db: Kysely<Database>,
  storage: Storage | null,
  sandboxRunner: SandboxRunner | null,
  marketplaceUrl: string | undefined,
  pluginId: string,
  opts?: {
    version?: string;
    confirmCapabilityChanges?: boolean;
    confirmRouteVisibilityChanges?: boolean;
  },
): Promise<ApiResult<MarketplaceUpdateResult>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
    };
  }
  if (!storage) {
    return {
      success: false,
      error: { code: "STORAGE_NOT_CONFIGURED", message: "Storage is required" },
    };
  }
  if (!sandboxRunner || !sandboxRunner.isAvailable()) {
    return {
      success: false,
      error: { code: "SANDBOX_NOT_AVAILABLE", message: "Sandbox runner is required" },
    };
  }
  try {
    const stateRepo = new PluginStateRepository(db);
    const existing = await stateRepo.get(pluginId);
    // Only marketplace-sourced plugins can be updated through this path.
    if (!existing || existing.source !== "marketplace") {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `No marketplace plugin found: ${pluginId}`,
        },
      };
    }
    const oldVersion = existing.marketplaceVersion ?? existing.version;
    // Get target version
    const pluginDetail = await client.getPlugin(pluginId);
    const newVersion = opts?.version ?? pluginDetail.latestVersion?.version;
    if (!newVersion) {
      return {
        success: false,
        error: { code: "NO_VERSION", message: "No newer version available" },
      };
    }
    if (newVersion === oldVersion) {
      return {
        success: false,
        error: { code: "ALREADY_UP_TO_DATE", message: "Plugin is already up to date" },
      };
    }
    const versionMetadata = await resolveVersionMetadata(
      client,
      pluginId,
      pluginDetail,
      newVersion,
    );
    if (!versionMetadata) {
      return {
        success: false,
        error: {
          code: "NO_VERSION",
          message: `Version ${newVersion} was not found for plugin ${pluginId}`,
        },
      };
    }
    // Download new bundle
    const bundle = await client.downloadBundle(pluginId, newVersion);
    // Verify checksum matches marketplace-published checksum for this version
    if (versionMetadata.checksum && bundle.checksum !== versionMetadata.checksum) {
      return {
        success: false,
        error: {
          code: "CHECKSUM_MISMATCH",
          message: "Bundle checksum does not match marketplace record. Download may be corrupted.",
        },
      };
    }
    const bundleIdentityError = validateBundleIdentity(bundle, pluginId, newVersion);
    if (bundleIdentityError) return bundleIdentityError;
    // Diff capabilities and route visibility against old version
    const oldBundle = await loadBundleFromR2(storage, pluginId, oldVersion);
    // If the old bundle is unreadable, treat it as having no capabilities —
    // every capability in the new bundle then counts as an addition.
    const oldCaps = oldBundle?.manifest.capabilities ?? [];
    const capabilityChanges = diffCapabilities(oldCaps, bundle.manifest.capabilities);
    const hasEscalation = capabilityChanges.added.length > 0;
    // If capabilities escalated, require explicit confirmation
    if (hasEscalation && !opts?.confirmCapabilityChanges) {
      return {
        success: false,
        error: {
          code: "CAPABILITY_ESCALATION",
          message: "Plugin update requires new capabilities",
          details: { capabilityChanges },
        },
      };
    }
    // Diff route visibility — routes going from private to public are a
    // security-sensitive change that exposes unauthenticated endpoints.
    const routeVisibilityChanges = diffRouteVisibility(oldBundle?.manifest, bundle.manifest);
    const hasNewPublicRoutes = routeVisibilityChanges.newlyPublic.length > 0;
    if (hasNewPublicRoutes && !opts?.confirmRouteVisibilityChanges) {
      return {
        success: false,
        error: {
          code: "ROUTE_VISIBILITY_ESCALATION",
          message: "Plugin update exposes new public (unauthenticated) routes",
          details: { routeVisibilityChanges, capabilityChanges },
        },
      };
    }
    // Store new bundle
    await storeBundleInR2(storage, pluginId, newVersion, bundle);
    // Update state
    await stateRepo.upsert(pluginId, newVersion, "active", {
      source: "marketplace",
      marketplaceVersion: newVersion,
      displayName: pluginDetail.name,
      description: pluginDetail.description ?? undefined,
    });
    // Clean up old bundle from R2 (best-effort)
    deleteBundleFromR2(storage, pluginId, oldVersion).catch(() => {});
    return {
      success: true,
      data: {
        pluginId,
        oldVersion,
        newVersion,
        capabilityChanges,
        routeVisibilityChanges: hasNewPublicRoutes ? routeVisibilityChanges : undefined,
      },
    };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
      };
    }
    if (err instanceof MarketplaceError) {
      return {
        success: false,
        error: { code: err.code ?? "MARKETPLACE_ERROR", message: err.message },
      };
    }
    console.error("Failed to update marketplace plugin:", err);
    return {
      success: false,
      error: { code: "UPDATE_FAILED", message: "Failed to update plugin" },
    };
  }
}
// ── Uninstall ──────────────────────────────────────────────────────
/**
 * Uninstall a marketplace plugin: delete its stored bundle, optionally its
 * persisted storage rows, and finally its state record.
 *
 * @param opts.deleteData - Also delete the plugin's rows in _plugin_storage
 */
export async function handleMarketplaceUninstall(
  db: Kysely<Database>,
  storage: Storage | null,
  pluginId: string,
  opts?: { deleteData?: boolean },
): Promise<ApiResult<MarketplaceUninstallResult>> {
  try {
    const stateRepo = new PluginStateRepository(db);
    const state = await stateRepo.get(pluginId);
    // Only marketplace-sourced plugins can be uninstalled here.
    if (!state || state.source !== "marketplace") {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `No marketplace plugin found: ${pluginId}`,
        },
      };
    }
    // Remove the stored bundle files when storage is configured.
    if (storage) {
      await deleteBundleFromR2(storage, pluginId, state.marketplaceVersion ?? state.version);
    }
    // Optionally wipe the plugin's persisted storage rows.
    let dataDeleted = false;
    if (opts?.deleteData) {
      try {
        await db.deleteFrom("_plugin_storage").where("plugin_id", "=", pluginId).execute();
        dataDeleted = true;
      } catch {
        // Plugin storage table may not have data for this plugin
      }
    }
    // Remove the state record last so a failed cleanup leaves it visible.
    await stateRepo.delete(pluginId);
    return { success: true, data: { pluginId, dataDeleted } };
  } catch (err) {
    console.error("Failed to uninstall marketplace plugin:", err);
    return {
      success: false,
      error: {
        code: "UNINSTALL_FAILED",
        message: "Failed to uninstall plugin",
      },
    };
  }
}
// ── Update check ───────────────────────────────────────────────────
/**
 * Check all installed marketplace plugins for available updates.
 *
 * Plugins that cannot be checked (marketplace down, plugin delisted) are
 * skipped with a warning rather than failing the whole check.
 *
 * @returns one MarketplaceUpdateCheck entry per checkable installed plugin
 */
export async function handleMarketplaceUpdateCheck(
  db: Kysely<Database>,
  marketplaceUrl: string | undefined,
): Promise<ApiResult<{ items: MarketplaceUpdateCheck[] }>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
    };
  }
  try {
    const stateRepo = new PluginStateRepository(db);
    const marketplacePlugins = await stateRepo.getMarketplacePlugins();
    const items: MarketplaceUpdateCheck[] = [];
    for (const plugin of marketplacePlugins) {
      try {
        const detail = await client.getPlugin(plugin.pluginId);
        const latest = detail.latestVersion?.version;
        const installed = plugin.marketplaceVersion ?? plugin.version;
        if (!latest) continue;
        const hasUpdate = latest !== installed;
        let capabilityChanges: { added: string[]; removed: string[] } | undefined;
        let hasCapabilityChanges = false;
        if (hasUpdate && detail.latestVersion) {
          // NOTE(review): oldCaps comes from the plugin's top-level
          // `capabilities`, which presumably reflects the currently
          // published plugin rather than the locally installed version —
          // confirm against the marketplace API; the installed bundle's
          // manifest would be the authoritative baseline.
          const oldCaps = detail.capabilities ?? [];
          const newCaps = detail.latestVersion.capabilities ?? [];
          capabilityChanges = diffCapabilities(oldCaps, newCaps);
          hasCapabilityChanges =
            capabilityChanges.added.length > 0 || capabilityChanges.removed.length > 0;
        }
        items.push({
          pluginId: plugin.pluginId,
          installed,
          latest: latest ?? installed,
          hasUpdate,
          hasCapabilityChanges,
          capabilityChanges: hasCapabilityChanges ? capabilityChanges : undefined,
          // Route visibility changes require downloading both bundles to compare
          // manifests, which is too expensive for a preview check. The actual
          // enforcement happens at update time in handleMarketplaceUpdate.
          hasRouteVisibilityChanges: false,
        });
      } catch (err) {
        // Skip plugins that can't be checked (marketplace down, plugin delisted)
        console.warn(`Failed to check updates for ${plugin.pluginId}:`, err);
      }
    }
    return { success: true, data: { items } };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
      };
    }
    console.error("Failed to check marketplace updates:", err);
    return {
      success: false,
      error: { code: "UPDATE_CHECK_FAILED", message: "Failed to check for updates" },
    };
  }
}
// ── Proxy ──────────────────────────────────────────────────────────
/**
 * Proxy a plugin search to the marketplace.
 * @returns the marketplace's search payload verbatim
 */
export async function handleMarketplaceSearch(
  marketplaceUrl: string | undefined,
  query?: string,
  opts?: MarketplaceSearchOpts,
): Promise<ApiResult<unknown>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    return {
      success: false,
      error: { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" },
    };
  }
  try {
    return { success: true, data: await client.search(query, opts) };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      return {
        success: false,
        error: { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" },
      };
    }
    console.error("Failed to search marketplace:", err);
    return {
      success: false,
      error: { code: "SEARCH_FAILED", message: "Failed to search marketplace" },
    };
  }
}
/**
 * Fetch a single plugin's detail record from the marketplace.
 *
 * Maps a marketplace 404 to NOT_FOUND, connectivity failures to
 * MARKETPLACE_UNAVAILABLE, and everything else to GET_PLUGIN_FAILED.
 */
export async function handleMarketplaceGetPlugin(
  marketplaceUrl: string | undefined,
  pluginId: string,
): Promise<ApiResult<unknown>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    const error = { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" };
    return { success: false, error };
  }
  try {
    const detail = await client.getPlugin(pluginId);
    return { success: true, data: detail };
  } catch (err) {
    // Check the specific 404 case before the broader availability error.
    if (err instanceof MarketplaceError && err.status === 404) {
      const error = { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` };
      return { success: false, error };
    }
    if (err instanceof MarketplaceUnavailableError) {
      const error = { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" };
      return { success: false, error };
    }
    console.error("Failed to get marketplace plugin:", err);
    const error = { code: "GET_PLUGIN_FAILED", message: "Failed to get plugin details" };
    return { success: false, error };
  }
}
// ── Theme proxy handlers ──────────────────────────────────────────
/**
 * Proxy a theme search to the configured marketplace.
 *
 * Mirrors handleMarketplaceSearch, but hits the theme search endpoint and
 * reports THEME_SEARCH_FAILED for unexpected errors.
 */
export async function handleThemeSearch(
  marketplaceUrl: string | undefined,
  query?: string,
  opts?: MarketplaceThemeSearchOpts,
): Promise<ApiResult<unknown>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    const error = { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" };
    return { success: false, error };
  }
  try {
    return { success: true, data: await client.searchThemes(query, opts) };
  } catch (err) {
    if (err instanceof MarketplaceUnavailableError) {
      const error = { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" };
      return { success: false, error };
    }
    console.error("Failed to search themes:", err);
    const error = { code: "THEME_SEARCH_FAILED", message: "Failed to search themes" };
    return { success: false, error };
  }
}
/**
 * Fetch a single theme's detail record from the marketplace.
 *
 * Maps a marketplace 404 to NOT_FOUND, connectivity failures to
 * MARKETPLACE_UNAVAILABLE, and everything else to GET_THEME_FAILED.
 */
export async function handleThemeGetDetail(
  marketplaceUrl: string | undefined,
  themeId: string,
): Promise<ApiResult<unknown>> {
  const client = getClient(marketplaceUrl);
  if (!client) {
    const error = { code: "MARKETPLACE_NOT_CONFIGURED", message: "Marketplace is not configured" };
    return { success: false, error };
  }
  try {
    const detail = await client.getTheme(themeId);
    return { success: true, data: detail };
  } catch (err) {
    // Check the specific 404 case before the broader availability error.
    if (err instanceof MarketplaceError && err.status === 404) {
      const error = { code: "NOT_FOUND", message: `Theme not found: ${themeId}` };
      return { success: false, error };
    }
    if (err instanceof MarketplaceUnavailableError) {
      const error = { code: "MARKETPLACE_UNAVAILABLE", message: "Marketplace is unavailable" };
      return { success: false, error };
    }
    console.error("Failed to get marketplace theme:", err);
    const error = { code: "GET_THEME_FAILED", message: "Failed to get theme details" };
    return { success: false, error };
  }
}

View File

@@ -0,0 +1,207 @@
/**
* Media CRUD handlers
*/
import type { Kysely } from "kysely";
import { MediaRepository, type MediaItem } from "../../database/repositories/media.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
/** One page of media items; `nextCursor` is present when more pages exist. */
export interface MediaListResponse {
  items: MediaItem[];
  /** Opaque cursor to pass back to the list handler for the next page. */
  nextCursor?: string;
}
/** Response envelope wrapping a single media item. */
export interface MediaResponse {
  item: MediaItem;
}
/**
 * List media items with cursor-based pagination.
 *
 * @param db - application database handle
 * @param params.cursor - opaque pagination cursor from a previous page
 * @param params.limit - page size; defaults to 50, clamped to [1, 100]
 * @param params.mimeType - optional MIME-type filter passed to the repository
 */
export async function handleMediaList(
  db: Kysely<Database>,
  params: {
    cursor?: string;
    limit?: number;
    mimeType?: string;
  },
): Promise<ApiResult<MediaListResponse>> {
  try {
    const repo = new MediaRepository(db);
    // Clamp to [1, 100] so a negative or zero client-supplied limit cannot
    // reach the repository (previously only the upper bound was enforced;
    // `|| 50` also keeps treating 0 as "use the default").
    const limit = Math.max(1, Math.min(params.limit || 50, 100));
    const result = await repo.findMany({
      cursor: params.cursor,
      limit,
      mimeType: params.mimeType,
    });
    return {
      success: true,
      data: {
        items: result.items,
        nextCursor: result.nextCursor,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "MEDIA_LIST_ERROR",
        message: "Failed to list media",
      },
    };
  }
}
/**
 * Get single media item
 *
 * Returns NOT_FOUND when no item with the given id exists; unexpected
 * repository failures surface as MEDIA_GET_ERROR.
 */
export async function handleMediaGet(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<MediaResponse>> {
  try {
    const found = await new MediaRepository(db).findById(id);
    if (found) {
      return { success: true, data: { item: found } };
    }
    return {
      success: false,
      error: {
        code: "NOT_FOUND",
        message: `Media item not found: ${id}`,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "MEDIA_GET_ERROR",
        message: "Failed to get media",
      },
    };
  }
}
/**
* Create media item (after file upload)
*/
export async function handleMediaCreate(
db: Kysely<Database>,
input: {
filename: string;
mimeType: string;
size?: number;
width?: number;
height?: number;
alt?: string;
storageKey: string;
contentHash?: string;
blurhash?: string;
dominantColor?: string;
authorId?: string;
},
): Promise<ApiResult<MediaResponse>> {
try {
const repo = new MediaRepository(db);
const item = await repo.create(input);
return {
success: true,
data: { item },
};
} catch {
return {
success: false,
error: {
code: "MEDIA_CREATE_ERROR",
message: "Failed to create media",
},
};
}
}
/**
 * Update media metadata
 *
 * Only the mutable metadata fields (alt, caption, dimensions) are
 * accepted; returns NOT_FOUND when the id does not exist.
 */
export async function handleMediaUpdate(
  db: Kysely<Database>,
  id: string,
  input: {
    alt?: string;
    caption?: string;
    width?: number;
    height?: number;
  },
): Promise<ApiResult<MediaResponse>> {
  try {
    const updated = await new MediaRepository(db).update(id, input);
    if (updated) {
      return { success: true, data: { item: updated } };
    }
    return {
      success: false,
      error: {
        code: "NOT_FOUND",
        message: `Media item not found: ${id}`,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "MEDIA_UPDATE_ERROR",
        message: "Failed to update media",
      },
    };
  }
}
/**
 * Delete media item
 *
 * Removes the database record; returns NOT_FOUND when the id does not
 * exist.
 */
export async function handleMediaDelete(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const removed = await new MediaRepository(db).delete(id);
    if (removed) {
      return { success: true, data: { deleted: true } };
    }
    return {
      success: false,
      error: {
        code: "NOT_FOUND",
        message: `Media item not found: ${id}`,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "MEDIA_DELETE_ERROR",
        message: "Failed to delete media",
      },
    };
  }
}

View File

@@ -0,0 +1,493 @@
/**
* Menu CRUD handlers
*
* Business logic for menu and menu-item endpoints.
* Routes are thin wrappers that parse input, check auth, and call these.
*/
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import type { Database, MenuItemTable, MenuTable } from "../../database/types.js";
import type { ApiResult } from "../types.js";
// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------
// Row shapes as the handlers return them: the timestamp columns are
// re-typed as strings here (presumably the table types use a different
// representation for these columns — confirm against database/types.ts).
type MenuRow = Omit<MenuTable, "created_at" | "updated_at"> & {
  created_at: string;
  updated_at: string;
};
type MenuItemRow = Omit<MenuItemTable, "created_at"> & {
  created_at: string;
};
/** Menu summary for list views, including how many items it contains. */
export interface MenuListItem extends MenuRow {
  itemCount: number;
}
/** A menu together with all its items, ordered by sort_order. */
export interface MenuWithItems extends MenuRow {
  items: MenuItemRow[];
}
// ---------------------------------------------------------------------------
// Menu handlers
// ---------------------------------------------------------------------------
/**
 * List all menus with item counts.
 *
 * Item counts are gathered with a single GROUP BY aggregate instead of one
 * COUNT(*) query per menu (the previous implementation issued N+1 queries).
 */
export async function handleMenuList(db: Kysely<Database>): Promise<ApiResult<MenuListItem[]>> {
  try {
    const menus = await db
      .selectFrom("_emdash_menus")
      .select(["id", "name", "label", "created_at", "updated_at"])
      .orderBy("name", "asc")
      .execute();
    // One aggregate query covering every menu; menus with no items simply
    // have no row here and fall back to 0 below.
    const countRows = await db
      .selectFrom("_emdash_menu_items")
      .select("menu_id")
      .select(({ fn }) => fn.countAll<number>().as("count"))
      .groupBy("menu_id")
      .execute();
    const countByMenuId = new Map(countRows.map((row) => [row.menu_id, row.count]));
    const menusWithCounts = menus.map((menu) => ({
      ...menu,
      itemCount: countByMenuId.get(menu.id) ?? 0,
    }));
    return { success: true, data: menusWithCounts };
  } catch {
    return {
      success: false,
      error: { code: "MENU_LIST_ERROR", message: "Failed to fetch menus" },
    };
  }
}
/**
 * Create a new menu.
 *
 * `name` is the unique machine key; `label` is the human-readable title.
 * Fails with CONFLICT when a menu with the same name already exists.
 * NOTE(review): the existence check and the insert are not atomic —
 * concurrent creates with the same name could race; confirm whether a
 * unique index on `name` backs this up.
 */
export async function handleMenuCreate(
  db: Kysely<Database>,
  input: { name: string; label: string },
): Promise<ApiResult<MenuRow>> {
  try {
    const duplicate = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", input.name)
      .executeTakeFirst();
    if (duplicate) {
      return {
        success: false,
        error: { code: "CONFLICT", message: `Menu with name "${input.name}" already exists` },
      };
    }
    const menuId = ulid();
    await db
      .insertInto("_emdash_menus")
      .values({ id: menuId, name: input.name, label: input.label })
      .execute();
    // Read the row back so defaults (e.g. timestamps) come from the DB.
    const created = await db
      .selectFrom("_emdash_menus")
      .selectAll()
      .where("id", "=", menuId)
      .executeTakeFirstOrThrow();
    return { success: true, data: created };
  } catch {
    return {
      success: false,
      error: { code: "MENU_CREATE_ERROR", message: "Failed to create menu" },
    };
  }
}
/**
 * Get a single menu with all its items.
 *
 * Looks the menu up by its unique `name`; items come back ordered by
 * sort_order ascending.
 */
export async function handleMenuGet(
  db: Kysely<Database>,
  name: string,
): Promise<ApiResult<MenuWithItems>> {
  try {
    const menuRow = await db
      .selectFrom("_emdash_menus")
      .selectAll()
      .where("name", "=", name)
      .executeTakeFirst();
    if (menuRow === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    const menuItems = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("menu_id", "=", menuRow.id)
      .orderBy("sort_order", "asc")
      .execute();
    return { success: true, data: { ...menuRow, items: menuItems } };
  } catch {
    return {
      success: false,
      error: { code: "MENU_GET_ERROR", message: "Failed to fetch menu" },
    };
  }
}
/**
 * Update a menu's metadata.
 *
 * Only `label` is mutable; `name` is the immutable lookup key. The updated
 * row is read back so callers always receive the persisted state.
 */
export async function handleMenuUpdate(
  db: Kysely<Database>,
  name: string,
  input: { label?: string },
): Promise<ApiResult<MenuRow>> {
  try {
    const menu = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", name)
      .executeTakeFirst();
    if (!menu) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    // Check against undefined (not truthiness) so an explicit empty-string
    // label is written rather than silently ignored — consistent with how
    // handleMenuItemUpdate treats its optional fields.
    if (input.label !== undefined) {
      await db
        .updateTable("_emdash_menus")
        .set({ label: input.label })
        .where("id", "=", menu.id)
        .execute();
    }
    const updated = await db
      .selectFrom("_emdash_menus")
      .selectAll()
      .where("id", "=", menu.id)
      .executeTakeFirstOrThrow();
    return { success: true, data: updated };
  } catch {
    return {
      success: false,
      error: { code: "MENU_UPDATE_ERROR", message: "Failed to update menu" },
    };
  }
}
/**
 * Delete a menu and its items (cascade).
 *
 * NOTE(review): item removal relies on the menu_items foreign key
 * cascading on delete — confirm against the schema migration.
 */
export async function handleMenuDelete(
  db: Kysely<Database>,
  name: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const menuRow = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", name)
      .executeTakeFirst();
    if (menuRow === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    await db.deleteFrom("_emdash_menus").where("id", "=", menuRow.id).execute();
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: { code: "MENU_DELETE_ERROR", message: "Failed to delete menu" },
    };
  }
}
// ---------------------------------------------------------------------------
// Menu item handlers
// ---------------------------------------------------------------------------
/** Input for adding an item to a menu. */
export interface CreateMenuItemInput {
  /** Item kind — presumably selects between reference and custom-URL items; confirm valid values. */
  type: string;
  label: string;
  /** Collection of the referenced entry (for reference-type items). */
  referenceCollection?: string;
  /** ID of the referenced entry (for reference-type items). */
  referenceId?: string;
  /** Literal URL (for custom-link items). */
  customUrl?: string;
  /** Anchor target attribute (e.g. "_blank"). */
  target?: string;
  /** Anchor title attribute. */
  titleAttr?: string;
  /** Extra CSS classes rendered on the item. */
  cssClasses?: string;
  /** Parent item id for nested menus; omit for a top-level item. */
  parentId?: string;
  /** Explicit position; when omitted the item is appended after its siblings. */
  sortOrder?: number;
}
/**
 * Add an item to a menu.
 *
 * When `input.sortOrder` is omitted, the item is appended after its
 * siblings (same parent, same menu) by taking max(sort_order) + 1.
 *
 * @param db - application database handle
 * @param menuName - unique machine name of the target menu
 * @param input - item fields; see CreateMenuItemInput
 * @returns the freshly inserted row, or NOT_FOUND / MENU_ITEM_CREATE_ERROR
 */
export async function handleMenuItemCreate(
  db: Kysely<Database>,
  menuName: string,
  input: CreateMenuItemInput,
): Promise<ApiResult<MenuItemRow>> {
  try {
    // Resolve the menu by name; items are always scoped to a menu id.
    const menu = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", menuName)
      .executeTakeFirst();
    if (!menu) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    let sortOrder = input.sortOrder ?? 0;
    if (input.sortOrder === undefined) {
      // "is" (not "=") so a missing parentId matches NULL parent rows,
      // i.e. top-level siblings.
      const maxOrder = await db
        .selectFrom("_emdash_menu_items")
        .select(({ fn }) => fn.max("sort_order").as("max"))
        .where("menu_id", "=", menu.id)
        .where("parent_id", "is", input.parentId ?? null)
        .executeTakeFirst();
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Kysely fn.max returns unknown; always a number for sort_order column
      sortOrder = ((maxOrder?.max as number) ?? -1) + 1;
    }
    const id = ulid();
    // Optional fields are normalized to NULL so the row has no undefineds.
    await db
      .insertInto("_emdash_menu_items")
      .values({
        id,
        menu_id: menu.id,
        parent_id: input.parentId ?? null,
        sort_order: sortOrder,
        type: input.type,
        reference_collection: input.referenceCollection ?? null,
        reference_id: input.referenceId ?? null,
        custom_url: input.customUrl ?? null,
        label: input.label,
        title_attr: input.titleAttr ?? null,
        target: input.target ?? null,
        css_classes: input.cssClasses ?? null,
      })
      .execute();
    // Read the row back so DB-populated columns (e.g. created_at) are returned.
    const item = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("id", "=", id)
      .executeTakeFirstOrThrow();
    return { success: true, data: item };
  } catch {
    return {
      success: false,
      error: { code: "MENU_ITEM_CREATE_ERROR", message: "Failed to create menu item" },
    };
  }
}
/**
 * Partial update for a menu item. Only fields present (not undefined) are
 * written; `parentId: null` re-parents the item to the top level.
 */
export interface UpdateMenuItemInput {
  label?: string;
  customUrl?: string;
  target?: string;
  titleAttr?: string;
  cssClasses?: string;
  parentId?: string | null;
  sortOrder?: number;
}
/**
 * Update a menu item.
 *
 * Applies only the fields present in `input` (undefined means "leave
 * alone"; an explicit null parentId moves the item to the top level).
 */
export async function handleMenuItemUpdate(
  db: Kysely<Database>,
  menuName: string,
  itemId: string,
  input: UpdateMenuItemInput,
): Promise<ApiResult<MenuItemRow>> {
  try {
    const menu = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", menuName)
      .executeTakeFirst();
    if (menu === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    // Scope the item lookup to the menu so items can't be edited via the
    // wrong menu's URL.
    const existing = await db
      .selectFrom("_emdash_menu_items")
      .select("id")
      .where("id", "=", itemId)
      .where("menu_id", "=", menu.id)
      .executeTakeFirst();
    if (existing === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu item not found" },
      };
    }
    // Map camelCase input fields onto their snake_case columns, keeping
    // only the ones the caller actually provided.
    const columnValues: Array<[string, unknown]> = [
      ["label", input.label],
      ["custom_url", input.customUrl],
      ["target", input.target],
      ["title_attr", input.titleAttr],
      ["css_classes", input.cssClasses],
      ["parent_id", input.parentId],
      ["sort_order", input.sortOrder],
    ];
    const updates: Record<string, unknown> = {};
    for (const [column, value] of columnValues) {
      if (value !== undefined) updates[column] = value;
    }
    if (Object.keys(updates).length > 0) {
      await db.updateTable("_emdash_menu_items").set(updates).where("id", "=", itemId).execute();
    }
    const updated = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("id", "=", itemId)
      .executeTakeFirstOrThrow();
    return { success: true, data: updated };
  } catch {
    return {
      success: false,
      error: { code: "MENU_ITEM_UPDATE_ERROR", message: "Failed to update menu item" },
    };
  }
}
/**
 * Delete a menu item.
 *
 * The delete is scoped to the named menu; a missing menu or an item id
 * that does not belong to it both yield NOT_FOUND.
 */
export async function handleMenuItemDelete(
  db: Kysely<Database>,
  menuName: string,
  itemId: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const menuRow = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", menuName)
      .executeTakeFirst();
    if (menuRow === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    const outcome = await db
      .deleteFrom("_emdash_menu_items")
      .where("id", "=", itemId)
      .where("menu_id", "=", menuRow.id)
      .execute();
    // Zero rows deleted means the item id wasn't in this menu.
    if (outcome[0]?.numDeletedRows === 0n) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu item not found" },
      };
    }
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: { code: "MENU_ITEM_DELETE_ERROR", message: "Failed to delete menu item" },
    };
  }
}
/** One item's target position in a batch reorder: new parent + new sort index. */
export interface ReorderItem {
  id: string;
  /** null places the item at the top level. */
  parentId: string | null;
  sortOrder: number;
}
/**
 * Batch reorder menu items.
 *
 * Applies every position update inside a single transaction so a failure
 * part-way through cannot leave the menu half-reordered (previously each
 * item was updated independently). Updates are scoped to the menu, so ids
 * from other menus are silently ignored. Returns the full, freshly
 * ordered item list.
 */
export async function handleMenuItemReorder(
  db: Kysely<Database>,
  menuName: string,
  items: ReorderItem[],
): Promise<ApiResult<MenuItemRow[]>> {
  try {
    const menu = await db
      .selectFrom("_emdash_menus")
      .select("id")
      .where("name", "=", menuName)
      .executeTakeFirst();
    if (!menu) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "Menu not found" },
      };
    }
    // All-or-nothing: either every item moves or none do.
    await db.transaction().execute(async (trx) => {
      for (const item of items) {
        await trx
          .updateTable("_emdash_menu_items")
          .set({
            parent_id: item.parentId,
            sort_order: item.sortOrder,
          })
          .where("id", "=", item.id)
          .where("menu_id", "=", menu.id)
          .execute();
      }
    });
    const updatedItems = await db
      .selectFrom("_emdash_menu_items")
      .selectAll()
      .where("menu_id", "=", menu.id)
      .orderBy("sort_order", "asc")
      .execute();
    return { success: true, data: updatedItems };
  } catch {
    return {
      success: false,
      error: { code: "MENU_REORDER_ERROR", message: "Failed to reorder menu items" },
    };
  }
}

View File

@@ -0,0 +1,429 @@
/**
* OAuth 2.1 Authorization Code + PKCE handlers.
*
* Implements the server side of the authorization code grant for MCP clients
* (Claude Desktop, VS Code, etc.) per the MCP authorization spec (draft).
*
* Uses arctic for PKCE challenge generation and @emdashcms/auth for token
* utilities. Token infrastructure is shared with the device flow.
*/
import { clampScopes, computeS256Challenge } from "@emdashcms/auth";
import type { RoleLevel } from "@emdashcms/auth";
import { generateCodeVerifier } from "arctic";
import type { Kysely } from "kysely";
import {
generatePrefixedToken,
hashApiToken,
TOKEN_PREFIXES,
VALID_SCOPES,
} from "../../auth/api-tokens.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
import { lookupOAuthClient, validateClientRedirectUri } from "./oauth-clients.js";
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
/** Authorization codes expire after 10 minutes (RFC 6749 §4.1.2 recommends short-lived) */
const AUTH_CODE_TTL_SECONDS = 10 * 60;
/** Access token TTL: 1 hour */
const ACCESS_TOKEN_TTL_SECONDS = 60 * 60;
/** Refresh token TTL: 90 days */
const REFRESH_TOKEN_TTL_SECONDS = 90 * 24 * 60 * 60;
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/** Query parameters of an OAuth 2.1 authorization request (RFC 6749 §4.1.1 + PKCE, RFC 7636). */
export interface AuthorizationParams {
  /** Must be "code"; anything else is rejected. */
  response_type: string;
  client_id: string;
  redirect_uri: string;
  /** Space-delimited scope list; unknown scopes are dropped. */
  scope?: string;
  /** Opaque client value echoed back on the redirect (CSRF binding). */
  state?: string;
  code_challenge: string;
  /** Only "S256" is accepted. */
  code_challenge_method: string;
  /** Target resource indicator (RFC 8707), stored and re-checked at exchange. */
  resource?: string;
}
/** Body parameters of the authorization-code token exchange (RFC 6749 §4.1.3 + PKCE verifier). */
export interface TokenExchangeParams {
  grant_type: string;
  code: string;
  redirect_uri: string;
  client_id: string;
  code_verifier: string;
  resource?: string;
}
/** Successful token endpoint response (RFC 6749 §5.1). */
export interface TokenResponse {
  access_token: string;
  refresh_token: string;
  token_type: "Bearer";
  /** Access-token lifetime in seconds. */
  expires_in: number;
  /** Space-delimited granted scopes. */
  scope: string;
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** ISO-8601 timestamp `seconds` from now, used for expiry columns. */
function expiresAt(seconds: number): string {
  const expiryMs = Date.now() + seconds * 1000;
  return new Date(expiryMs).toISOString();
}
/**
 * Validate a redirect URI per OAuth 2.1 security requirements.
 * Allows localhost (loopback) over HTTP, and any HTTPS URL.
 *
 * @returns null when the URI is acceptable, otherwise a human-readable
 *   rejection reason.
 */
export function validateRedirectUri(uri: string): string | null {
  // Reject protocol-relative URLs *before* parsing: `new URL("//host/…")`
  // throws (no base URL), so when this check lived after the parse it was
  // unreachable and such URIs fell through to the generic error below.
  if (uri.startsWith("//")) {
    return "Protocol-relative redirect URIs are not allowed";
  }
  try {
    const url = new URL(uri);
    // Allow localhost/loopback over HTTP (for desktop MCP clients)
    if (url.protocol === "http:") {
      const host = url.hostname;
      if (host === "127.0.0.1" || host === "localhost" || host === "[::1]") {
        return null; // OK
      }
      return "HTTP redirect URIs are only allowed for localhost";
    }
    // Allow HTTPS
    if (url.protocol === "https:") {
      return null; // OK
    }
    return `Unsupported redirect URI scheme: ${url.protocol}`;
  } catch {
    return "Invalid redirect URI";
  }
}
/**
 * Validate and normalize scopes. Splits a space-delimited scope string,
 * keeping only entries present in VALID_SCOPES; unknown scopes are
 * dropped silently. Returns [] for a missing/empty string.
 */
function normalizeScopes(requested?: string): string[] {
  if (!requested) return [];
  const known = new Set<string>(VALID_SCOPES);
  const accepted: string[] = [];
  for (const scope of requested.split(" ")) {
    if (scope && known.has(scope)) {
      accepted.push(scope);
    }
  }
  return accepted;
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/**
 * Process an authorization request after the user approves consent.
 *
 * Generates an authorization code, stores it with the PKCE challenge,
 * and returns the redirect URL with the code appended.
 *
 * Scopes are clamped to the user's role to prevent scope escalation.
 *
 * @param db - application database handle
 * @param userId - the approving user's id (bound to the issued code)
 * @param userRole - role used to clamp the requested scopes
 * @param params - validated-by-shape authorization request parameters
 * @returns the client redirect URL carrying `code` (and `state` if given),
 *   or a structured error result; never throws.
 */
export async function handleAuthorizationApproval(
  db: Kysely<Database>,
  userId: string,
  userRole: RoleLevel,
  params: AuthorizationParams,
): Promise<ApiResult<{ redirect_url: string }>> {
  try {
    // Validate response_type
    if (params.response_type !== "code") {
      return {
        success: false,
        error: {
          code: "UNSUPPORTED_RESPONSE_TYPE",
          message: "Only response_type=code is supported",
        },
      };
    }
    // Validate redirect_uri scheme/host (basic security check)
    const uriError = validateRedirectUri(params.redirect_uri);
    if (uriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: uriError },
      };
    }
    // Look up the registered OAuth client
    const client = await lookupOAuthClient(db, params.client_id);
    if (!client) {
      return {
        success: false,
        error: {
          code: "INVALID_CLIENT",
          message: "Unknown client_id",
        },
      };
    }
    // Validate redirect_uri against client's registered URIs
    const clientUriError = validateClientRedirectUri(params.redirect_uri, client.redirectUris);
    if (clientUriError) {
      return {
        success: false,
        error: { code: "INVALID_REDIRECT_URI", message: clientUriError },
      };
    }
    // Validate code_challenge_method
    if (params.code_challenge_method !== "S256") {
      return {
        success: false,
        error: {
          code: "INVALID_REQUEST",
          message: "Only S256 code_challenge_method is supported",
        },
      };
    }
    // Validate code_challenge is present
    if (!params.code_challenge) {
      return {
        success: false,
        error: { code: "INVALID_REQUEST", message: "code_challenge is required" },
      };
    }
    // Validate scopes, then clamp to user's role
    const userScopes = clampScopes(normalizeScopes(params.scope), userRole);
    // SEC-41: Intersect with client's registered scopes (if restricted).
    // A client registered with scopes: ["content:read"] should never receive
    // admin or schema:write, regardless of the approving user's role.
    const clientScopes = client.scopes;
    const scopes = clientScopes?.length
      ? userScopes.filter((s: string) => clientScopes.includes(s))
      : userScopes;
    if (scopes.length === 0) {
      return {
        success: false,
        error: { code: "INVALID_SCOPE", message: "No valid scopes requested" },
      };
    }
    // Generate authorization code (high entropy, base64url)
    const code = generateCodeVerifier(); // 32 bytes random, base64url
    const codeHash = hashApiToken(code);
    // Store the authorization code
    // Only the hash is persisted — the raw code exists solely in the
    // redirect URL handed back to the client.
    await db
      .insertInto("_emdash_authorization_codes")
      .values({
        code_hash: codeHash,
        client_id: params.client_id,
        redirect_uri: params.redirect_uri,
        user_id: userId,
        scopes: JSON.stringify(scopes),
        code_challenge: params.code_challenge,
        code_challenge_method: params.code_challenge_method,
        resource: params.resource ?? null,
        expires_at: expiresAt(AUTH_CODE_TTL_SECONDS),
      })
      .execute();
    // Build the redirect URL
    // `state` is echoed back unchanged so the client can bind the response
    // to its original request (RFC 6749 §10.12 CSRF mitigation).
    const redirectUrl = new URL(params.redirect_uri);
    redirectUrl.searchParams.set("code", code);
    if (params.state) {
      redirectUrl.searchParams.set("state", params.state);
    }
    return {
      success: true,
      data: { redirect_url: redirectUrl.toString() },
    };
  } catch (error) {
    console.error("Authorization error:", error);
    return {
      success: false,
      error: {
        code: "AUTHORIZATION_ERROR",
        message: "Failed to process authorization",
      },
    };
  }
}
/**
 * Exchange an authorization code for access + refresh tokens.
 *
 * Validates the code, verifies PKCE, and issues tokens using the same
 * infrastructure as the device flow (ec_oat_*, ec_ort_*).
 *
 * Error codes in this handler are lowercase (e.g. "invalid_grant") to
 * match the RFC 6749 §5.2 token-endpoint error vocabulary, unlike the
 * SCREAMING_CASE codes used elsewhere in this module.
 *
 * @param db - application database handle
 * @param params - token request body fields
 * @returns a Bearer access/refresh token pair, or a structured error.
 */
export async function handleAuthorizationCodeExchange(
  db: Kysely<Database>,
  params: TokenExchangeParams,
): Promise<ApiResult<TokenResponse>> {
  try {
    // Validate grant_type
    if (params.grant_type !== "authorization_code") {
      return {
        success: false,
        error: { code: "unsupported_grant_type", message: "Invalid grant_type" },
      };
    }
    // SEC-39: Atomically consume the authorization code using DELETE...RETURNING.
    // This prevents TOCTOU double-exchange: two concurrent requests with the
    // same code will race on the DELETE, and only one will get a row back.
    const codeHash = hashApiToken(params.code);
    const row = await db
      .deleteFrom("_emdash_authorization_codes")
      .where("code_hash", "=", codeHash)
      .returningAll()
      .executeTakeFirst();
    if (!row) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Invalid authorization code" },
      };
    }
    // Check expiry
    // Note: the code was already consumed by the DELETE above, so an
    // expired code is burned here and cannot be retried — intentional.
    if (new Date(row.expires_at) < new Date()) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "Authorization code expired" },
      };
    }
    // Verify redirect_uri matches exactly
    if (row.redirect_uri !== params.redirect_uri) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "redirect_uri mismatch" },
      };
    }
    // Verify client_id matches
    if (row.client_id !== params.client_id) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "client_id mismatch" },
      };
    }
    // PKCE verification: SHA256(code_verifier) must match stored code_challenge
    const derivedChallenge = computeS256Challenge(params.code_verifier);
    if (derivedChallenge !== row.code_challenge) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "PKCE verification failed" },
      };
    }
    // Verify resource matches (if stored)
    // NOTE(review): only enforced when BOTH sides supplied a resource — a
    // client that omits `resource` at exchange time skips the check; confirm
    // this matches the intended RFC 8707 posture.
    if (row.resource && params.resource && row.resource !== params.resource) {
      return {
        success: false,
        error: { code: "invalid_grant", message: "resource mismatch" },
      };
    }
    // Issue tokens (same as device flow)
    const scopes = JSON.parse(row.scopes) as string[];
    const accessToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_ACCESS);
    const accessExpires = expiresAt(ACCESS_TOKEN_TTL_SECONDS);
    const refreshToken = generatePrefixedToken(TOKEN_PREFIXES.OAUTH_REFRESH);
    const refreshExpires = expiresAt(REFRESH_TOKEN_TTL_SECONDS);
    // Store access token
    // The access row links to its refresh token via refresh_token_hash so
    // both can be revoked together.
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: accessToken.hash,
        token_type: "access",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "mcp",
        expires_at: accessExpires,
        refresh_token_hash: refreshToken.hash,
        client_id: row.client_id,
      })
      .execute();
    // Store refresh token
    await db
      .insertInto("_emdash_oauth_tokens")
      .values({
        token_hash: refreshToken.hash,
        token_type: "refresh",
        user_id: row.user_id,
        scopes: JSON.stringify(scopes),
        client_type: "mcp",
        expires_at: refreshExpires,
        refresh_token_hash: null,
        client_id: row.client_id,
      })
      .execute();
    return {
      success: true,
      data: {
        access_token: accessToken.raw,
        refresh_token: refreshToken.raw,
        token_type: "Bearer",
        expires_in: ACCESS_TOKEN_TTL_SECONDS,
        scope: scopes.join(" "),
      },
    };
  } catch (error) {
    console.error("Token exchange error:", error);
    return {
      success: false,
      error: {
        code: "TOKEN_EXCHANGE_ERROR",
        message: "Failed to exchange authorization code",
      },
    };
  }
}
/**
 * Build the authorization-denied redirect URL (error=access_denied),
 * propagating `state` when the client supplied one.
 */
export function buildDeniedRedirect(redirectUri: string, state?: string): string {
  const target = new URL(redirectUri);
  const params: Array<[string, string]> = [
    ["error", "access_denied"],
    ["error_description", "The user denied the authorization request"],
  ];
  if (state) {
    params.push(["state", state]);
  }
  for (const [key, value] of params) {
    target.searchParams.set(key, value);
  }
  return target.toString();
}
/**
 * Clean up expired authorization codes.
 *
 * @returns the number of rows removed
 */
export async function cleanupExpiredAuthorizationCodes(db: Kysely<Database>): Promise<number> {
  const cutoff = new Date().toISOString();
  const outcome = await db
    .deleteFrom("_emdash_authorization_codes")
    .where("expires_at", "<", cutoff)
    .executeTakeFirst();
  return Number(outcome.numDeletedRows);
}

View File

@@ -0,0 +1,353 @@
/**
* OAuth client management handlers.
*
* CRUD operations for registered OAuth clients. Each client has a set
* of pre-registered redirect URIs. The authorization endpoint rejects
* any redirect_uri not in the client's registered set.
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Parse a JSON string column into a typed value. The caller supplies the
 * expected shape via the type parameter; no runtime validation is done.
 */
function parseJsonColumn<T>(value: string): T {
  const parsed: unknown = JSON.parse(value);
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- JSON.parse returns unknown, callers provide the expected shape
  return parsed as T;
}
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/** Public representation of a registered OAuth client. */
export interface OAuthClientInfo {
  id: string;
  name: string;
  /** Allowlist checked by the authorization endpoint; at least one required. */
  redirectUris: string[];
  /** Scopes the client may receive; null means unrestricted (user-role clamp still applies). */
  scopes: string[] | null;
  createdAt: string;
  updatedAt: string;
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/**
* Create a new OAuth client.
*/
export async function handleOAuthClientCreate(
db: Kysely<Database>,
input: {
id: string;
name: string;
redirectUris: string[];
scopes?: string[] | null;
},
): Promise<ApiResult<OAuthClientInfo>> {
try {
if (input.redirectUris.length === 0) {
return {
success: false,
error: {
code: "VALIDATION_ERROR",
message: "At least one redirect URI is required",
},
};
}
// Check for duplicate client ID
const existing = await db
.selectFrom("_emdash_oauth_clients")
.select("id")
.where("id", "=", input.id)
.executeTakeFirst();
if (existing) {
return {
success: false,
error: { code: "CONFLICT", message: "OAuth client with this ID already exists" },
};
}
const now = new Date().toISOString();
await db
.insertInto("_emdash_oauth_clients")
.values({
id: input.id,
name: input.name,
redirect_uris: JSON.stringify(input.redirectUris),
scopes: input.scopes ? JSON.stringify(input.scopes) : null,
})
.execute();
return {
success: true,
data: {
id: input.id,
name: input.name,
redirectUris: input.redirectUris,
scopes: input.scopes ?? null,
createdAt: now,
updatedAt: now,
},
};
} catch {
return {
success: false,
error: {
code: "CLIENT_CREATE_ERROR",
message: "Failed to create OAuth client",
},
};
}
}
/**
 * List all registered OAuth clients, newest first.
 */
export async function handleOAuthClientList(
  db: Kysely<Database>,
): Promise<ApiResult<{ items: OAuthClientInfo[] }>> {
  try {
    const clientRows = await db
      .selectFrom("_emdash_oauth_clients")
      .selectAll()
      .orderBy("created_at", "desc")
      .execute();
    const items: OAuthClientInfo[] = [];
    for (const row of clientRows) {
      items.push({
        id: row.id,
        name: row.name,
        redirectUris: parseJsonColumn<string[]>(row.redirect_uris),
        scopes: row.scopes ? parseJsonColumn<string[]>(row.scopes) : null,
        createdAt: row.created_at,
        updatedAt: row.updated_at,
      });
    }
    return { success: true, data: { items } };
  } catch {
    return {
      success: false,
      error: {
        code: "CLIENT_LIST_ERROR",
        message: "Failed to list OAuth clients",
      },
    };
  }
}
/**
 * Get a single OAuth client by ID.
 */
export async function handleOAuthClientGet(
  db: Kysely<Database>,
  clientId: string,
): Promise<ApiResult<OAuthClientInfo>> {
  try {
    const clientRow = await db
      .selectFrom("_emdash_oauth_clients")
      .selectAll()
      .where("id", "=", clientId)
      .executeTakeFirst();
    if (clientRow === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "OAuth client not found" },
      };
    }
    const info: OAuthClientInfo = {
      id: clientRow.id,
      name: clientRow.name,
      redirectUris: parseJsonColumn<string[]>(clientRow.redirect_uris),
      scopes: clientRow.scopes ? parseJsonColumn<string[]>(clientRow.scopes) : null,
      createdAt: clientRow.created_at,
      updatedAt: clientRow.updated_at,
    };
    return { success: true, data: info };
  } catch {
    return {
      success: false,
      error: {
        code: "CLIENT_GET_ERROR",
        message: "Failed to get OAuth client",
      },
    };
  }
}
/**
 * Update an OAuth client.
 *
 * Only fields present in `input` are written; `updated_at` is always
 * bumped. Passing `scopes: null` clears the scope restriction — this now
 * stores SQL NULL (the previous implementation stored an empty string,
 * which is neither NULL nor valid JSON and would break any consumer that
 * JSON-parses a non-null scopes value).
 */
export async function handleOAuthClientUpdate(
  db: Kysely<Database>,
  clientId: string,
  input: {
    name?: string;
    redirectUris?: string[];
    scopes?: string[] | null;
  },
): Promise<ApiResult<OAuthClientInfo>> {
  try {
    const existing = await db
      .selectFrom("_emdash_oauth_clients")
      .selectAll()
      .where("id", "=", clientId)
      .executeTakeFirst();
    if (!existing) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "OAuth client not found" },
      };
    }
    if (input.redirectUris !== undefined && input.redirectUris.length === 0) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "At least one redirect URI is required",
        },
      };
    }
    const updates: Record<string, string | null> = {
      updated_at: new Date().toISOString(),
    };
    if (input.name !== undefined) {
      updates.name = input.name;
    }
    if (input.redirectUris !== undefined) {
      updates.redirect_uris = JSON.stringify(input.redirectUris);
    }
    if (input.scopes !== undefined) {
      // null clears the restriction (client becomes unrestricted).
      updates.scopes = input.scopes ? JSON.stringify(input.scopes) : null;
    }
    await db
      .updateTable("_emdash_oauth_clients")
      .set(updates)
      .where("id", "=", clientId)
      .execute();
    // Fetch the updated row
    const updated = await db
      .selectFrom("_emdash_oauth_clients")
      .selectAll()
      .where("id", "=", clientId)
      .executeTakeFirst();
    if (!updated) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "OAuth client not found after update" },
      };
    }
    return {
      success: true,
      data: {
        id: updated.id,
        name: updated.name,
        redirectUris: parseJsonColumn<string[]>(updated.redirect_uris),
        scopes: updated.scopes ? parseJsonColumn<string[]>(updated.scopes) : null,
        createdAt: updated.created_at,
        updatedAt: updated.updated_at,
      },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "CLIENT_UPDATE_ERROR",
        message: "Failed to update OAuth client",
      },
    };
  }
}
/**
 * Delete an OAuth client.
 *
 * Returns NOT_FOUND when no row was deleted.
 */
export async function handleOAuthClientDelete(
  db: Kysely<Database>,
  clientId: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const deletion = await db
      .deleteFrom("_emdash_oauth_clients")
      .where("id", "=", clientId)
      .executeTakeFirst();
    // numDeletedRows is a bigint; 0n means the client never existed.
    if (deletion.numDeletedRows === 0n) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: "OAuth client not found" },
      };
    }
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: {
        code: "CLIENT_DELETE_ERROR",
        message: "Failed to delete OAuth client",
      },
    };
  }
}
// ---------------------------------------------------------------------------
// Lookup helpers (used by authorization handler)
// ---------------------------------------------------------------------------
/**
 * Look up a registered OAuth client by ID.
 * Returns the client's redirect URIs or null if the client is not registered.
 */
export async function lookupOAuthClient(
  db: Kysely<Database>,
  clientId: string,
): Promise<{ redirectUris: string[]; scopes: string[] | null } | null> {
  const client = await db
    .selectFrom("_emdash_oauth_clients")
    .select(["redirect_uris", "scopes"])
    .where("id", "=", clientId)
    .executeTakeFirst();
  if (client === undefined) return null;
  const scopes = client.scopes ? parseJsonColumn<string[]>(client.scopes) : null;
  return { redirectUris: parseJsonColumn<string[]>(client.redirect_uris), scopes };
}
/**
 * Validate that a redirect URI is in the client's registered set.
 *
 * Comparison is exact string match (per RFC 6749 §3.1.2.3).
 * Returns null if valid, or an error message if not.
 */
export function validateClientRedirectUri(
  redirectUri: string,
  allowedUris: string[],
): string | null {
  const registered = allowedUris.some((uri) => uri === redirectUri);
  return registered ? null : "redirect_uri is not registered for this client";
}

View File

@@ -0,0 +1,39 @@
/**
* Shared user lookup for OAuth token operations.
*
* Extracts user role and disabled status from the database. Used by
* handleTokenRefresh() to revalidate scopes against the user's current
* role and reject disabled users.
*/
import { toRoleLevel, type RoleLevel } from "@emdashcms/auth";
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
/** Snapshot of a user's current role and account status. */
export interface UserRoleAndStatus {
  // Role level derived from the stored role string via toRoleLevel().
  role: RoleLevel;
  // True when the user's `disabled` column equals 1.
  disabled: boolean;
}
/**
 * Look up a user's current role and disabled status.
 * Returns null if the user doesn't exist.
 */
export async function lookupUserRoleAndStatus(
  db: Kysely<Database>,
  userId: string,
): Promise<UserRoleAndStatus | null> {
  const user = await db
    .selectFrom("users")
    .select(["role", "disabled"])
    .where("id", "=", userId)
    .executeTakeFirst();
  if (user === undefined) return null;
  // `disabled` is stored as an integer flag; 1 means the account is disabled.
  return { role: toRoleLevel(user.role), disabled: user.disabled === 1 };
}

View File

@@ -0,0 +1,254 @@
/**
* Plugin management handlers
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import { PluginStateRepository, type PluginState, type PluginStatus } from "../../plugins/state.js";
import type { ResolvedPlugin } from "../../plugins/types.js";
import type { ApiResult } from "../types.js";
/** API-facing description of a plugin merged from config and stored state. */
export interface PluginInfo {
  id: string;
  // Display name; falls back to the plugin id when no stored name exists.
  name: string;
  version: string;
  // NOTE(review): unset in v2 (buildPluginInfo always emits undefined) —
  // presumably kept for API backward compatibility; confirm before removing.
  package?: string;
  // Derived convenience flag: true when status === "active".
  enabled: boolean;
  status: PluginStatus;
  // Where the plugin came from; defaults to "config" when no state row exists.
  source?: "config" | "marketplace";
  marketplaceVersion?: string;
  capabilities: string[];
  hasAdminPages: boolean;
  hasDashboardWidgets: boolean;
  hasHooks: boolean;
  // ISO 8601 timestamps (serialized via toISOString()).
  installedAt?: string;
  activatedAt?: string;
  deactivatedAt?: string;
  /** Description of what the plugin does */
  description?: string;
  /** URL to the plugin icon on the marketplace */
  iconUrl?: string;
}
/** Response payload for listing plugins. */
export interface PluginListResponse {
  items: PluginInfo[];
}
/** Response payload for a single plugin. */
export interface PluginResponse {
  item: PluginInfo;
}
/** Build the marketplace icon endpoint URL for a plugin id. */
function marketplaceIconUrl(marketplaceUrl: string, pluginId: string): string {
  const encodedId = encodeURIComponent(pluginId);
  return [marketplaceUrl, "api", "v1", "plugins", encodedId, "icon"].join("/");
}
/**
 * Get plugin info from configured plugin and database state.
 *
 * Merges the statically configured plugin definition with its persisted
 * state row (if any) into the API-facing PluginInfo shape.
 */
function buildPluginInfo(
  plugin: ResolvedPlugin,
  state: PluginState | null,
  marketplaceUrl?: string,
): PluginInfo {
  // If no state exists, plugin is considered active (default on first run)
  const status = state?.status ?? "active";
  const source = state?.source ?? "config";
  const fromMarketplace = source === "marketplace";
  const iconUrl =
    fromMarketplace && marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, plugin.id) : undefined;
  return {
    id: plugin.id,
    name: state?.displayName || plugin.id,
    version: plugin.version,
    package: undefined, // v2 doesn't have package field
    enabled: status === "active",
    status,
    source,
    marketplaceVersion: state?.marketplaceVersion ?? undefined,
    capabilities: plugin.capabilities,
    hasAdminPages: (plugin.admin.pages?.length ?? 0) > 0,
    hasDashboardWidgets: (plugin.admin.widgets?.length ?? 0) > 0,
    hasHooks: Object.keys(plugin.hooks ?? {}).length > 0,
    installedAt: state?.installedAt?.toISOString(),
    activatedAt: state?.activatedAt?.toISOString() ?? undefined,
    deactivatedAt: state?.deactivatedAt?.toISOString() ?? undefined,
    description: state?.description ?? undefined,
    iconUrl,
  };
}
/**
* List all configured plugins with their state
*/
export async function handlePluginList(
db: Kysely<Database>,
configuredPlugins: ResolvedPlugin[],
marketplaceUrl?: string,
): Promise<ApiResult<PluginListResponse>> {
try {
const stateRepo = new PluginStateRepository(db);
const allStates = await stateRepo.getAll();
const stateMap = new Map(allStates.map((s) => [s.pluginId, s]));
const configuredIds = new Set(configuredPlugins.map((p) => p.id));
const items = configuredPlugins.map((plugin) => {
const state = stateMap.get(plugin.id) ?? null;
return buildPluginInfo(plugin, state, marketplaceUrl);
});
// Include marketplace-installed plugins that aren't in the configured plugins list
for (const state of allStates) {
if (state.source !== "marketplace") continue;
if (configuredIds.has(state.pluginId)) continue;
items.push({
id: state.pluginId,
name: state.displayName || state.pluginId,
version: state.marketplaceVersion ?? state.version,
enabled: state.status === "active",
status: state.status,
source: "marketplace",
marketplaceVersion: state.marketplaceVersion ?? undefined,
capabilities: [],
hasAdminPages: false,
hasDashboardWidgets: false,
hasHooks: false,
installedAt: state.installedAt?.toISOString(),
activatedAt: state.activatedAt?.toISOString() ?? undefined,
deactivatedAt: state.deactivatedAt?.toISOString() ?? undefined,
description: state.description ?? undefined,
iconUrl: marketplaceUrl ? marketplaceIconUrl(marketplaceUrl, state.pluginId) : undefined,
});
}
return {
success: true,
data: { items },
};
} catch {
return {
success: false,
error: {
code: "PLUGIN_LIST_ERROR",
message: "Failed to list plugins",
},
};
}
}
/**
 * Get a single plugin's info.
 *
 * Only plugins present in the configured list can be fetched here.
 */
export async function handlePluginGet(
  db: Kysely<Database>,
  configuredPlugins: ResolvedPlugin[],
  pluginId: string,
  marketplaceUrl?: string,
): Promise<ApiResult<PluginResponse>> {
  try {
    const plugin = configuredPlugins.find((candidate) => candidate.id === pluginId);
    if (plugin === undefined) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` },
      };
    }
    const state = await new PluginStateRepository(db).get(pluginId);
    return {
      success: true,
      data: { item: buildPluginInfo(plugin, state, marketplaceUrl) },
    };
  } catch {
    return {
      success: false,
      error: { code: "PLUGIN_GET_ERROR", message: "Failed to get plugin" },
    };
  }
}
/**
 * Enable a plugin.
 *
 * @param marketplaceUrl - Optional marketplace base URL. When provided,
 *   marketplace-installed plugins keep their `iconUrl` in the response,
 *   consistent with handlePluginGet/handlePluginList. Omitting it preserves
 *   the previous behavior (no iconUrl).
 */
export async function handlePluginEnable(
  db: Kysely<Database>,
  configuredPlugins: ResolvedPlugin[],
  pluginId: string,
  marketplaceUrl?: string,
): Promise<ApiResult<PluginResponse>> {
  try {
    const plugin = configuredPlugins.find((p) => p.id === pluginId);
    if (!plugin) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` },
      };
    }
    const stateRepo = new PluginStateRepository(db);
    const state = await stateRepo.enable(pluginId, plugin.version);
    return {
      success: true,
      data: { item: buildPluginInfo(plugin, state, marketplaceUrl) },
    };
  } catch {
    return {
      success: false,
      error: { code: "PLUGIN_ENABLE_ERROR", message: "Failed to enable plugin" },
    };
  }
}
/**
 * Disable a plugin.
 *
 * @param marketplaceUrl - Optional marketplace base URL. When provided,
 *   marketplace-installed plugins keep their `iconUrl` in the response,
 *   consistent with handlePluginGet/handlePluginList. Omitting it preserves
 *   the previous behavior (no iconUrl).
 */
export async function handlePluginDisable(
  db: Kysely<Database>,
  configuredPlugins: ResolvedPlugin[],
  pluginId: string,
  marketplaceUrl?: string,
): Promise<ApiResult<PluginResponse>> {
  try {
    const plugin = configuredPlugins.find((p) => p.id === pluginId);
    if (!plugin) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Plugin not found: ${pluginId}` },
      };
    }
    const stateRepo = new PluginStateRepository(db);
    const state = await stateRepo.disable(pluginId, plugin.version);
    return {
      success: true,
      data: { item: buildPluginInfo(plugin, state, marketplaceUrl) },
    };
  } catch {
    return {
      success: false,
      error: { code: "PLUGIN_DISABLE_ERROR", message: "Failed to disable plugin" },
    };
  }
}

View File

@@ -0,0 +1,360 @@
/**
* Redirect CRUD and 404 log handlers
*/
import type { Kysely } from "kysely";
import {
RedirectRepository,
type Redirect,
type NotFoundEntry,
type NotFoundSummary,
} from "../../database/repositories/redirect.js";
import type { FindManyResult } from "../../database/repositories/types.js";
import type { Database } from "../../database/types.js";
import { validatePattern, validateDestinationParams, isPattern } from "../../redirects/patterns.js";
import type { ApiResult } from "../types.js";
// ---------------------------------------------------------------------------
// Redirects
// ---------------------------------------------------------------------------
/**
 * List redirects with cursor pagination and optional filters.
 */
export async function handleRedirectList(
  db: Kysely<Database>,
  params: {
    cursor?: string;
    limit?: number;
    search?: string;
    group?: string;
    enabled?: boolean;
    auto?: boolean;
  },
): Promise<ApiResult<FindManyResult<Redirect>>> {
  try {
    const page = await new RedirectRepository(db).findMany(params);
    return { success: true, data: page };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_LIST_ERROR", message: "Failed to fetch redirects" },
    };
  }
}
/**
 * Create a redirect rule.
 *
 * Validation order: self-redirect check, pattern syntax, destination
 * params, then duplicate-source check. Defaults: 301, enabled, no group.
 */
export async function handleRedirectCreate(
  db: Kysely<Database>,
  input: {
    source: string;
    destination: string;
    type?: number;
    enabled?: boolean;
    groupName?: string | null;
  },
): Promise<ApiResult<Redirect>> {
  try {
    const repo = new RedirectRepository(db);
    // Source and destination must differ
    if (input.source === input.destination) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "Source and destination must be different",
        },
      };
    }
    // If source looks like a pattern, validate it
    const sourceIsPattern = isPattern(input.source);
    if (sourceIsPattern) {
      const patternError = validatePattern(input.source);
      if (patternError !== null && patternError !== undefined) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: `Invalid source pattern: ${patternError}` },
        };
      }
      // Validate destination params reference valid source params
      const destError = validateDestinationParams(input.source, input.destination);
      if (destError) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: destError },
        };
      }
    }
    // Check for duplicate source (exact match only for non-patterns)
    const duplicate = await repo.findBySource(input.source);
    if (duplicate) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `A redirect from "${input.source}" already exists`,
        },
      };
    }
    const created = await repo.create({
      source: input.source,
      destination: input.destination,
      type: input.type ?? 301,
      isPattern: sourceIsPattern,
      enabled: input.enabled ?? true,
      groupName: input.groupName ?? null,
    });
    return { success: true, data: created };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_CREATE_ERROR", message: "Failed to create redirect" },
    };
  }
}
/**
 * Get a redirect by ID.
 */
export async function handleRedirectGet(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<Redirect>> {
  try {
    const found = await new RedirectRepository(db).findById(id);
    if (!found) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
      };
    }
    return { success: true, data: found };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_GET_ERROR", message: "Failed to fetch redirect" },
    };
  }
}
/**
 * Update a redirect by ID.
 *
 * Partial update: omitted fields keep their existing values. Validation is
 * performed against the merged (existing + input) source/destination pair,
 * in this order: existence, self-redirect, source pattern syntax, duplicate
 * source, destination params. The validated result is then persisted via
 * the repository.
 */
export async function handleRedirectUpdate(
  db: Kysely<Database>,
  id: string,
  input: {
    source?: string;
    destination?: string;
    type?: number;
    enabled?: boolean;
    groupName?: string | null;
  },
): Promise<ApiResult<Redirect>> {
  try {
    const repo = new RedirectRepository(db);
    const existing = await repo.findById(id);
    if (!existing) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
      };
    }
    // Merge input over the stored row for cross-field validation.
    const newSource = input.source ?? existing.source;
    const newDest = input.destination ?? existing.destination;
    // Source and destination must differ
    if (newSource === newDest) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "Source and destination must be different",
        },
      };
    }
    // If source is changing, validate patterns
    if (input.source !== undefined) {
      const sourceIsPattern = isPattern(input.source);
      if (sourceIsPattern) {
        const patternError = validatePattern(input.source);
        if (patternError) {
          return {
            success: false,
            error: {
              code: "VALIDATION_ERROR",
              message: `Invalid source pattern: ${patternError}`,
            },
          };
        }
      }
      // NOTE(review): sourceIsPattern is computed but not passed to
      // repo.update() below — if the repository does not recompute the
      // is_pattern flag itself, changing a plain source to a pattern (or
      // vice versa) would leave a stale flag. Verify against the repository.
      // Check for duplicate source (exclude self)
      const dup = await repo.findBySource(input.source);
      if (dup && dup.id !== id) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `A redirect from "${input.source}" already exists`,
          },
        };
      }
    }
    // Validate destination params against the (possibly updated) source
    if (isPattern(newSource)) {
      const destError = validateDestinationParams(newSource, newDest);
      if (destError) {
        return {
          success: false,
          error: { code: "VALIDATION_ERROR", message: destError },
        };
      }
    }
    const updated = await repo.update(id, {
      source: input.source,
      destination: input.destination,
      type: input.type,
      enabled: input.enabled,
      groupName: input.groupName,
    });
    // The row was found above, so a falsy result here is an update failure,
    // not a missing record.
    if (!updated) {
      return {
        success: false,
        error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
      };
    }
    return { success: true, data: updated };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_UPDATE_ERROR", message: "Failed to update redirect" },
    };
  }
}
/**
 * Delete a redirect by ID.
 */
export async function handleRedirectDelete(
  db: Kysely<Database>,
  id: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const removed = await new RedirectRepository(db).delete(id);
    if (!removed) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Redirect "${id}" not found` },
      };
    }
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: { code: "REDIRECT_DELETE_ERROR", message: "Failed to delete redirect" },
    };
  }
}
// ---------------------------------------------------------------------------
// 404 Log
// ---------------------------------------------------------------------------
/**
 * List 404 log entries with cursor pagination.
 */
export async function handleNotFoundList(
  db: Kysely<Database>,
  params: { cursor?: string; limit?: number; search?: string },
): Promise<ApiResult<FindManyResult<NotFoundEntry>>> {
  try {
    const page = await new RedirectRepository(db).find404s(params);
    return { success: true, data: page };
  } catch {
    return {
      success: false,
      error: { code: "NOT_FOUND_LIST_ERROR", message: "Failed to fetch 404 log" },
    };
  }
}
/**
 * Get 404 summary (grouped by path, sorted by count).
 */
export async function handleNotFoundSummary(
  db: Kysely<Database>,
  limit?: number,
): Promise<ApiResult<{ items: NotFoundSummary[] }>> {
  try {
    const summary = await new RedirectRepository(db).get404Summary(limit);
    return { success: true, data: { items: summary } };
  } catch {
    return {
      success: false,
      error: { code: "NOT_FOUND_SUMMARY_ERROR", message: "Failed to fetch 404 summary" },
    };
  }
}
/**
 * Clear all 404 log entries.
 *
 * Returns the number of removed entries.
 */
export async function handleNotFoundClear(
  db: Kysely<Database>,
): Promise<ApiResult<{ deleted: number }>> {
  try {
    const removedCount = await new RedirectRepository(db).clear404s();
    return { success: true, data: { deleted: removedCount } };
  } catch {
    return {
      success: false,
      error: { code: "NOT_FOUND_CLEAR_ERROR", message: "Failed to clear 404 log" },
    };
  }
}
/**
 * Prune 404 log entries older than a given date.
 *
 * Returns the number of removed entries.
 */
export async function handleNotFoundPrune(
  db: Kysely<Database>,
  olderThan: string,
): Promise<ApiResult<{ deleted: number }>> {
  try {
    const removedCount = await new RedirectRepository(db).prune404s(olderThan);
    return { success: true, data: { deleted: removedCount } };
  } catch {
    return {
      success: false,
      error: { code: "NOT_FOUND_PRUNE_ERROR", message: "Failed to prune 404 log" },
    };
  }
}

View File

@@ -0,0 +1,145 @@
/**
* Revision history handlers
*/
import type { Kysely } from "kysely";
import { ContentRepository } from "../../database/repositories/content.js";
import { RevisionRepository, type Revision } from "../../database/repositories/revision.js";
import type { Database } from "../../database/types.js";
import type { ApiResult, ContentResponse } from "../types.js";
/** Page of revisions for one content entry. */
export interface RevisionListResponse {
  // Revisions for the entry; ordering is defined by the repository —
  // presumably newest first, confirm against RevisionRepository.findByEntry.
  items: Revision[];
  // Total number of revisions for the entry, not just this page.
  total: number;
}
/** Wrapper for a single revision. */
export interface RevisionResponse {
  item: Revision;
}
/**
 * List revisions for a content entry.
 *
 * Page size defaults to 50 and is capped at 100; the total count is
 * fetched concurrently with the page.
 */
export async function handleRevisionList(
  db: Kysely<Database>,
  collection: string,
  entryId: string,
  params: { limit?: number } = {},
): Promise<ApiResult<RevisionListResponse>> {
  try {
    const repo = new RevisionRepository(db);
    const pageSize = Math.min(params.limit || 50, 100);
    const [items, total] = await Promise.all([
      repo.findByEntry(collection, entryId, { limit: pageSize }),
      repo.countByEntry(collection, entryId),
    ]);
    return { success: true, data: { items, total } };
  } catch {
    return {
      success: false,
      error: { code: "REVISION_LIST_ERROR", message: "Failed to list revisions" },
    };
  }
}
/**
 * Get a specific revision.
 */
export async function handleRevisionGet(
  db: Kysely<Database>,
  revisionId: string,
): Promise<ApiResult<RevisionResponse>> {
  try {
    const revision = await new RevisionRepository(db).findById(revisionId);
    if (!revision) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Revision not found: ${revisionId}` },
      };
    }
    return { success: true, data: { item: revision } };
  } catch {
    return {
      success: false,
      error: { code: "REVISION_GET_ERROR", message: "Failed to get revision" },
    };
  }
}
/**
 * Restore a revision (updates content to this revision's data and creates new revision)
 *
 * Flow: load the revision, write its data back onto the content entry,
 * record a new revision (attributed to the caller, not the original
 * author), then prune old revisions in the background.
 *
 * @param revisionId - ID of the revision to restore.
 * @param callerUserId - User performing the restore; recorded as the
 *   author of the new revision.
 */
export async function handleRevisionRestore(
  db: Kysely<Database>,
  revisionId: string,
  callerUserId: string,
): Promise<ApiResult<ContentResponse>> {
  try {
    const revisionRepo = new RevisionRepository(db);
    const contentRepo = new ContentRepository(db);
    // Get the revision
    const revision = await revisionRepo.findById(revisionId);
    if (!revision) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Revision not found: ${revisionId}`,
        },
      };
    }
    // Extract _slug from revision data (stored as metadata, not a real column)
    const { _slug, ...fieldData } = revision.data;
    // Update the content with the revision's data. A non-string _slug is
    // ignored (slug stays unchanged).
    const item = await contentRepo.update(revision.collection, revision.entryId, {
      data: fieldData,
      slug: typeof _slug === "string" ? _slug : undefined,
    });
    // Create a new revision to record the restore, attributed to the caller.
    // Note: the full revision.data (including _slug) is stored, matching the
    // shape of the revision being restored.
    await revisionRepo.create({
      collection: revision.collection,
      entryId: revision.entryId,
      data: revision.data,
      authorId: callerUserId,
    });
    // Fire-and-forget: prune old revisions to prevent unbounded growth
    // (keeps 50 per entry; failures are deliberately swallowed).
    void revisionRepo.pruneOldRevisions(revision.collection, revision.entryId, 50).catch(() => {});
    return {
      success: true,
      data: { item },
    };
  } catch {
    return {
      success: false,
      error: {
        code: "REVISION_RESTORE_ERROR",
        message: "Failed to restore revision",
      },
    };
  }
}

View File

@@ -0,0 +1,534 @@
/**
* Schema/collection management handlers
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import {
SchemaRegistry,
SchemaError,
type Collection,
type Field,
type CreateCollectionInput,
type UpdateCollectionInput,
type CreateFieldInput,
type UpdateFieldInput,
type CollectionWithFields,
} from "../../schema/index.js";
import type { ApiResult } from "../types.js";
/** Response payload for listing collections. */
export interface CollectionListResponse {
  items: Collection[];
}
/** Response payload for a single collection (without fields). */
export interface CollectionResponse {
  item: Collection;
}
/** Response payload for a collection including its field definitions. */
export interface CollectionWithFieldsResponse {
  item: CollectionWithFields;
}
/** Response payload for listing a collection's fields. */
export interface FieldListResponse {
  items: Field[];
}
/** Response payload for a single field. */
export interface FieldResponse {
  item: Field;
}
/**
 * List all collections.
 */
export async function handleSchemaCollectionList(
  db: Kysely<Database>,
): Promise<ApiResult<CollectionListResponse>> {
  try {
    const collections = await new SchemaRegistry(db).listCollections();
    return { success: true, data: { items: collections } };
  } catch {
    return {
      success: false,
      error: { code: "SCHEMA_LIST_ERROR", message: "Failed to list collections" },
    };
  }
}
/**
 * Get a collection by slug.
 *
 * When options.includeFields is set, the collection is returned together
 * with its field definitions.
 */
export async function handleSchemaCollectionGet(
  db: Kysely<Database>,
  slug: string,
  options?: { includeFields?: boolean },
): Promise<ApiResult<CollectionResponse | CollectionWithFieldsResponse>> {
  try {
    const registry = new SchemaRegistry(db);
    const item = options?.includeFields
      ? await registry.getCollectionWithFields(slug)
      : await registry.getCollection(slug);
    if (!item) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Collection not found: ${slug}` },
      };
    }
    return { success: true, data: { item } };
  } catch {
    return {
      success: false,
      error: { code: "SCHEMA_GET_ERROR", message: "Failed to get collection" },
    };
  }
}
/**
 * Create a collection.
 *
 * SchemaError failures (validation, conflicts) are surfaced with their own
 * code/message/details; anything else is logged and reported generically.
 */
export async function handleSchemaCollectionCreate(
  db: Kysely<Database>,
  input: CreateCollectionInput,
): Promise<ApiResult<CollectionResponse>> {
  try {
    const created = await new SchemaRegistry(db).createCollection(input);
    return { success: true, data: { item: created } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    console.error("[emdash] Failed to create collection:", error);
    return {
      success: false,
      error: { code: "SCHEMA_CREATE_ERROR", message: "Failed to create collection" },
    };
  }
}
/**
 * Update a collection.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaCollectionUpdate(
  db: Kysely<Database>,
  slug: string,
  input: UpdateCollectionInput,
): Promise<ApiResult<CollectionResponse>> {
  try {
    const updated = await new SchemaRegistry(db).updateCollection(slug, input);
    return { success: true, data: { item: updated } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_UPDATE_ERROR", message: "Failed to update collection" },
    };
  }
}
/**
 * Delete a collection.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaCollectionDelete(
  db: Kysely<Database>,
  slug: string,
  options?: { force?: boolean },
): Promise<ApiResult<{ success: boolean }>> {
  try {
    await new SchemaRegistry(db).deleteCollection(slug, options);
    return { success: true, data: { success: true } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_DELETE_ERROR", message: "Failed to delete collection" },
    };
  }
}
/**
 * List fields for a collection.
 */
export async function handleSchemaFieldList(
  db: Kysely<Database>,
  collectionSlug: string,
): Promise<ApiResult<FieldListResponse>> {
  try {
    const registry = new SchemaRegistry(db);
    // Resolve the collection first; fields are keyed by collection id.
    const collection = await registry.getCollection(collectionSlug);
    if (!collection) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Collection not found: ${collectionSlug}` },
      };
    }
    const fields = await registry.listFields(collection.id);
    return { success: true, data: { items: fields } };
  } catch {
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_LIST_ERROR", message: "Failed to list fields" },
    };
  }
}
/**
 * Get a field.
 */
export async function handleSchemaFieldGet(
  db: Kysely<Database>,
  collectionSlug: string,
  fieldSlug: string,
): Promise<ApiResult<FieldResponse>> {
  try {
    const field = await new SchemaRegistry(db).getField(collectionSlug, fieldSlug);
    if (!field) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Field not found: ${fieldSlug} in collection ${collectionSlug}`,
        },
      };
    }
    return { success: true, data: { item: field } };
  } catch {
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_GET_ERROR", message: "Failed to get field" },
    };
  }
}
/**
 * Create a field.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaFieldCreate(
  db: Kysely<Database>,
  collectionSlug: string,
  input: CreateFieldInput,
): Promise<ApiResult<FieldResponse>> {
  try {
    const created = await new SchemaRegistry(db).createField(collectionSlug, input);
    return { success: true, data: { item: created } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_CREATE_ERROR", message: "Failed to create field" },
    };
  }
}
/**
 * Update a field.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaFieldUpdate(
  db: Kysely<Database>,
  collectionSlug: string,
  fieldSlug: string,
  input: UpdateFieldInput,
): Promise<ApiResult<FieldResponse>> {
  try {
    const updated = await new SchemaRegistry(db).updateField(collectionSlug, fieldSlug, input);
    return { success: true, data: { item: updated } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_UPDATE_ERROR", message: "Failed to update field" },
    };
  }
}
/**
 * Delete a field.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaFieldDelete(
  db: Kysely<Database>,
  collectionSlug: string,
  fieldSlug: string,
): Promise<ApiResult<{ success: boolean }>> {
  try {
    await new SchemaRegistry(db).deleteField(collectionSlug, fieldSlug);
    return { success: true, data: { success: true } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_DELETE_ERROR", message: "Failed to delete field" },
    };
  }
}
/**
 * Reorder fields.
 *
 * Takes the full ordered list of field slugs for the collection.
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleSchemaFieldReorder(
  db: Kysely<Database>,
  collectionSlug: string,
  fieldSlugs: string[],
): Promise<ApiResult<{ success: boolean }>> {
  try {
    await new SchemaRegistry(db).reorderFields(collectionSlug, fieldSlugs);
    return { success: true, data: { success: true } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "SCHEMA_FIELD_REORDER_ERROR", message: "Failed to reorder fields" },
    };
  }
}
// ============================================
// Orphaned Table Discovery
// ============================================
/** A content table with no matching collection registration. */
export interface OrphanedTable {
  // Collection-style slug derived from the table name.
  slug: string;
  // Physical table name in the database.
  tableName: string;
  // Number of rows currently in the table.
  rowCount: number;
}
/** Response payload for listing orphaned tables. */
export interface OrphanedTableListResponse {
  items: OrphanedTable[];
}
/**
 * List orphaned content tables.
 */
export async function handleOrphanedTableList(
  db: Kysely<Database>,
): Promise<ApiResult<OrphanedTableListResponse>> {
  try {
    const orphans = await new SchemaRegistry(db).discoverOrphanedTables();
    return { success: true, data: { items: orphans } };
  } catch (error) {
    console.error("[emdash] Failed to list orphaned tables:", error);
    return {
      success: false,
      error: { code: "ORPHAN_LIST_ERROR", message: "Failed to list orphaned tables" },
    };
  }
}
/**
 * Register an orphaned table as a collection.
 *
 * SchemaError failures are surfaced with their own code/message/details.
 */
export async function handleOrphanedTableRegister(
  db: Kysely<Database>,
  slug: string,
  options?: {
    label?: string;
    labelSingular?: string;
    description?: string;
  },
): Promise<ApiResult<CollectionResponse>> {
  try {
    const registered = await new SchemaRegistry(db).registerOrphanedTable(slug, options);
    return { success: true, data: { item: registered } };
  } catch (error) {
    if (error instanceof SchemaError) {
      const { code, message, details } = error;
      return { success: false, error: { code, message, details } };
    }
    return {
      success: false,
      error: { code: "ORPHAN_REGISTER_ERROR", message: "Failed to register orphaned table" },
    };
  }
}

View File

@@ -0,0 +1,289 @@
/**
* Section CRUD handlers
*/
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import type { FindManyResult } from "../../database/repositories/types.js";
import type { Database } from "../../database/types.js";
import {
getSectionById,
getSectionWithDb,
getSectionsWithDb,
type Section,
type GetSectionsOptions,
} from "../../sections/index.js";
import type { ApiResult } from "../types.js";
// Allowed section slug shape: lowercase letters, digits, and hyphens only.
const SLUG_PATTERN = /^[a-z0-9-]+$/;
/** Paginated list of sections. */
export type SectionListResponse = FindManyResult<Section>;
/**
 * List sections with optional filters.
 *
 * Only the supported filter/pagination options are forwarded to the query.
 */
export async function handleSectionList(
  db: Kysely<Database>,
  params: GetSectionsOptions,
): Promise<ApiResult<SectionListResponse>> {
  try {
    const { source, search, limit, cursor } = params;
    const page = await getSectionsWithDb(db, { source, search, limit, cursor });
    return { success: true, data: page };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_LIST_ERROR", message: "Failed to fetch sections" },
    };
  }
}
/**
 * Create a section.
 *
 * Validates the slug shape, rejects duplicate slugs, inserts the row
 * (JSON-serializing keywords and content), then reads it back so the
 * response matches what was persisted.
 */
export async function handleSectionCreate(
  db: Kysely<Database>,
  input: {
    slug: string;
    title: string;
    description?: string;
    keywords?: string[];
    content: unknown[];
    previewMediaId?: string;
    source?: string;
    themeId?: string;
  },
): Promise<ApiResult<Section>> {
  try {
    // Validate slug format
    if (!SLUG_PATTERN.test(input.slug)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "slug must only contain lowercase letters, numbers, and hyphens",
        },
      };
    }
    // Check if slug already exists
    const duplicate = await db
      .selectFrom("_emdash_sections")
      .select("id")
      .where("slug", "=", input.slug)
      .executeTakeFirst();
    if (duplicate) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Section with slug "${input.slug}" already exists`,
        },
      };
    }
    const sectionId = ulid();
    const timestamp = new Date().toISOString();
    await db
      .insertInto("_emdash_sections")
      .values({
        id: sectionId,
        slug: input.slug,
        title: input.title,
        description: input.description ?? null,
        keywords: input.keywords ? JSON.stringify(input.keywords) : null,
        content: JSON.stringify(input.content),
        preview_media_id: input.previewMediaId ?? null,
        source: input.source ?? "user",
        theme_id: input.themeId ?? null,
        created_at: timestamp,
        updated_at: timestamp,
      })
      .execute();
    const created = await getSectionById(sectionId, db);
    if (!created) {
      return {
        success: false,
        error: { code: "SECTION_CREATE_ERROR", message: "Failed to fetch created section" },
      };
    }
    return { success: true, data: created };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_CREATE_ERROR", message: "Failed to create section" },
    };
  }
}
/**
 * Fetch a single section by its slug.
 */
export async function handleSectionGet(
  db: Kysely<Database>,
  slug: string,
): Promise<ApiResult<Section>> {
  try {
    const found = await getSectionWithDb(slug, db);
    if (found) {
      return { success: true, data: found };
    }
    return {
      success: false,
      error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
    };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_GET_ERROR", message: "Failed to fetch section" },
    };
  }
}
/**
 * Update an existing section, addressed by slug.
 *
 * Supports renaming (with format validation and conflict detection on
 * the new slug) and partial updates of title, description, keywords,
 * content, and the preview media reference.
 */
export async function handleSectionUpdate(
  db: Kysely<Database>,
  slug: string,
  input: {
    slug?: string;
    title?: string;
    description?: string;
    keywords?: string[];
    content?: unknown[];
    previewMediaId?: string | null;
  },
): Promise<ApiResult<Section>> {
  try {
    const current = await db
      .selectFrom("_emdash_sections")
      .select(["id", "source"])
      .where("slug", "=", slug)
      .executeTakeFirst();
    if (!current) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
      };
    }
    // Renaming? Validate the new slug and make sure it is free.
    const nextSlug = input.slug;
    if (nextSlug && nextSlug !== slug) {
      if (!SLUG_PATTERN.test(nextSlug)) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: "slug must only contain lowercase letters, numbers, and hyphens",
          },
        };
      }
      const taken = await db
        .selectFrom("_emdash_sections")
        .select("id")
        .where("slug", "=", nextSlug)
        .executeTakeFirst();
      if (taken) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Section with slug "${nextSlug}" already exists`,
          },
        };
      }
    }
    // Only touch columns the caller actually supplied.
    const patch: Record<string, unknown> = { updated_at: new Date().toISOString() };
    if (input.slug !== undefined) patch.slug = input.slug;
    if (input.title !== undefined) patch.title = input.title;
    if (input.description !== undefined) patch.description = input.description;
    if (input.keywords !== undefined) patch.keywords = JSON.stringify(input.keywords);
    if (input.content !== undefined) patch.content = JSON.stringify(input.content);
    if (input.previewMediaId !== undefined) patch.preview_media_id = input.previewMediaId;
    await db.updateTable("_emdash_sections").set(patch).where("id", "=", current.id).execute();
    const updated = await getSectionById(current.id, db);
    if (!updated) {
      return {
        success: false,
        error: { code: "SECTION_UPDATE_ERROR", message: "Failed to fetch updated section" },
      };
    }
    return { success: true, data: updated };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_UPDATE_ERROR", message: "Failed to update section" },
    };
  }
}
/**
 * Delete a user-created section by slug.
 *
 * Theme-provided sections are protected: editing one creates a user
 * copy, and only that copy may be deleted.
 */
export async function handleSectionDelete(
  db: Kysely<Database>,
  slug: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const row = await db
      .selectFrom("_emdash_sections")
      .select(["id", "source", "theme_id"])
      .where("slug", "=", slug)
      .executeTakeFirst();
    if (!row) {
      return {
        success: false,
        error: { code: "NOT_FOUND", message: `Section "${slug}" not found` },
      };
    }
    if (row.source === "theme") {
      return {
        success: false,
        error: {
          code: "FORBIDDEN",
          message:
            "Cannot delete theme-provided sections. Edit the section to create a user copy, then delete that.",
        },
      };
    }
    await db.deleteFrom("_emdash_sections").where("id", "=", row.id).execute();
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: { code: "SECTION_DELETE_ERROR", message: "Failed to delete section" },
    };
  }
}

View File

@@ -0,0 +1,115 @@
/**
* SEO Handlers
*
* Business logic for sitemap generation and robots.txt.
*/
import { sql, type Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import { validateIdentifier } from "../../database/validate.js";
import type { ApiResult } from "../types.js";
/** Raw content data for sitemap generation — the route builds the actual URLs */
export interface SitemapContentEntry {
  /** Collection slug (e.g., "post", "page") */
  collection: string;
  /** Content slug, falling back to the row ID when no slug exists */
  identifier: string;
  /** ISO date of last modification */
  updatedAt: string;
}
/** Payload returned by handleSitemapData. */
export interface SitemapDataResponse {
  entries: SitemapContentEntry[];
}
/** Maximum entries per sitemap file, per the sitemaps.org protocol limit */
const SITEMAP_MAX_ENTRIES = 50_000;
/**
 * Collect all published, indexable content across SEO-enabled collections
 * for sitemap generation.
 *
 * Only includes content from collections with `has_seo = 1`.
 * Excludes content with `seo_no_index = 1` in the `_emdash_seo` table.
 *
 * Returns raw data (collection + identifier + date). The caller (route)
 * is responsible for building absolute URLs — this handler does NOT
 * assume a URL structure.
 *
 * Output is capped at SITEMAP_MAX_ENTRIES total across all collections;
 * a per-collection LIMIT keeps any single query from overshooting.
 */
export async function handleSitemapData(
  db: Kysely<Database>,
): Promise<ApiResult<SitemapDataResponse>> {
  try {
    // Find all SEO-enabled collections
    const collections = await db
      .selectFrom("_emdash_collections")
      .select(["slug"])
      .where("has_seo", "=", 1)
      .execute();
    const entries: SitemapContentEntry[] = [];
    for (const col of collections) {
      if (entries.length >= SITEMAP_MAX_ENTRIES) break;
      // Validate the slug before using it as a table name identifier.
      // Should always pass (slugs are validated on creation), but
      // guards against corrupted DB data.
      try {
        validateIdentifier(col.slug, "collection slug");
      } catch {
        console.warn(`[SITEMAP] Skipping collection with invalid slug: ${col.slug}`);
        continue;
      }
      // Content tables are named ec_<collection slug>.
      const tableName = `ec_${col.slug}`;
      const remaining = SITEMAP_MAX_ENTRIES - entries.length;
      // Query published, non-deleted content.
      // LEFT JOIN _emdash_seo to check noindex flag.
      // Content without an SEO row is assumed indexable (default).
      // Wrapped in try/catch so a missing/broken table doesn't fail the
      // entire sitemap — we skip that collection and continue.
      try {
        const rows = await sql<{
          slug: string | null;
          id: string;
          updated_at: string;
        }>`
          SELECT c.slug, c.id, c.updated_at
          FROM ${sql.ref(tableName)} c
          LEFT JOIN _emdash_seo s
            ON s.collection = ${col.slug}
            AND s.content_id = c.id
          WHERE c.status = 'published'
            AND c.deleted_at IS NULL
            AND (s.seo_no_index IS NULL OR s.seo_no_index = 0)
          ORDER BY c.updated_at DESC
          LIMIT ${remaining}
        `.execute(db);
        for (const row of rows.rows) {
          entries.push({
            collection: col.slug,
            // Prefer the human-readable slug; fall back to the row ID
            // for content without one.
            identifier: row.slug || row.id,
            updatedAt: row.updated_at,
          });
        }
      } catch (err) {
        // Table missing or query error — skip this collection
        console.warn(`[SITEMAP] Failed to query collection "${col.slug}":`, err);
        continue;
      }
    }
    return { success: true, data: { entries } };
  } catch (error) {
    console.error("[SITEMAP_ERROR]", error);
    return {
      success: false,
      error: { code: "SITEMAP_ERROR", message: "Failed to generate sitemap data" },
    };
  }
}

View File

@@ -0,0 +1,49 @@
/**
* Settings handlers
*/
import type { Kysely } from "kysely";
import type { Database } from "../../database/types.js";
import { getSiteSettingsWithDb, setSiteSettings } from "../../settings/index.js";
import type { SiteSettings } from "../../settings/types.js";
import type { Storage } from "../../storage/types.js";
import type { ApiResult } from "../types.js";
/**
 * Read all site settings.
 */
export async function handleSettingsGet(
  db: Kysely<Database>,
  storage: Storage | null,
): Promise<ApiResult<Partial<SiteSettings>>> {
  try {
    return { success: true, data: await getSiteSettingsWithDb(db, storage) };
  } catch {
    return {
      success: false,
      error: { code: "SETTINGS_READ_ERROR", message: "Failed to get settings" },
    };
  }
}
/**
 * Persist a partial settings update, then return the full, freshly
 * loaded settings so the caller sees the merged result.
 */
export async function handleSettingsUpdate(
  db: Kysely<Database>,
  storage: Storage | null,
  input: Partial<SiteSettings>,
): Promise<ApiResult<Partial<SiteSettings>>> {
  try {
    await setSiteSettings(input, db);
    const merged = await getSiteSettingsWithDb(db, storage);
    return { success: true, data: merged };
  } catch {
    return {
      success: false,
      error: { code: "SETTINGS_UPDATE_ERROR", message: "Failed to update settings" },
    };
  }
}

View File

@@ -0,0 +1,350 @@
/**
* Snapshot handler — generates a portable database snapshot.
*
* Returns all content tables, schema definitions, and supporting data
* needed to render content in an isolated preview database.
*
* Used by:
* - DO preview database (EmDashPreviewDB.populateFromSnapshot)
* - Future: CLI export, backup, site migration
*/
import type { Kysely } from "kysely";
import { sql } from "kysely";
import type { Database } from "../../database/types.js";
// ── Preview signature verification ──────────────────────────────
/**
 * Verify an HMAC-SHA256 preview signature using crypto.subtle.
 *
 * @param source - The preview source URL the signature covers
 * @param exp - Unix timestamp (seconds) after which the signature is invalid
 * @param sig - Hex-encoded HMAC-SHA256 of `${source}:${exp}`
 * @param secret - Shared secret used to derive the HMAC key
 * @returns true only if the signature is well-formed, unexpired, and valid
 */
export async function verifyPreviewSignature(
  source: string,
  exp: number,
  sig: string,
  secret: string,
): Promise<boolean> {
  if (exp < Date.now() / 1000) return false;
  // Reject malformed signatures outright: an odd-length string previously
  // threw (fractional Uint8Array length), and non-hex characters silently
  // decoded NaN bytes to 0. Both are verification failures, not crashes.
  if (sig.length === 0 || sig.length % 2 !== 0 || !/^[0-9a-fA-F]+$/.test(sig)) {
    return false;
  }
  const encoder = new TextEncoder();
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["verify"],
  );
  // Decode the hex signature into raw bytes for crypto.subtle.verify.
  const sigBytes = new Uint8Array(sig.length / 2);
  for (let i = 0; i < sig.length; i += 2) {
    sigBytes[i / 2] = parseInt(sig.substring(i, i + 2), 16);
  }
  return crypto.subtle.verify("HMAC", key, sigBytes, encoder.encode(`${source}:${exp}`));
}
/**
 * Parse an X-Preview-Signature header value into its components.
 *
 * Format: "source:exp:sig" where source is a URL (contains colons),
 * exp is a unix timestamp in seconds, and sig is 64 hex chars.
 *
 * Parses from the right since source URLs contain colons.
 *
 * @returns Parsed components, or null if the format is invalid
 */
export function parsePreviewSignatureHeader(
  header: string,
): { source: string; exp: number; sig: string } | null {
  const lastColon = header.lastIndexOf(":");
  if (lastColon <= 0) return null;
  const sig = header.substring(lastColon + 1);
  // The signature must be exactly 64 hex characters (hex-encoded
  // HMAC-SHA256). Checking hex here — not just the length — avoids
  // handing garbage to the verifier.
  if (!/^[0-9a-fA-F]{64}$/.test(sig)) return null;
  const rest = header.substring(0, lastColon);
  const secondLastColon = rest.lastIndexOf(":");
  if (secondLastColon <= 0) return null;
  const source = rest.substring(0, secondLastColon);
  const expStr = rest.substring(secondLastColon + 1);
  // Require the whole expiry segment to be digits: parseInt("12x", 10)
  // returns 12 and would silently accept a malformed timestamp.
  if (!/^\d+$/.test(expStr) || source.length === 0) return null;
  return { source, exp: parseInt(expStr, 10), sig };
}
// ── Media URL rewriting ─────────────────────────────────────────
const MEDIA_FILE_PREFIX = "/_emdash/api/media/file/";
/**
 * Parse a JSON string value and inject `src` for local media objects.
 * Returns the original string if it's not JSON, not a plain object, or
 * contains no local media values.
 */
function injectMediaSrc(jsonStr: string, origin: string): string {
  let parsed: unknown;
  try {
    parsed = JSON.parse(jsonStr);
  } catch {
    return jsonStr; // not JSON — leave untouched
  }
  if (!isRecord(parsed)) return jsonStr;
  // Only re-serialize when something actually changed.
  return injectMediaSrcInto(parsed, origin) ? JSON.stringify(parsed) : jsonStr;
}
/** Narrow an unknown value to a plain (non-null, non-array) object. */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (typeof value !== "object") return false;
  if (value === null) return false;
  return !Array.isArray(value);
}
/**
 * Recursively walk an object and inject `src` into local media values.
 *
 * A "local media value" is an object with provider === "local" (or no
 * provider but both id and meta) and no src yet; its URL is derived from
 * meta.storageKey, falling back to id.
 *
 * Returns true if any modifications were made.
 */
function injectMediaSrcInto(obj: Record<string, unknown>, origin: string): boolean {
  let changed = false;
  // Is this object itself a local media value that still needs a src?
  const looksLocal = obj.provider === "local" || (!obj.provider && obj.id && obj.meta);
  if (looksLocal && !obj.src) {
    const meta = isRecord(obj.meta) ? obj.meta : undefined;
    const key = meta?.storageKey ?? obj.id;
    if (typeof key === "string" && key) {
      obj.src = `${origin}${MEDIA_FILE_PREFIX}${key}`;
      changed = true;
    }
  }
  // Walk nested objects/arrays (e.g. Portable Text with image blocks).
  for (const child of Object.values(obj)) {
    const candidates = Array.isArray(child) ? child : [child];
    for (const item of candidates) {
      if (isRecord(item) && injectMediaSrcInto(item, origin)) {
        changed = true;
      }
    }
  }
  return changed;
}
// ── Snapshot generation ─────────────────────────────────────────
/**
 * Safe identifier pattern for snapshot table names.
 * More permissive than validateIdentifier() — allows leading underscores
 * (needed for system tables like _emdash_collections).
 */
const SAFE_TABLE_NAME = /^[a-z_][a-z0-9_]*$/;
/**
 * Snapshot shape consumed by the DO preview database.
 *
 * `tables` maps table name -> exported rows; `schema` carries the column
 * order and declared SQLite types needed to recreate each table.
 */
export interface Snapshot {
  tables: Record<string, Record<string, unknown>[]>;
  schema: Record<
    string,
    {
      columns: string[];
      types?: Record<string, string>;
    }
  >;
  generatedAt: string;
}
/**
 * System tables included in snapshots.
 * Content tables (ec_*) are discovered dynamically.
 */
const SYSTEM_TABLES = [
  "_emdash_collections",
  "_emdash_fields",
  "_emdash_taxonomy_defs",
  "_emdash_menus",
  "_emdash_menu_items",
  "_emdash_sections",
  "_emdash_widget_areas",
  "_emdash_widgets",
  "_emdash_seo",
  "_emdash_migrations",
  "taxonomies",
  "content_taxonomies",
  "media",
  "options",
  "revisions",
];
/**
 * Table name prefixes excluded from snapshots (auth/security data).
 */
const EXCLUDED_PREFIXES = [
  "_emdash_api_tokens",
  "_emdash_oauth_tokens",
  "_emdash_authorization_codes",
  "_emdash_device_codes",
  "_emdash_migrations_lock",
  "_plugin_",
  "users",
  "sessions",
  "credentials",
  "challenges",
];
/**
 * Options key prefixes safe for inclusion in snapshots.
 *
 * The options table contains plugin secrets (plugin:*), passkey challenges
 * (emdash:passkey_pending:*), and setup state that must not leak to
 * preview databases. Only site-level rendering settings are needed.
 */
const SAFE_OPTIONS_PREFIXES = ["site:"];
/** True when a table holds auth/security data and must stay out of snapshots. */
function isExcluded(tableName: string): boolean {
  for (const prefix of EXCLUDED_PREFIXES) {
    if (tableName.startsWith(prefix)) return true;
  }
  return false;
}
/** Column info from `PRAGMA table_info` (column name + declared SQLite type). */
interface ColumnInfo {
  name: string;
  type: string;
}
/** Options controlling snapshot generation. */
export interface GenerateSnapshotOptions {
  /** Include draft and trashed content (default: false) */
  includeDrafts?: boolean;
  /** Origin URL for absolutizing local media URLs (e.g. "https://mysite.com") */
  origin?: string;
}
/**
 * Generate a portable database snapshot.
 *
 * Discovers ec_* content tables dynamically, exports system tables
 * needed for rendering, and includes schema info for table recreation.
 *
 * Security filtering happens in three layers: isExcluded() drops
 * auth/security tables, SAFE_TABLE_NAME guards the sql.raw()
 * interpolation below, and the options table is filtered to
 * SAFE_OPTIONS_PREFIXES keys only.
 *
 * @param db - Live database to export from
 * @param options - Draft inclusion and media-URL origin (see GenerateSnapshotOptions)
 * @returns Snapshot with per-table rows, column schema, and a generation timestamp
 */
export async function generateSnapshot(
  db: Kysely<Database>,
  options?: GenerateSnapshotOptions,
): Promise<Snapshot> {
  const includeDrafts = options?.includeDrafts ?? false;
  // Discover all ec_* content tables
  const tableResult = await sql<{ name: string }>`
    SELECT name FROM sqlite_master
    WHERE type = 'table'
    AND name LIKE 'ec_%'
    ORDER BY name
  `.execute(db);
  const contentTables = tableResult.rows.map((r) => r.name);
  // Build list of all tables to export
  const allTables = [...contentTables, ...SYSTEM_TABLES];
  const tables: Record<string, Record<string, unknown>[]> = {};
  const schema: Record<string, { columns: string[]; types?: Record<string, string> }> = {};
  for (const tableName of allTables) {
    if (isExcluded(tableName)) continue;
    // Validate identifier before interpolating into sql.raw().
    // SYSTEM_TABLES are hardcoded and safe, but ec_* names come from
    // sqlite_master and must be validated.
    if (!SAFE_TABLE_NAME.test(tableName)) continue;
    try {
      // Get column info via PRAGMA
      const pragmaResult = await sql<ColumnInfo>`
        PRAGMA table_info(${sql.raw(`"${tableName}"`)})
      `.execute(db);
      // No columns means the table doesn't exist — skip it.
      if (pragmaResult.rows.length === 0) continue;
      const columns = pragmaResult.rows.map((r) => r.name);
      const types: Record<string, string> = {};
      for (const row of pragmaResult.rows) {
        // SQLite allows untyped columns; default their declared type to TEXT.
        types[row.name] = row.type || "TEXT";
      }
      schema[tableName] = { columns, types };
      // Fetch rows
      let rows: Record<string, unknown>[];
      if (tableName.startsWith("ec_")) {
        if (includeDrafts) {
          // Include all non-deleted content (published, draft, scheduled)
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
            `.execute(db)
          ).rows;
        } else {
          // Only export published content
          rows = (
            await sql<Record<string, unknown>>`
              SELECT * FROM ${sql.raw(`"${tableName}"`)}
              WHERE deleted_at IS NULL
              AND (status = 'published' OR (status = 'scheduled' AND scheduled_at <= datetime('now')))
            `.execute(db)
          ).rows;
        }
      } else if (tableName === "options") {
        // Filter options to safe rendering-only prefixes.
        // Excludes plugin secrets, passkey challenges, and setup state.
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows.filter((row) => {
          const name = typeof row.name === "string" ? row.name : "";
          return SAFE_OPTIONS_PREFIXES.some((prefix) => name.startsWith(prefix));
        });
      } else {
        rows = (
          await sql<Record<string, unknown>>`
            SELECT * FROM ${sql.raw(`"${tableName}"`)}
          `.execute(db)
        ).rows;
      }
      if (rows.length > 0) {
        tables[tableName] = rows;
      }
    } catch {
      // Table might not exist yet (e.g. pre-migration) — skip silently
    }
  }
  // Absolutize local media URLs in content tables so snapshots are portable.
  // Local image fields are stored as JSON with provider:"local" and
  // meta.storageKey but no src — the URL is derived at render time.
  // For snapshots consumed by external preview services, inject src now.
  if (options?.origin) {
    const origin = options.origin;
    for (const [tableName, rows] of Object.entries(tables)) {
      if (!tableName.startsWith("ec_")) continue;
      for (const row of rows) {
        for (const [col, value] of Object.entries(row)) {
          // Cheap pre-filter: only strings that could be JSON objects.
          if (typeof value !== "string" || !value.startsWith("{")) continue;
          row[col] = injectMediaSrc(value, origin);
        }
      }
    }
  }
  return {
    tables,
    schema,
    generatedAt: new Date().toISOString(),
  };
}

View File

@@ -0,0 +1,523 @@
/**
* Taxonomy and term CRUD handlers
*/
import type { Kysely } from "kysely";
import { ulid } from "ulidx";
import { TaxonomyRepository } from "../../database/repositories/taxonomy.js";
import type { Database } from "../../database/types.js";
import type { ApiResult } from "../types.js";
/** Taxonomy name validation pattern: lowercase alphanumeric + underscores, starts with letter */
const NAME_PATTERN = /^[a-z][a-z0-9_]*$/;
// ---------------------------------------------------------------------------
// Response types
// ---------------------------------------------------------------------------
/** A taxonomy definition as exposed by the API (camelCase, booleans decoded). */
export interface TaxonomyDef {
  id: string;
  name: string;
  label: string;
  labelSingular?: string;
  /** True when terms may nest (e.g. categories); false for flat tags. */
  hierarchical: boolean;
  /** Slugs of the collections this taxonomy is attached to. */
  collections: string[];
}
export interface TaxonomyListResponse {
  taxonomies: TaxonomyDef[];
}
/** A single term, flattened for API responses. */
export interface TermData {
  id: string;
  name: string;
  slug: string;
  label: string;
  parentId: string | null;
  description?: string;
}
/** Term plus usage count and (for hierarchical taxonomies) nested children. */
export interface TermWithCount extends TermData {
  count: number;
  children: TermWithCount[];
}
export interface TermListResponse {
  terms: TermWithCount[];
}
export interface TermResponse {
  term: TermData;
}
/** Detail view: term with usage count and a shallow list of direct children. */
export interface TermGetResponse {
  term: TermData & {
    count: number;
    children: Array<{ id: string; slug: string; label: string }>;
  };
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/**
 * Arrange a flat list of terms into a parent/child tree.
 *
 * Terms whose parentId is null — or points at an id not present in the
 * input — become roots. Children are appended in input order. Mutates
 * the `children` arrays of the input terms.
 */
function buildTree(flatTerms: TermWithCount[]): TermWithCount[] {
  const byId = new Map(flatTerms.map((t) => [t.id, t] as const));
  const roots: TermWithCount[] = [];
  for (const term of flatTerms) {
    const parent = term.parentId ? byId.get(term.parentId) : undefined;
    if (parent) {
      parent.children.push(term);
    } else {
      roots.push(term);
    }
  }
  return roots;
}
/**
 * Look up a taxonomy definition by name, returning a NOT_FOUND error
 * result if it does not exist.
 */
async function requireTaxonomyDef(
  db: Kysely<Database>,
  name: string,
): Promise<
  | { success: true; def: { hierarchical: number } }
  | { success: false; error: { code: string; message: string } }
> {
  const row = await db
    .selectFrom("_emdash_taxonomy_defs")
    .selectAll()
    .where("name", "=", name)
    .executeTakeFirst();
  if (row) {
    return { success: true, def: row };
  }
  return {
    success: false,
    error: { code: "NOT_FOUND", message: `Taxonomy '${name}' not found` },
  };
}
// ---------------------------------------------------------------------------
// Handlers
// ---------------------------------------------------------------------------
/**
 * List all taxonomy definitions.
 */
export async function handleTaxonomyList(
  db: Kysely<Database>,
): Promise<ApiResult<TaxonomyListResponse>> {
  try {
    const rows = await db.selectFrom("_emdash_taxonomy_defs").selectAll().execute();
    // Map DB rows (snake_case, 0/1 flags, JSON text) into the API shape.
    const taxonomies: TaxonomyDef[] = [];
    for (const row of rows) {
      taxonomies.push({
        id: row.id,
        name: row.name,
        label: row.label,
        labelSingular: row.label_singular ?? undefined,
        hierarchical: row.hierarchical === 1,
        collections: row.collections ? JSON.parse(row.collections) : [],
      });
    }
    return { success: true, data: { taxonomies } };
  } catch {
    return {
      success: false,
      error: { code: "TAXONOMY_LIST_ERROR", message: "Failed to list taxonomies" },
    };
  }
}
/**
 * Create a new taxonomy definition.
 *
 * Validates the name format and referenced collections, rejects
 * duplicates, and persists the definition. A UNIQUE violation from a
 * concurrent insert is surfaced as CONFLICT rather than a generic error.
 */
export async function handleTaxonomyCreate(
  db: Kysely<Database>,
  input: { name: string; label: string; hierarchical?: boolean; collections?: string[] },
): Promise<ApiResult<{ taxonomy: TaxonomyDef }>> {
  try {
    if (!NAME_PATTERN.test(input.name)) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message:
            "Taxonomy name must start with a letter and contain only lowercase letters, numbers, and underscores",
        },
      };
    }
    // De-duplicate the collection list before validating it.
    const collections = [...new Set(input.collections ?? [])];
    if (collections.length > 0) {
      const found = await db
        .selectFrom("_emdash_collections")
        .select("slug")
        .where("slug", "in", collections)
        .execute();
      const known = new Set(found.map((c) => c.slug));
      const unknown = collections.filter((slug) => !known.has(slug));
      if (unknown.length > 0) {
        return {
          success: false,
          error: {
            code: "VALIDATION_ERROR",
            message: `Unknown collection(s): ${unknown.join(", ")}`,
          },
        };
      }
    }
    // Friendly duplicate check; the UNIQUE constraint is the real guard.
    const duplicate = await db
      .selectFrom("_emdash_taxonomy_defs")
      .selectAll()
      .where("name", "=", input.name)
      .executeTakeFirst();
    if (duplicate) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }
    const id = ulid();
    await db
      .insertInto("_emdash_taxonomy_defs")
      .values({
        id,
        name: input.name,
        label: input.label,
        label_singular: null,
        hierarchical: input.hierarchical ? 1 : 0,
        collections: JSON.stringify(collections),
      })
      .execute();
    return {
      success: true,
      data: {
        taxonomy: {
          id,
          name: input.name,
          label: input.label,
          hierarchical: input.hierarchical ?? false,
          collections,
        },
      },
    };
  } catch (error) {
    // A concurrent create can slip past the pre-check; translate the
    // resulting UNIQUE constraint violation to CONFLICT.
    if (error instanceof Error && error.message.includes("UNIQUE constraint failed")) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Taxonomy '${input.name}' already exists`,
        },
      };
    }
    return {
      success: false,
      error: { code: "TAXONOMY_CREATE_ERROR", message: "Failed to create taxonomy" },
    };
  }
}
/**
 * List all terms for a taxonomy (returns tree for hierarchical taxonomies).
 *
 * Each term carries a usage count (number of content entries tagged with
 * it). Counts are fetched concurrently — the queries are independent, so
 * awaiting them one-by-one in a loop was an accidental N+1 slowdown.
 */
export async function handleTermList(
  db: Kysely<Database>,
  taxonomyName: string,
): Promise<ApiResult<TermListResponse>> {
  try {
    const lookup = await requireTaxonomyDef(db, taxonomyName);
    if (!lookup.success) return lookup;
    const repo = new TaxonomyRepository(db);
    const terms = await repo.findByName(taxonomyName);
    // Fetch per-term usage counts in parallel.
    const counts = new Map<string, number>(
      await Promise.all(
        terms.map(async (term) => [term.id, await repo.countEntriesWithTerm(term.id)] as const),
      ),
    );
    const termData: TermWithCount[] = terms.map((term) => ({
      id: term.id,
      name: term.name,
      slug: term.slug,
      label: term.label,
      parentId: term.parentId,
      description: typeof term.data?.description === "string" ? term.data.description : undefined,
      children: [],
      count: counts.get(term.id) ?? 0,
    }));
    // Hierarchical taxonomies return a nested tree; flat ones a plain list.
    const isHierarchical = lookup.def.hierarchical === 1;
    const result = isHierarchical ? buildTree(termData) : termData;
    return { success: true, data: { terms: result } };
  } catch {
    return {
      success: false,
      error: { code: "TERM_LIST_ERROR", message: "Failed to list terms" },
    };
  }
}
/**
 * Create a new term in a taxonomy.
 *
 * Slugs are unique within a taxonomy; duplicates are rejected with CONFLICT.
 */
export async function handleTermCreate(
  db: Kysely<Database>,
  taxonomyName: string,
  input: { slug: string; label: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const lookup = await requireTaxonomyDef(db, taxonomyName);
    if (!lookup.success) return lookup;
    const repo = new TaxonomyRepository(db);
    if (await repo.findBySlug(taxonomyName, input.slug)) {
      return {
        success: false,
        error: {
          code: "CONFLICT",
          message: `Term with slug '${input.slug}' already exists in taxonomy '${taxonomyName}'`,
        },
      };
    }
    const term = await repo.create({
      name: taxonomyName,
      slug: input.slug,
      label: input.label,
      parentId: input.parentId ?? undefined,
      // Description lives inside the term's JSON data blob.
      data: input.description ? { description: input.description } : undefined,
    });
    return {
      success: true,
      data: {
        term: {
          id: term.id,
          name: term.name,
          slug: term.slug,
          label: term.label,
          parentId: term.parentId,
          description:
            typeof term.data?.description === "string" ? term.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_CREATE_ERROR", message: "Failed to create term" },
    };
  }
}
/**
 * Get a single term by slug, including its usage count and direct children.
 */
export async function handleTermGet(
  db: Kysely<Database>,
  taxonomyName: string,
  termSlug: string,
): Promise<ApiResult<TermGetResponse>> {
  try {
    const repo = new TaxonomyRepository(db);
    const term = await repo.findBySlug(taxonomyName, termSlug);
    if (!term) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
        },
      };
    }
    const count = await repo.countEntriesWithTerm(term.id);
    const kids = await repo.findChildren(term.id);
    return {
      success: true,
      data: {
        term: {
          id: term.id,
          name: term.name,
          slug: term.slug,
          label: term.label,
          parentId: term.parentId,
          description:
            typeof term.data?.description === "string" ? term.data.description : undefined,
          count,
          // Children are returned shallow: id/slug/label only.
          children: kids.map(({ id, slug, label }) => ({ id, slug, label })),
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_GET_ERROR", message: "Failed to get term" },
    };
  }
}
/**
 * Update a term's slug, label, parent, or description.
 *
 * Renames are checked against existing slugs in the same taxonomy.
 */
export async function handleTermUpdate(
  db: Kysely<Database>,
  taxonomyName: string,
  termSlug: string,
  input: { slug?: string; label?: string; parentId?: string | null; description?: string },
): Promise<ApiResult<TermResponse>> {
  try {
    const repo = new TaxonomyRepository(db);
    const term = await repo.findBySlug(taxonomyName, termSlug);
    if (!term) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
        },
      };
    }
    // Renaming? The target slug must not belong to a different term.
    if (input.slug && input.slug !== termSlug) {
      const occupant = await repo.findBySlug(taxonomyName, input.slug);
      if (occupant && occupant.id !== term.id) {
        return {
          success: false,
          error: {
            code: "CONFLICT",
            message: `Term with slug '${input.slug}' already exists in taxonomy '${taxonomyName}'`,
          },
        };
      }
    }
    const updated = await repo.update(term.id, {
      slug: input.slug,
      label: input.label,
      parentId: input.parentId,
      data: input.description !== undefined ? { description: input.description } : undefined,
    });
    if (!updated) {
      return {
        success: false,
        error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
      };
    }
    return {
      success: true,
      data: {
        term: {
          id: updated.id,
          name: updated.name,
          slug: updated.slug,
          label: updated.label,
          parentId: updated.parentId,
          description:
            typeof updated.data?.description === "string" ? updated.data.description : undefined,
        },
      },
    };
  } catch {
    return {
      success: false,
      error: { code: "TERM_UPDATE_ERROR", message: "Failed to update term" },
    };
  }
}
/**
 * Delete a term. Terms that still have children cannot be deleted.
 */
export async function handleTermDelete(
  db: Kysely<Database>,
  taxonomyName: string,
  termSlug: string,
): Promise<ApiResult<{ deleted: true }>> {
  try {
    const repo = new TaxonomyRepository(db);
    const term = await repo.findBySlug(taxonomyName, termSlug);
    if (!term) {
      return {
        success: false,
        error: {
          code: "NOT_FOUND",
          message: `Term '${termSlug}' not found in taxonomy '${taxonomyName}'`,
        },
      };
    }
    // Refuse to orphan children — the caller must delete leaves first.
    const children = await repo.findChildren(term.id);
    if (children.length > 0) {
      return {
        success: false,
        error: {
          code: "VALIDATION_ERROR",
          message: "Cannot delete term with children. Delete children first.",
        },
      };
    }
    if (!(await repo.delete(term.id))) {
      return {
        success: false,
        error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
      };
    }
    return { success: true, data: { deleted: true } };
  } catch {
    return {
      success: false,
      error: { code: "TERM_DELETE_ERROR", message: "Failed to delete term" },
    };
  }
}

View File

@@ -0,0 +1,6 @@
export * from "./types.js";
export * from "./handlers/index.js";
export * from "./parse.js";
export * from "./schemas/index.js";
export * from "./error.js";
export * from "./errors.js";

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1 @@
export { generateOpenApiDocument } from "./document.js";

View File

@@ -0,0 +1,139 @@
/**
* Request body and query parameter parsing with Zod validation.
*
* All API routes should use these utilities instead of `request.json() as T`
* or raw `url.searchParams.get()` with manual coercion.
*/
import { z } from "zod";
import { apiError } from "./error.js";
/** Maximum allowed JSON request body size (10 MB), enforced via Content-Length. */
const MAX_BODY_SIZE = 10 * 1024 * 1024;
/**
 * Result of parsing: either the validated data or an error Response.
 * Routes should check `if (result instanceof Response) return result;`
 * (or use the isParseError type guard below).
 */
export type ParseResult<T> = T | Response;
/**
 * Parse and validate a JSON request body against a Zod schema.
 *
 * Returns the validated data on success, a 413 Response for oversized
 * bodies, or a 400 Response for invalid JSON / schema failures.
 * Replaces all `(await request.json()) as T` casts.
 */
export async function parseBody<T extends z.ZodType>(
  request: Request,
  schema: T,
): Promise<ParseResult<z.infer<T>>> {
  // Content-Length is a best-effort guard; chunked requests may omit it.
  const declaredSize = request.headers.get("Content-Length");
  if (declaredSize && parseInt(declaredSize, 10) > MAX_BODY_SIZE) {
    return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
  }
  let payload: unknown;
  try {
    payload = await request.json();
  } catch {
    return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
  }
  return validate(schema, payload);
}
/**
 * Parse and validate an optional JSON request body.
 *
 * Returns `defaultValue` if the body is empty or unreadable, the
 * validated data if a body is present, or an error Response for
 * oversized / malformed / schema-invalid input.
 * For endpoints where the body is optional (e.g., preview-url, confirm).
 */
export async function parseOptionalBody<T extends z.ZodType>(
  request: Request,
  schema: T,
  defaultValue: z.infer<T>,
): Promise<ParseResult<z.infer<T>>> {
  // Content-Length is a best-effort guard; chunked requests may omit it.
  const declaredSize = request.headers.get("Content-Length");
  if (declaredSize && parseInt(declaredSize, 10) > MAX_BODY_SIZE) {
    return apiError("PAYLOAD_TOO_LARGE", "Request body too large", 413);
  }
  let text: string;
  try {
    text = await request.text();
  } catch {
    // An unreadable body is treated the same as an absent one.
    return defaultValue;
  }
  if (!text.trim()) {
    return defaultValue;
  }
  let payload: unknown;
  try {
    payload = JSON.parse(text);
  } catch {
    return apiError("INVALID_JSON", "Request body must be valid JSON", 400);
  }
  return validate(schema, payload);
}
/**
 * Parse and validate URL search params against a Zod schema.
 *
 * Search params are flattened into a plain string record first (for repeated
 * keys the last occurrence wins); Zod coercion then handles string ->
 * number/boolean conversion. Replaces manual `url.searchParams.get()` +
 * `parseInt()` patterns.
 */
export function parseQuery<T extends z.ZodType>(url: URL, schema: T): ParseResult<z.infer<T>> {
  // URLSearchParams is iterable as [key, value] pairs.
  return validate(schema, Object.fromEntries(url.searchParams));
}
/**
 * Validate raw data against a schema. Returns the parsed data on success,
 * or a 400 VALIDATION_ERROR Response listing each failing path.
 */
function validate<T extends z.ZodType>(schema: T, data: unknown): ParseResult<z.infer<T>> {
  const parsed = schema.safeParse(data);
  if (!parsed.success) {
    // Flatten Zod issues into { path, message } pairs for API consumers.
    const issues = parsed.error.issues.map((issue: z.ZodIssue) => ({
      path: issue.path.join("."),
      message: issue.message,
    }));
    return Response.json(
      {
        error: {
          code: "VALIDATION_ERROR",
          message: "Invalid request data",
          details: { issues },
        },
      },
      {
        status: 400,
        headers: {
          "Cache-Control": "private, no-store",
        },
      },
    );
  }
  return parsed.data as z.infer<T>;
}
/**
 * Type guard distinguishing an error Response from successfully parsed data.
 * Usage: `if (isParseError(result)) return result;`
 */
export function isParseError<T>(result: ParseResult<T>): result is Response {
  const isErrorResponse = result instanceof Response;
  return isErrorResponse;
}

View File

@@ -0,0 +1,14 @@
/**
 * Validate that a redirect URL is a safe local path.
 *
 * Rejects:
 * - Protocol-relative URLs (`//evil.com`)
 * - Backslash bypass (`/\evil.com` — browsers normalize `\` to `/` in Location headers)
 * - Absolute URLs (`https://evil.com`)
 * - ASCII control characters (CR/LF response-splitting / header-injection vectors)
 * - Empty / nullish values
 */
export function isSafeRedirect(url: string | null | undefined): url is string {
  if (typeof url !== "string") return false;
  // Must be a local absolute path, not protocol-relative.
  if (!url.startsWith("/") || url.startsWith("//")) return false;
  // Backslash bypass: browsers treat `/\evil.com` like `//evil.com`.
  if (url.includes("\\")) return false;
  // Reject control characters so a raw CR/LF can never split the Location
  // header even if a runtime fails to sanitize it.
  // eslint-disable-next-line no-control-regex
  if (/[\u0000-\u001f\u007f]/.test(url)) return false;
  return true;
}

View File

@@ -0,0 +1,67 @@
/**
* Opaque _rev token generation and validation.
*
* Format: base64("version:updated_at")
* Stateless — server decodes and checks both components.
*
* Rules:
* - No _rev sent → blind write (backwards-compatible)
* - _rev matches → write proceeds, new _rev returned
* - _rev mismatch → 409 Conflict
*/
import type { ContentItem } from "../database/repositories/types.js";
import { encodeBase64, decodeBase64 } from "../utils/base64.js";
/**
 * Generate a _rev token from a content item's version and updatedAt.
 * Token format: base64("version:updatedAt").
 */
export function encodeRev(item: ContentItem): string {
  const plain = [item.version, item.updatedAt].join(":");
  return encodeBase64(plain);
}
/**
 * Decode a _rev token into its components.
 * Returns null when the token is malformed: bad base64, missing `:`
 * separator, non-numeric version, or empty timestamp.
 */
export function decodeRev(rev: string): { version: number; updatedAt: string } | null {
  let decoded: string;
  try {
    decoded = decodeBase64(rev);
  } catch {
    return null;
  }
  const separator = decoded.indexOf(":");
  if (separator < 0) return null;
  const version = Number.parseInt(decoded.slice(0, separator), 10);
  const updatedAt = decoded.slice(separator + 1);
  if (Number.isNaN(version) || updatedAt === "") return null;
  return { version, updatedAt };
}
/**
 * Validate a _rev token against a content item.
 *
 * Returns `{ valid: true }` when no token is supplied (blind write,
 * backwards-compatible) or when the token matches the item's current
 * version/updatedAt; otherwise `{ valid: false }` with a human-readable
 * message suitable for a 409 Conflict response.
 */
export function validateRev(
  rev: string | undefined,
  item: ContentItem,
): { valid: true } | { valid: false; message: string } {
  // Absent _rev means the caller opted out of optimistic concurrency.
  if (!rev) return { valid: true };
  const decoded = decodeRev(rev);
  if (!decoded) {
    return { valid: false, message: "Malformed _rev token" };
  }
  const matches = decoded.version === item.version && decoded.updatedAt === item.updatedAt;
  if (!matches) {
    return {
      valid: false,
      message: "Content has been modified since last read (version conflict)",
    };
  }
  return { valid: true };
}

View File

@@ -0,0 +1,112 @@
import { z } from "zod";
import { roleLevel } from "./common.js";
// ---------------------------------------------------------------------------
// WebAuthn credential schemas (matching @emdashcms/auth/passkey types)
// ---------------------------------------------------------------------------
// NOTE(review): the string fields below (rawId, clientDataJSON, attestationObject,
// signature, …) are presumably base64url-encoded per the WebAuthn JSON
// serialization — confirm against @emdashcms/auth/passkey.
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);
/** RegistrationResponse — sent by the browser after navigator.credentials.create() */
const registrationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  response: z.object({
    clientDataJSON: z.string(),
    attestationObject: z.string(),
    transports: z.array(authenticatorTransport).optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});
/** AuthenticationResponse — sent by the browser after navigator.credentials.get() */
const authenticationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  response: z.object({
    clientDataJSON: z.string(),
    authenticatorData: z.string(),
    signature: z.string(),
    userHandle: z.string().optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});
// ---------------------------------------------------------------------------
// Auth: Input schemas
// ---------------------------------------------------------------------------
// Request-body schemas for the auth API routes. Each `.meta({ id })` names
// the corresponding OpenAPI component.
/** Start email-based signup. */
export const signupRequestBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "SignupRequestBody" });
/** Finish signup: verification token + newly created passkey credential. */
export const signupCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "SignupCompleteBody" });
/** Invite a user by email, optionally with a role (validated via roleLevel). */
export const inviteCreateBody = z
  .object({
    email: z.string().email(),
    role: roleLevel.optional(),
  })
  .meta({ id: "InviteCreateBody" });
/** Accept an invite: invite token + passkey credential (same shape as signupCompleteBody). */
export const inviteCompleteBody = z
  .object({
    token: z.string().min(1),
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "InviteCompleteBody" });
export const magicLinkSendBody = z
  .object({
    email: z.string().email(),
  })
  .meta({ id: "MagicLinkSendBody" });
/** Request authentication options; email is optional for discoverable-credential flows. */
export const passkeyOptionsBody = z
  .object({
    email: z.string().email().optional(),
  })
  .meta({ id: "PasskeyOptionsBody" });
export const passkeyVerifyBody = z
  .object({
    credential: authenticationCredential,
  })
  .meta({ id: "PasskeyVerifyBody" });
/** Register an additional passkey for the signed-in user; `name` is a user-facing label. */
export const passkeyRegisterOptionsBody = z
  .object({
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterOptionsBody" });
export const passkeyRegisterVerifyBody = z
  .object({
    credential: registrationCredential,
    name: z.string().optional(),
  })
  .meta({ id: "PasskeyRegisterVerifyBody" });
export const passkeyRenameBody = z
  .object({
    name: z.string().min(1),
  })
  .meta({ id: "PasskeyRenameBody" });
/** Generic action dispatch on the /auth/me endpoint; valid actions are route-defined. */
export const authMeActionBody = z
  .object({
    action: z.string().min(1),
  })
  .meta({ id: "AuthMeActionBody" });

View File

@@ -0,0 +1,85 @@
import { z } from "zod";
import { cursorPaginationQuery, httpUrl } from "./common.js";
/** Slug pattern: lowercase letters, digits, and hyphens; must start with a letter */
const bylineSlugPattern = /^[a-z][a-z0-9-]*$/;
/** A byline (author credit) as returned by the API; `userId` links it to a registered user, null for guests. */
export const bylineSummarySchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    displayName: z.string(),
    bio: z.string().nullable(),
    avatarMediaId: z.string().nullable(),
    websiteUrl: z.string().nullable(),
    userId: z.string().nullable(),
    isGuest: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "BylineSummary" });
/** A byline attached to a content item, with ordering and an optional role label (e.g. "Photographer"). */
export const bylineCreditSchema = z
  .object({
    byline: bylineSummarySchema,
    sortOrder: z.number().int(),
    roleLabel: z.string().nullable(),
    source: z.enum(["explicit", "inferred"]).optional().meta({
      description: "Whether this credit was explicitly assigned or inferred from authorId",
    }),
  })
  .meta({ id: "BylineCredit" });
/** Input shape for attaching a byline to content. */
export const contentBylineInputSchema = z
  .object({
    bylineId: z.string().min(1),
    roleLabel: z.string().nullish(),
  })
  .meta({ id: "ContentBylineInput" });
export const bylinesListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
    isGuest: z.coerce.boolean().optional(),
    userId: z.string().optional(),
  })
  .meta({ id: "BylinesListQuery" });
/** Create requires slug + displayName; everything else is optional. */
export const bylineCreateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens"),
    displayName: z.string().min(1),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineCreateBody" });
/** Update is a partial: same fields as create, all optional (nullish clears nullable fields). */
export const bylineUpdateBody = z
  .object({
    slug: z
      .string()
      .min(1)
      .regex(bylineSlugPattern, "Slug must contain only lowercase letters, digits, and hyphens")
      .optional(),
    displayName: z.string().min(1).optional(),
    bio: z.string().nullish(),
    avatarMediaId: z.string().nullish(),
    websiteUrl: httpUrl.nullish(),
    userId: z.string().nullish(),
    isGuest: z.boolean().optional(),
  })
  .meta({ id: "BylineUpdateBody" });
export const bylineListResponseSchema = z
  .object({
    items: z.array(bylineSummarySchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "BylineListResponse" });

View File

@@ -0,0 +1,117 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Comments: Input schemas
// ---------------------------------------------------------------------------
/** Public comment submission. Length caps bound stored data; see honeypot note below. */
export const createCommentBody = z
  .object({
    authorName: z.string().min(1).max(100),
    authorEmail: z.string().email(),
    body: z.string().min(1).max(5000),
    parentId: z.string().optional(),
    /** Honeypot field — hidden in the form, filled only by bots */
    website_url: z.string().optional(),
  })
  .meta({ id: "CreateCommentBody" });
export const commentStatusBody = z
  .object({
    status: z.enum(["approved", "pending", "spam", "trash"]),
  })
  .meta({ id: "CommentStatusBody" });
/** Bulk moderation: up to 100 comment ids per request. */
export const commentBulkBody = z
  .object({
    ids: z.array(z.string().min(1)).min(1).max(100),
    action: z.enum(["approve", "spam", "trash", "delete"]),
  })
  .meta({ id: "CommentBulkBody" });
// NOTE(review): unlike other list queries in this package, this spells out
// cursor/limit instead of extending cursorPaginationQuery, so `limit` has no
// default of 50 here — confirm that is intentional.
export const commentListQuery = z
  .object({
    status: z.enum(["pending", "approved", "spam", "trash"]).optional(),
    collection: z.string().optional(),
    search: z.string().optional(),
    limit: z.coerce.number().int().min(1).max(100).optional(),
    cursor: z.string().optional(),
  })
  .meta({ id: "CommentListQuery" });
// ---------------------------------------------------------------------------
// Comments: Response schemas
// ---------------------------------------------------------------------------
const commentStatusValues = z.enum(["pending", "approved", "spam", "trash"]);
/**
 * Public-facing comment (no email/IP).
 *
 * `replies` is recursive in practice (each reply can have replies), but we
 * model it as a single level here to avoid circular type inference issues
 * with tsgo. OpenAPI consumers should treat replies as the same shape.
 */
export const publicCommentSchema: z.ZodObject<{
  id: z.ZodString;
  authorName: z.ZodString;
  isRegisteredUser: z.ZodBoolean;
  body: z.ZodString;
  parentId: z.ZodNullable<z.ZodString>;
  createdAt: z.ZodString;
  replies: z.ZodOptional<z.ZodArray<z.ZodAny>>;
}> = z
  .object({
    id: z.string(),
    authorName: z.string(),
    isRegisteredUser: z.boolean(),
    body: z.string(),
    parentId: z.string().nullable(),
    createdAt: z.string(),
    replies: z.array(z.any()).optional(),
  })
  .meta({ id: "PublicComment" });
/** Admin comment with full details */
export const commentSchema = z
  .object({
    id: z.string(),
    collection: z.string(),
    contentId: z.string(),
    authorName: z.string(),
    authorEmail: z.string(),
    body: z.string(),
    status: commentStatusValues,
    parentId: z.string().nullable(),
    // Hashed submitter IP (never the raw address); null when not recorded.
    ipHash: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Comment" });
export const publicCommentListResponseSchema = z
  .object({
    items: z.array(publicCommentSchema),
    nextCursor: z.string().optional(),
    total: z.number().int(),
  })
  .meta({ id: "PublicCommentListResponse" });
export const adminCommentListResponseSchema = z
  .object({
    items: z.array(commentSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "AdminCommentListResponse" });
/** Per-status counts for the moderation dashboard. */
export const commentCountsResponseSchema = z
  .object({
    pending: z.number().int(),
    approved: z.number().int(),
    spam: z.number().int(),
    trash: z.number().int(),
  })
  .meta({ id: "CommentCountsResponse" });
export const commentBulkResponseSchema = z
  .object({ affected: z.number().int() })
  .meta({ id: "CommentBulkResponse" });

View File

@@ -0,0 +1,89 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Role level
// ---------------------------------------------------------------------------
/** Valid role level values */
export const VALID_ROLE_LEVELS = new Set([10, 20, 30, 40, 50]);
/** Role level — coerces string/number to valid RoleLevel (10|20|30|40|50) */
// The refine carries a type predicate so the parsed output narrows to the literal union.
export const roleLevel = z.coerce
  .number()
  .int()
  .refine((n): n is 10 | 20 | 30 | 40 | 50 => VALID_ROLE_LEVELS.has(n), {
    message: "Invalid role level. Must be 10, 20, 30, 40, or 50",
  });
// ---------------------------------------------------------------------------
// Pagination
// ---------------------------------------------------------------------------
/** Pagination query params — cursor-based */
export const cursorPaginationQuery = z
  .object({
    cursor: z.string().optional().meta({ description: "Opaque cursor for pagination" }),
    limit: z.coerce.number().int().min(1).max(100).optional().default(50).meta({
      description: "Maximum number of items to return (1-100, default 50)",
    }),
  })
  .meta({ id: "CursorPaginationQuery" });
/** Pagination query params — offset-based */
export const offsetPaginationQuery = z
  .object({
    limit: z.coerce.number().int().min(1).max(100).optional().default(50),
    offset: z.coerce.number().int().min(0).optional().default(0),
  })
  .meta({ id: "OffsetPaginationQuery" });
// ---------------------------------------------------------------------------
// Shared primitives
// ---------------------------------------------------------------------------
/** Slug pattern: lowercase letters, digits, underscores; starts with letter */
export const slugPattern = /^[a-z][a-z0-9_]*$/;
/** Matches http(s) scheme at start of URL */
const HTTP_SCHEME_RE = /^https?:\/\//i;
/** Validates that a URL string uses http or https scheme. Rejects javascript:/data: URI XSS vectors. */
export const httpUrl = z
  .string()
  .url()
  .refine((url) => HTTP_SCHEME_RE.test(url), "URL must use http or https");
/** BCP 47 locale code — language with optional script/region subtags (e.g. en, en-US, pt-BR, es-419, zh-Hant) */
// NOTE(review): the transform lowercases the entire tag (pt-BR -> pt-br), which
// is valid BCP 47 (tags are case-insensitive) but not canonical casing — confirm
// downstream comparisons also lowercase.
export const localeCode = z
  .string()
  .regex(/^[a-z]{2,3}(-[a-z0-9]{2,8})*$/i, "Invalid locale code")
  .transform((v) => v.toLowerCase());
// ---------------------------------------------------------------------------
// OpenAPI: Shared response schemas
// ---------------------------------------------------------------------------
/** Standard API error response */
export const apiErrorSchema = z
  .object({
    error: z.object({
      code: z.string().meta({ description: "Machine-readable error code", example: "NOT_FOUND" }),
      message: z.string().meta({ description: "Human-readable error message" }),
    }),
  })
  .meta({ id: "ApiError" });
/** Wrap a data schema in the standard success envelope: { data: T } */
export function successEnvelope<T extends z.ZodType>(dataSchema: T) {
  return z.object({ data: dataSchema });
}
/** Standard delete response */
export const deleteResponseSchema = z.object({ deleted: z.literal(true) }).meta({
  id: "DeleteResponse",
});
/** Standard count response */
export const countResponseSchema = z
  .object({ count: z.number().int().min(0) })
  .meta({ id: "CountResponse" });

View File

@@ -0,0 +1,191 @@
import { z } from "zod";
import { bylineSummarySchema, bylineCreditSchema, contentBylineInputSchema } from "./bylines.js";
import { cursorPaginationQuery, httpUrl, localeCode } from "./common.js";
// ---------------------------------------------------------------------------
// Content: Input schemas
// ---------------------------------------------------------------------------
/** SEO input — per-content meta fields */
export const contentSeoInput = z
  .object({
    title: z.string().max(200).nullish(),
    description: z.string().max(500).nullish(),
    image: z.string().nullish(),
    // canonical must be an absolute http(s) URL; rejects javascript:/data: vectors.
    canonical: httpUrl.nullish(),
    noIndex: z.boolean().optional(),
  })
  .meta({ id: "ContentSeoInput" });
export const contentListQuery = cursorPaginationQuery
  .extend({
    status: z.string().optional(),
    orderBy: z.string().optional(),
    order: z.enum(["asc", "desc"]).optional(),
    locale: localeCode.optional(),
  })
  .meta({ id: "ContentListQuery" });
/** Create a content item; `data` holds the collection's user-defined field values. */
export const contentCreateBody = z
  .object({
    data: z.record(z.string(), z.unknown()),
    slug: z.string().nullish(),
    status: z.string().optional(),
    bylines: z.array(contentBylineInputSchema).optional(),
    locale: localeCode.optional(),
    // id of the item this one translates (groups translations together).
    translationOf: z.string().optional(),
    seo: contentSeoInput.optional(),
  })
  .meta({ id: "ContentCreateBody" });
/** Partial update; `_rev` enables optimistic concurrency (see rev helpers). */
export const contentUpdateBody = z
  .object({
    data: z.record(z.string(), z.unknown()).optional(),
    slug: z.string().nullish(),
    status: z.string().optional(),
    authorId: z.string().nullish(),
    bylines: z.array(contentBylineInputSchema).optional(),
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
    // When true, the update does not create a new revision entry.
    skipRevision: z.boolean().optional(),
    seo: contentSeoInput.optional(),
  })
  .meta({ id: "ContentUpdateBody" });
export const contentScheduleBody = z
  .object({
    scheduledAt: z.string().min(1, "scheduledAt is required").meta({
      description: "ISO 8601 datetime for scheduled publishing",
      example: "2025-06-15T09:00:00Z",
    }),
  })
  .meta({ id: "ContentScheduleBody" });
export const contentPreviewUrlBody = z
  .object({
    expiresIn: z.union([z.string(), z.number()]).optional(),
    pathPattern: z.string().optional(),
  })
  .meta({ id: "ContentPreviewUrlBody" });
/** Replace the set of taxonomy terms attached to a content item. */
export const contentTermsBody = z
  .object({
    termIds: z.array(z.string()),
  })
  .meta({ id: "ContentTermsBody" });
export const contentTrashQuery = cursorPaginationQuery;
// ---------------------------------------------------------------------------
// Content: Response schemas
// ---------------------------------------------------------------------------
/** SEO metadata on a content item */
export const contentSeoSchema = z
  .object({
    title: z.string().nullable(),
    description: z.string().nullable(),
    image: z.string().nullable(),
    canonical: z.string().nullable(),
    noIndex: z.boolean(),
  })
  .meta({ id: "ContentSeo" });
/** A single content item as returned by the API */
export const contentItemSchema = z
  .object({
    id: z.string(),
    type: z.string().meta({ description: "Collection slug this item belongs to" }),
    slug: z.string().nullable(),
    status: z.string().meta({ description: "draft, published, or scheduled" }),
    data: z.record(z.string(), z.unknown()).meta({
      description: "User-defined field values",
    }),
    authorId: z.string().nullable(),
    primaryBylineId: z.string().nullable(),
    byline: bylineSummarySchema.nullable().optional(),
    bylines: z.array(bylineCreditSchema).optional(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    scheduledAt: z.string().nullable(),
    liveRevisionId: z.string().nullable(),
    draftRevisionId: z.string().nullable(),
    version: z.number().int(),
    locale: z.string().nullable(),
    translationGroup: z.string().nullable(),
    seo: contentSeoSchema.optional(),
  })
  .meta({ id: "ContentItem" });
/** Response for single content item endpoints (get, create, update) */
export const contentResponseSchema = z
  .object({
    item: contentItemSchema,
    _rev: z
      .string()
      .optional()
      .meta({ description: "Opaque revision token for optimistic concurrency" }),
  })
  .meta({ id: "ContentResponse" });
/** Response for content list endpoints */
export const contentListResponseSchema = z
  .object({
    items: z.array(contentItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "ContentListResponse" });
/** Trashed content item */
export const trashedContentItemSchema = z
  .object({
    id: z.string(),
    type: z.string(),
    slug: z.string().nullable(),
    status: z.string(),
    data: z.record(z.string(), z.unknown()),
    authorId: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
    publishedAt: z.string().nullable(),
    deletedAt: z.string(),
  })
  .meta({ id: "TrashedContentItem" });
/** Response for trashed content list */
export const trashedContentListResponseSchema = z
  .object({
    items: z.array(trashedContentItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "TrashedContentListResponse" });
/** Response for content compare (live vs draft) */
export const contentCompareResponseSchema = z
  .object({
    hasChanges: z.boolean(),
    live: z.record(z.string(), z.unknown()).nullable(),
    draft: z.record(z.string(), z.unknown()).nullable(),
  })
  .meta({ id: "ContentCompareResponse" });
/** Translation summary for a content item */
// NOTE(review): no `.meta({ id })` here, so this inlines in the OpenAPI
// document rather than becoming a named component — confirm intentional.
export const contentTranslationSchema = z.object({
  id: z.string(),
  locale: z.string().nullable(),
  slug: z.string().nullable(),
  status: z.string(),
  updatedAt: z.string(),
});
/** Response for content translations endpoint */
export const contentTranslationsResponseSchema = z
  .object({
    translationGroup: z.string(),
    translations: z.array(contentTranslationSchema),
  })
  .meta({ id: "ContentTranslationsResponse" });

View File

@@ -0,0 +1,52 @@
import { z } from "zod";
import { httpUrl } from "./common.js";
// ---------------------------------------------------------------------------
// Import
// ---------------------------------------------------------------------------
// Request bodies for the WordPress import pipeline (probe -> analyze ->
// prepare -> media import -> URL rewrite).
// NOTE(review): these bodies have no `.meta({ id })`, so they inline in the
// OpenAPI document instead of becoming named components — confirm intentional.
/** Probe a remote site to detect its platform/capabilities. */
export const importProbeBody = z.object({
  url: httpUrl,
});
/** Analyze a WordPress site via the companion plugin; `token` authenticates against it. */
export const wpPluginAnalyzeBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
});
export const wpPluginExecuteBody = z.object({
  url: httpUrl,
  token: z.string().min(1),
  config: z.record(z.string(), z.unknown()),
});
/** Map WP post types onto local collections, optionally declaring their fields. */
export const wpPrepareBody = z.object({
  postTypes: z.array(
    z.object({
      name: z.string().min(1),
      collection: z.string().min(1),
      fields: z
        .array(
          z.object({
            slug: z.string().min(1),
            label: z.string().min(1),
            type: z.string().min(1),
            required: z.boolean(),
            searchable: z.boolean().optional(),
          }),
        )
        .optional(),
    }),
  ),
});
/** Import media attachments; `stream` toggles streamed progress responses. */
export const wpMediaImportBody = z.object({
  attachments: z.array(z.record(z.string(), z.unknown())),
  stream: z.boolean().optional(),
});
/** Rewrite old-site URLs to new ones across (optionally selected) collections. */
export const wpRewriteUrlsBody = z.object({
  urlMap: z.record(z.string(), z.string()),
  collections: z.array(z.string()).optional(),
});

View File

@@ -0,0 +1,17 @@
export * from "./common.js";
export * from "./content.js";
export * from "./media.js";
export * from "./schema.js";
export * from "./comments.js";
export * from "./auth.js";
export * from "./menus.js";
export * from "./taxonomies.js";
export * from "./sections.js";
export * from "./settings.js";
export * from "./search.js";
export * from "./import.js";
export * from "./setup.js";
export * from "./users.js";
export * from "./widgets.js";
export * from "./redirects.js";
export * from "./bylines.js";

View File

@@ -0,0 +1,116 @@
import { z } from "zod";
import { cursorPaginationQuery } from "./common.js";
// ---------------------------------------------------------------------------
// Media: Input schemas
// ---------------------------------------------------------------------------
export const mediaListQuery = cursorPaginationQuery
  .extend({
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaListQuery" });
/** Editable media metadata; dimensions are in pixels. */
export const mediaUpdateBody = z
  .object({
    alt: z.string().optional(),
    caption: z.string().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaUpdateBody" });
/** Maximum allowed file upload size (50 MB). */
const MAX_UPLOAD_SIZE = 50 * 1024 * 1024;
/** Request a signed upload URL; `contentHash` enables dedup against existing media. */
export const mediaUploadUrlBody = z
  .object({
    filename: z.string().min(1, "filename is required"),
    contentType: z.string().min(1, "contentType is required"),
    size: z
      .number()
      .int()
      .positive()
      .max(MAX_UPLOAD_SIZE, `File size must not exceed ${MAX_UPLOAD_SIZE / 1024 / 1024}MB`),
    contentHash: z.string().optional(),
  })
  .meta({ id: "MediaUploadUrlBody" });
/** Confirm a completed upload, optionally supplying final size/dimensions. */
export const mediaConfirmBody = z
  .object({
    size: z.number().int().positive().optional(),
    width: z.number().int().positive().optional(),
    height: z.number().int().positive().optional(),
  })
  .meta({ id: "MediaConfirmBody" });
export const mediaProviderListQuery = cursorPaginationQuery
  .extend({
    query: z.string().optional(),
    mimeType: z.string().optional(),
  })
  .meta({ id: "MediaProviderListQuery" });
// ---------------------------------------------------------------------------
// Media: Response schemas
// ---------------------------------------------------------------------------
// pending = signed URL issued but upload not yet confirmed.
const mediaStatusSchema = z.enum(["pending", "ready", "failed"]);
export const mediaItemSchema = z
  .object({
    id: z.string(),
    filename: z.string(),
    mimeType: z.string(),
    size: z.number().nullable(),
    width: z.number().nullable(),
    height: z.number().nullable(),
    alt: z.string().nullable(),
    caption: z.string().nullable(),
    storageKey: z.string(),
    status: mediaStatusSchema,
    contentHash: z.string().nullable(),
    // Image placeholder metadata (null for non-images or when not computed).
    blurhash: z.string().nullable(),
    dominantColor: z.string().nullable(),
    createdAt: z.string(),
    authorId: z.string().nullable(),
  })
  .meta({ id: "MediaItem" });
export const mediaResponseSchema = z
  .object({ item: mediaItemSchema })
  .meta({ id: "MediaResponse" });
export const mediaListResponseSchema = z
  .object({
    items: z.array(mediaItemSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "MediaListResponse" });
/** Signed-URL upload instructions: PUT the file to `uploadUrl` with `headers` before `expiresAt`. */
export const mediaUploadUrlResponseSchema = z
  .object({
    uploadUrl: z.string(),
    method: z.literal("PUT"),
    headers: z.record(z.string(), z.string()),
    mediaId: z.string(),
    storageKey: z.string(),
    expiresAt: z.string(),
  })
  .meta({ id: "MediaUploadUrlResponse" });
/** Returned instead of an upload URL when contentHash matched an existing file (dedup). */
export const mediaExistingResponseSchema = z
  .object({
    existing: z.literal(true),
    mediaId: z.string(),
    storageKey: z.string(),
    url: z.string(),
  })
  .meta({ id: "MediaExistingResponse" });
export const mediaConfirmResponseSchema = z
  .object({
    item: mediaItemSchema.extend({ url: z.string() }),
  })
  .meta({ id: "MediaConfirmResponse" });

View File

@@ -0,0 +1,111 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Menus: Input schemas
// ---------------------------------------------------------------------------
// Item type is an open string (e.g. custom/content-reference kinds are route-defined).
const menuItemType = z.string().min(1);
/** `name` is the stable identifier; `label` is the display text. */
export const createMenuBody = z
  .object({
    name: z.string().min(1),
    label: z.string().min(1),
  })
  .meta({ id: "CreateMenuBody" });
export const updateMenuBody = z
  .object({
    label: z.string().min(1).optional(),
  })
  .meta({ id: "UpdateMenuBody" });
/** Either a content reference (referenceCollection + referenceId) or a customUrl, per `type`. */
export const createMenuItemBody = z
  .object({
    type: menuItemType,
    label: z.string().min(1),
    referenceCollection: z.string().optional(),
    referenceId: z.string().optional(),
    customUrl: z.string().optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().optional(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "CreateMenuItemBody" });
/** parentId is nullish so an item can be explicitly moved to the top level. */
export const updateMenuItemBody = z
  .object({
    label: z.string().min(1).optional(),
    customUrl: z.string().optional(),
    target: z.string().optional(),
    titleAttr: z.string().optional(),
    cssClasses: z.string().optional(),
    parentId: z.string().nullish(),
    sortOrder: z.number().int().min(0).optional(),
  })
  .meta({ id: "UpdateMenuItemBody" });
export const menuItemDeleteQuery = z.object({
  id: z.string().min(1),
});
export const menuItemUpdateQuery = z.object({
  id: z.string().min(1),
});
/** Full reorder: every item's new parent and position in one payload. */
export const reorderMenuItemsBody = z
  .object({
    items: z.array(
      z.object({
        id: z.string().min(1),
        parentId: z.string().nullable(),
        sortOrder: z.number().int().min(0),
      }),
    ),
  })
  .meta({ id: "ReorderMenuItemsBody" });
// ---------------------------------------------------------------------------
// Menus: Response schemas
// ---------------------------------------------------------------------------
// NOTE(review): these response schemas use snake_case keys, unlike the
// camelCase responses elsewhere in this package — presumably raw DB row
// shapes are returned as-is; confirm before "fixing".
export const menuSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "Menu" });
export const menuItemSchema = z
  .object({
    id: z.string(),
    menu_id: z.string(),
    parent_id: z.string().nullable(),
    sort_order: z.number().int(),
    type: z.string(),
    reference_collection: z.string().nullable(),
    reference_id: z.string().nullable(),
    custom_url: z.string().nullable(),
    label: z.string(),
    title_attr: z.string().nullable(),
    target: z.string().nullable(),
    css_classes: z.string().nullable(),
    created_at: z.string(),
  })
  .meta({ id: "MenuItem" });
export const menuListItemSchema = menuSchema
  .extend({
    itemCount: z.number().int(),
  })
  .meta({ id: "MenuListItem" });
export const menuWithItemsSchema = menuSchema
  .extend({
    items: z.array(menuItemSchema),
  })
  .meta({ id: "MenuWithItems" });

View File

@@ -0,0 +1,155 @@
import { z } from "zod";
import { cursorPaginationQuery } from "./common.js";
// ---------------------------------------------------------------------------
// Redirects: Input schemas
// ---------------------------------------------------------------------------
// Only the permanent/temporary HTTP redirect status codes are allowed.
const redirectType = z.coerce
  .number()
  .int()
  .refine((n) => [301, 302, 307, 308].includes(n), {
    message: "Redirect type must be 301, 302, 307, or 308",
  });
/** Matches CR or LF characters */
const CRLF = /[\r\n]/;
/** Path must start with / and not be protocol-relative, contain no CRLF, and no path traversal */
const urlPath = z
  .string()
  .min(1)
  .refine((s) => s.startsWith("/") && !s.startsWith("//"), {
    message: "Must be a path starting with / (no protocol-relative URLs)",
  })
  // CR/LF would allow response-splitting via the Location header.
  .refine((s) => !CRLF.test(s), {
    message: "URL must not contain newline characters",
  })
  // Decode first so percent-encoded `..` segments are also caught; a string
  // that fails to decode is rejected outright.
  .refine(
    (s) => {
      try {
        return !decodeURIComponent(s).split("/").includes("..");
      } catch {
        return false;
      }
    },
    { message: "URL must not contain path traversal segments" },
  );
export const createRedirectBody = z
  .object({
    source: urlPath,
    destination: urlPath,
    type: redirectType.optional().default(301),
    enabled: z.boolean().optional().default(true),
    groupName: z.string().nullish(),
  })
  .meta({ id: "CreateRedirectBody" });
/** Partial update; at least one field is required so a no-op body is rejected. */
export const updateRedirectBody = z
  .object({
    source: urlPath.optional(),
    destination: urlPath.optional(),
    type: redirectType.optional(),
    enabled: z.boolean().optional(),
    groupName: z.string().nullish(),
  })
  .refine((o) => Object.values(o).some((v) => v !== undefined), {
    message: "At least one field must be provided",
  })
  .meta({ id: "UpdateRedirectBody" });
// `enabled`/`auto` arrive as query-string "true"/"false" and are transformed to booleans.
export const redirectsListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
    group: z.string().optional(),
    enabled: z
      .enum(["true", "false"])
      .transform((v) => v === "true")
      .optional(),
    auto: z
      .enum(["true", "false"])
      .transform((v) => v === "true")
      .optional(),
  })
  .meta({ id: "RedirectsListQuery" });
// ---------------------------------------------------------------------------
// 404 Log: Input schemas
// ---------------------------------------------------------------------------
export const notFoundListQuery = cursorPaginationQuery
  .extend({
    search: z.string().optional(),
  })
  .meta({ id: "NotFoundListQuery" });
export const notFoundSummaryQuery = z.object({
  limit: z.coerce.number().int().min(1).max(100).optional().default(50),
});
/** Prune 404-log entries older than the given ISO 8601 timestamp. */
export const notFoundPruneBody = z
  .object({
    olderThan: z.string().datetime({ message: "olderThan must be an ISO 8601 datetime" }),
  })
  .meta({ id: "NotFoundPruneBody" });
// ---------------------------------------------------------------------------
// Redirects: Response schemas
// ---------------------------------------------------------------------------
export const redirectSchema = z
  .object({
    id: z.string(),
    source: z.string(),
    destination: z.string(),
    type: z.number().int(),
    isPattern: z.boolean(),
    enabled: z.boolean(),
    // Hit tracking: total count plus timestamp of the most recent match.
    hits: z.number().int(),
    lastHitAt: z.string().nullable(),
    groupName: z.string().nullable(),
    auto: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Redirect" });
export const redirectListResponseSchema = z
  .object({
    items: z.array(redirectSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "RedirectListResponse" });
export const notFoundEntrySchema = z
  .object({
    id: z.string(),
    path: z.string(),
    referrer: z.string().nullable(),
    userAgent: z.string().nullable(),
    ip: z.string().nullable(),
    createdAt: z.string(),
  })
  .meta({ id: "NotFoundEntry" });
export const notFoundListResponseSchema = z
  .object({
    items: z.array(notFoundEntrySchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "NotFoundListResponse" });
/** Aggregated 404s grouped by path for the dashboard. */
export const notFoundSummarySchema = z
  .object({
    path: z.string(),
    count: z.number().int(),
    lastSeen: z.string(),
    topReferrer: z.string().nullable(),
  })
  .meta({ id: "NotFoundSummary" });
export const notFoundSummaryResponseSchema = z
  .object({ items: z.array(notFoundSummarySchema) })
  .meta({ id: "NotFoundSummaryResponse" });

View File

@@ -0,0 +1,203 @@
import { z } from "zod";
import { slugPattern } from "./common.js";
// ---------------------------------------------------------------------------
// Schema (collections & fields): Input schemas
// ---------------------------------------------------------------------------
/** Optional features a collection can opt into. */
const collectionSupportValues = z.enum(["drafts", "revisions", "preview", "scheduling", "search"]);
/** Provenance of a collection: `template:<name>`, `import:<name>`, or a fixed origin. */
const collectionSourcePattern = /^(template:.+|import:.+|manual|discovered|seed)$/;
/** Every field type understood by the schema system. */
const fieldTypeValues = z.enum([
  "string",
  "text",
  "number",
  "integer",
  "boolean",
  "datetime",
  "select",
  "multiSelect",
  "portableText",
  "image",
  "file",
  "reference",
  "json",
  "slug",
]);
/** Per-field validation rules; every constraint is optional. */
const fieldValidation = z
  .object({
    required: z.boolean().optional(),
    // Numeric bounds.
    min: z.number().optional(),
    max: z.number().optional(),
    // String length bounds.
    minLength: z.number().int().min(0).optional(),
    maxLength: z.number().int().min(0).optional(),
    pattern: z.string().optional(),
    // NOTE(review): presumably the allowed values for select/multiSelect — confirm.
    options: z.array(z.string()).optional(),
  })
  .optional();
/** Free-form, widget-specific configuration (shape not validated here). */
const fieldWidgetOptions = z.record(z.string(), z.unknown()).optional();
/** Body for creating a collection. */
export const createCollectionBody = z
  .object({
    slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
    label: z.string().min(1),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
    icon: z.string().optional(),
    supports: z.array(collectionSupportValues).optional(),
    source: z.string().regex(collectionSourcePattern).optional(),
    urlPattern: z.string().optional(),
    hasSeo: z.boolean().optional(),
  })
  .meta({ id: "CreateCollectionBody" });
/**
 * Body for updating a collection. `slug` and `source` are absent and therefore
 * immutable through this endpoint.
 *
 * NOTE(review): unlike updateRedirectBody there is no "at least one field"
 * refinement, so an empty body is accepted — confirm that is intentional.
 */
export const updateCollectionBody = z
  .object({
    label: z.string().min(1).optional(),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
    icon: z.string().optional(),
    supports: z.array(collectionSupportValues).optional(),
    // nullish: null explicitly clears the URL pattern.
    urlPattern: z.string().nullish(),
    hasSeo: z.boolean().optional(),
    commentsEnabled: z.boolean().optional(),
    commentsModeration: z.enum(["all", "first_time", "none"]).optional(),
    commentsClosedAfterDays: z.number().int().min(0).optional(),
    commentsAutoApproveUsers: z.boolean().optional(),
  })
  .meta({ id: "UpdateCollectionBody" });
/** Body for creating a field on a collection. */
export const createFieldBody = z
  .object({
    slug: z.string().min(1).max(63).regex(slugPattern, "Invalid slug format"),
    label: z.string().min(1),
    type: fieldTypeValues,
    required: z.boolean().optional(),
    unique: z.boolean().optional(),
    defaultValue: z.unknown().optional(),
    validation: fieldValidation,
    widget: z.string().optional(),
    options: fieldWidgetOptions,
    sortOrder: z.number().int().min(0).optional(),
    searchable: z.boolean().optional(),
    translatable: z.boolean().optional(),
  })
  .meta({ id: "CreateFieldBody" });
/** Body for updating a field. `slug` and `type` are immutable after creation. */
export const updateFieldBody = z
  .object({
    label: z.string().min(1).optional(),
    required: z.boolean().optional(),
    unique: z.boolean().optional(),
    defaultValue: z.unknown().optional(),
    validation: fieldValidation,
    widget: z.string().optional(),
    options: fieldWidgetOptions,
    sortOrder: z.number().int().min(0).optional(),
    searchable: z.boolean().optional(),
    translatable: z.boolean().optional(),
  })
  .meta({ id: "UpdateFieldBody" });
/** Body for reordering a collection's fields (presumably applied in array order — confirm). */
export const fieldReorderBody = z
  .object({
    fieldSlugs: z.array(z.string().min(1)),
  })
  .meta({ id: "FieldReorderBody" });
/** Body for registering an orphaned table as a collection again. */
export const orphanRegisterBody = z
  .object({
    label: z.string().optional(),
    labelSingular: z.string().optional(),
    description: z.string().optional(),
  })
  .meta({ id: "OrphanRegisterBody" });
/** Query parameters for exporting the schema. */
export const schemaExportQuery = z
  .object({
    format: z.string().optional(),
  })
  // Registered with ids (like every other query schema in this package) so
  // they appear under stable names in generated OpenAPI output.
  .meta({ id: "SchemaExportQuery" });
/** Query parameters for fetching a single collection. */
export const collectionGetQuery = z
  .object({
    // Accepts any string; only the exact literal "true" enables field inclusion.
    includeFields: z
      .string()
      .transform((v) => v === "true")
      .optional(),
  })
  .meta({ id: "CollectionGetQuery" });
// ---------------------------------------------------------------------------
// Schema: Response schemas
// ---------------------------------------------------------------------------
/** A collection definition as returned by the API. Timestamps are ISO strings. */
export const collectionSchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    label: z.string(),
    labelSingular: z.string().nullable(),
    description: z.string().nullable(),
    icon: z.string().nullable(),
    supports: z.array(z.string()),
    source: z.string().nullable(),
    urlPattern: z.string().nullable(),
    hasSeo: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Collection" });
/** A field definition as returned by the API. */
export const fieldSchema = z
  .object({
    id: z.string(),
    collectionId: z.string(),
    slug: z.string(),
    label: z.string(),
    type: fieldTypeValues,
    required: z.boolean(),
    unique: z.boolean(),
    defaultValue: z.unknown().nullable(),
    validation: z.record(z.string(), z.unknown()).nullable(),
    widget: z.string().nullable(),
    options: z.record(z.string(), z.unknown()).nullable(),
    sortOrder: z.number().int(),
    searchable: z.boolean(),
    translatable: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Field" });
/** `{ item }` wrapper around a single collection. */
export const collectionResponseSchema = z
  .object({ item: collectionSchema })
  .meta({ id: "CollectionResponse" });
/** Collection plus its full field list (used when ?includeFields=true). */
export const collectionWithFieldsResponseSchema = z
  .object({
    item: collectionSchema.extend({ fields: z.array(fieldSchema) }),
  })
  .meta({ id: "CollectionWithFieldsResponse" });
export const collectionListResponseSchema = z
  .object({ items: z.array(collectionSchema) })
  .meta({ id: "CollectionListResponse" });
export const fieldResponseSchema = z.object({ item: fieldSchema }).meta({ id: "FieldResponse" });
export const fieldListResponseSchema = z
  .object({ items: z.array(fieldSchema) })
  .meta({ id: "FieldListResponse" });
/** A database table whose collection definition no longer exists. */
export const orphanedTableSchema = z
  .object({
    slug: z.string(),
    tableName: z.string(),
    rowCount: z.number().int(),
  })
  .meta({ id: "OrphanedTable" });
export const orphanedTableListResponseSchema = z
  .object({ items: z.array(orphanedTableSchema) })
  .meta({ id: "OrphanedTableListResponse" });

View File

@@ -0,0 +1,63 @@
import { z } from "zod";
import { localeCode } from "./common.js";
// ---------------------------------------------------------------------------
// Search: Input schemas
// ---------------------------------------------------------------------------
/** Query parameters for full-text search. */
export const searchQuery = z
  .object({
    q: z.string().min(1),
    // NOTE(review): presumably a comma-separated list of collection slugs — confirm.
    collections: z.string().optional(),
    status: z.string().optional(),
    locale: localeCode.optional(),
    limit: z.coerce.number().int().min(1).max(100).optional(),
  })
  .meta({ id: "SearchQuery" });
/** Query parameters for typeahead suggestions (tighter limit cap than search). */
export const searchSuggestQuery = z
  .object({
    q: z.string().min(1),
    collections: z.string().optional(),
    locale: localeCode.optional(),
    limit: z.coerce.number().int().min(1).max(20).optional(),
  })
  .meta({ id: "SearchSuggestQuery" });
/** Body to rebuild the search index for one collection. */
export const searchRebuildBody = z
  .object({
    collection: z.string().min(1),
  })
  .meta({ id: "SearchRebuildBody" });
/** Body to enable/disable search for a collection, with optional field weights. */
export const searchEnableBody = z
  .object({
    collection: z.string().min(1),
    enabled: z.boolean(),
    // Map of field slug -> relative ranking weight.
    weights: z.record(z.string(), z.number()).optional(),
  })
  .meta({ id: "SearchEnableBody" });
// ---------------------------------------------------------------------------
// Search: Response schemas
// ---------------------------------------------------------------------------
/** A single search hit. */
export const searchResultSchema = z
  .object({
    collection: z.string(),
    id: z.string(),
    slug: z.string().nullable(),
    locale: z.string(),
    title: z.string().optional(),
    // Highlighted excerpt around the match, when available.
    snippet: z.string().optional(),
    score: z.number(),
  })
  .meta({ id: "SearchResult" });
/** Cursor-paginated search results. */
export const searchResponseSchema = z
  .object({
    items: z.array(searchResultSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "SearchResponse" });

View File

@@ -0,0 +1,67 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Sections: Input schemas
// ---------------------------------------------------------------------------
/** Origin of a section: bundled with a theme, user-authored, or imported. */
const sectionSource = z.enum(["theme", "user", "import"]);
/** Section content payload: an array of loosely-typed block objects. */
const sectionContent = z.array(z.record(z.string(), z.unknown()));
/** Query parameters for listing sections (cursor-paginated, filterable). */
export const sectionsListQuery = z
  .object({
    source: sectionSource.optional(),
    search: z.string().optional(),
    limit: z.coerce.number().int().min(1).max(100).optional(),
    cursor: z.string().optional(),
  })
  .meta({ id: "SectionsListQuery" });
/** Body for creating a section. */
export const createSectionBody = z
  .object({
    slug: z.string().min(1),
    title: z.string().min(1),
    description: z.string().optional(),
    keywords: z.array(z.string()).optional(),
    content: sectionContent,
    previewMediaId: z.string().optional(),
    source: sectionSource.optional(),
    themeId: z.string().optional(),
  })
  .meta({ id: "CreateSectionBody" });
/** Body for updating a section; previewMediaId accepts null to clear it. */
export const updateSectionBody = z
  .object({
    slug: z.string().min(1).optional(),
    title: z.string().min(1).optional(),
    description: z.string().optional(),
    keywords: z.array(z.string()).optional(),
    content: sectionContent.optional(),
    previewMediaId: z.string().nullish(),
  })
  .meta({ id: "UpdateSectionBody" });
// ---------------------------------------------------------------------------
// Sections: Response schemas
// ---------------------------------------------------------------------------
/** A reusable section as returned by the API. Timestamps are ISO strings. */
export const sectionSchema = z
  .object({
    id: z.string(),
    slug: z.string(),
    title: z.string(),
    description: z.string().nullable(),
    keywords: z.array(z.string()).nullable(),
    // Array of loosely-typed content blocks.
    content: z.array(z.record(z.string(), z.unknown())),
    previewMediaId: z.string().nullable(),
    source: z.string(),
    // Set when the section was bundled with a theme.
    themeId: z.string().nullable(),
    createdAt: z.string(),
    updatedAt: z.string(),
  })
  .meta({ id: "Section" });
/** Cursor-paginated list of sections. */
export const sectionListResponseSchema = z
  .object({
    items: z.array(sectionSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "SectionListResponse" });

View File

@@ -0,0 +1,63 @@
import { z } from "zod";
import { httpUrl } from "./common.js";
// ---------------------------------------------------------------------------
// Settings: Input schemas
// ---------------------------------------------------------------------------
/** Reference to a media-library item, with optional alt text. */
const mediaReference = z.object({
  mediaId: z.string(),
  alt: z.string().optional(),
});
/** Per-network social profile values (free-form strings, not validated as URLs). */
const socialSettings = z.object({
  twitter: z.string().optional(),
  github: z.string().optional(),
  facebook: z.string().optional(),
  instagram: z.string().optional(),
  linkedin: z.string().optional(),
  youtube: z.string().optional(),
});
/** Site-wide SEO configuration. */
const seoSettings = z.object({
  titleSeparator: z.string().max(10).optional(),
  defaultOgImage: mediaReference.optional(),
  robotsTxt: z.string().max(5000).optional(),
  googleVerification: z.string().max(100).optional(),
  bingVerification: z.string().max(100).optional(),
});
/** Body for updating site settings; every field is optional. */
export const settingsUpdateBody = z
  .object({
    title: z.string().optional(),
    tagline: z.string().optional(),
    logo: mediaReference.optional(),
    favicon: mediaReference.optional(),
    // Accepts a valid http(s) URL or the empty string — presumably "" clears
    // the stored site URL; confirm against the handler.
    url: z.union([httpUrl, z.literal("")]).optional(),
    postsPerPage: z.number().int().min(1).max(100).optional(),
    dateFormat: z.string().optional(),
    timezone: z.string().optional(),
    social: socialSettings.optional(),
    seo: seoSettings.optional(),
  })
  .meta({ id: "SettingsUpdateBody" });
// ---------------------------------------------------------------------------
// Settings: Response schemas
// ---------------------------------------------------------------------------
/** Site settings as returned by the API (all fields optional). */
export const siteSettingsSchema = z
  .object({
    title: z.string().optional(),
    tagline: z.string().optional(),
    logo: mediaReference.optional(),
    favicon: mediaReference.optional(),
    url: z.string().optional(),
    postsPerPage: z.number().int().optional(),
    dateFormat: z.string().optional(),
    timezone: z.string().optional(),
    social: socialSettings.optional(),
    seo: seoSettings.optional(),
  })
  .meta({ id: "SiteSettings" });

View File

@@ -0,0 +1,37 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Setup
// ---------------------------------------------------------------------------
/** Registration credential — duplicated reference for setup flow.
 * The canonical definition lives in auth.ts but setup needs it independently
 * because setup runs before auth is configured. */
const authenticatorTransport = z.enum(["usb", "nfc", "ble", "internal", "hybrid"]);
// WebAuthn registration response as serialized by the browser client.
// NOTE(review): fields are accepted as opaque strings here; the encoding
// (presumably base64url) is validated elsewhere — confirm.
const registrationCredential = z.object({
  id: z.string(),
  rawId: z.string(),
  type: z.literal("public-key"),
  response: z.object({
    clientDataJSON: z.string(),
    attestationObject: z.string(),
    transports: z.array(authenticatorTransport).optional(),
  }),
  authenticatorAttachment: z.enum(["platform", "cross-platform"]).optional(),
});
/** Body for initial site setup (title/tagline; includeContent presumably seeds starter content — confirm). */
export const setupBody = z.object({
  title: z.string().min(1),
  tagline: z.string().optional(),
  includeContent: z.boolean(),
});
/** Body declaring the first admin account. */
export const setupAdminBody = z.object({
  email: z.string().email(),
  name: z.string().optional(),
});
/** Body verifying the admin's passkey registration.
 * NOTE(review): unlike other input schemas, the setup schemas carry no
 * .meta() ids — confirm whether they should appear in OpenAPI output. */
export const setupAdminVerifyBody = z.object({
  credential: registrationCredential,
});

View File

@@ -0,0 +1,113 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Taxonomy definitions: Input schemas
// ---------------------------------------------------------------------------
/** Collection slug format: lowercase alphanumeric + underscores, starts with letter */
const collectionSlugPattern = /^[a-z][a-z0-9_]*$/;
/** Body for creating a taxonomy definition. */
export const createTaxonomyDefBody = z
  .object({
    // Taxonomy names share the collection-slug grammar; reuse the named
    // pattern instead of duplicating the literal regex.
    name: z
      .string()
      .min(1)
      .max(63)
      .regex(collectionSlugPattern, "Name must be lowercase alphanumeric with underscores"),
    label: z.string().min(1).max(200),
    hierarchical: z.boolean().optional().default(false),
    // Slugs of the collections this taxonomy attaches to.
    collections: z
      .array(
        z.string().min(1).max(63).regex(collectionSlugPattern, "Invalid collection slug format"),
      )
      .max(100)
      .optional()
      .default([]),
  })
  .meta({ id: "CreateTaxonomyDefBody" });
// ---------------------------------------------------------------------------
// Taxonomy terms: Input schemas
// ---------------------------------------------------------------------------
/** Body for creating a taxonomy term. */
export const createTermBody = z
  .object({
    slug: z.string().min(1),
    label: z.string().min(1),
    // For hierarchical taxonomies; null/undefined means a root-level term.
    parentId: z.string().nullish(),
    description: z.string().optional(),
  })
  .meta({ id: "CreateTermBody" });
/** Body for updating a term; parentId accepts null to move it to the root. */
export const updateTermBody = z
  .object({
    slug: z.string().min(1).optional(),
    label: z.string().min(1).optional(),
    parentId: z.string().nullish(),
    description: z.string().optional(),
  })
  .meta({ id: "UpdateTermBody" });
// ---------------------------------------------------------------------------
// Taxonomies: Response schemas
// ---------------------------------------------------------------------------
/** A taxonomy definition as returned by the API. */
export const taxonomyDefSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    labelSingular: z.string().optional(),
    hierarchical: z.boolean(),
    // Slugs of the collections this taxonomy is attached to.
    collections: z.array(z.string()),
  })
  .meta({ id: "TaxonomyDef" });
export const taxonomyListResponseSchema = z
  .object({ taxonomies: z.array(taxonomyDefSchema) })
  .meta({ id: "TaxonomyListResponse" });
/** A term without usage counts or nested children. */
export const termSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    slug: z.string(),
    label: z.string(),
    parentId: z.string().nullable(),
    description: z.string().optional(),
  })
  .meta({ id: "Term" });
/**
 * A term with its usage count and recursively nested children.
 * The explicit `z.ZodType` annotation is required because the schema is
 * self-referential (`children` references it via z.lazy), which TypeScript
 * cannot infer on its own.
 */
export const termWithCountSchema: z.ZodType = z
  .object({
    id: z.string(),
    name: z.string(),
    slug: z.string(),
    label: z.string(),
    parentId: z.string().nullable(),
    description: z.string().optional(),
    count: z.number().int(),
    children: z.array(z.lazy(() => termWithCountSchema)),
  })
  .meta({ id: "TermWithCount" });
export const termListResponseSchema = z
  .object({ terms: z.array(termWithCountSchema) })
  .meta({ id: "TermListResponse" });
export const termResponseSchema = z.object({ term: termSchema }).meta({ id: "TermResponse" });
/** Single-term detail: adds count and a shallow list of direct children. */
export const termGetResponseSchema = z
  .object({
    term: termSchema.extend({
      count: z.number().int(),
      children: z.array(
        z.object({
          id: z.string(),
          slug: z.string(),
          label: z.string(),
        }),
      ),
    }),
  })
  .meta({ id: "TermGetResponse" });

View File

@@ -0,0 +1,96 @@
import { z } from "zod";
import { roleLevel } from "./common.js";
// ---------------------------------------------------------------------------
// Admin / Users: Input schemas
// ---------------------------------------------------------------------------
/** Query parameters for listing users (cursor-paginated). */
export const usersListQuery = z
  .object({
    search: z.string().optional(),
    role: z.string().optional(),
    cursor: z.string().optional(),
    limit: z.coerce.number().int().min(1).max(100).optional().default(50),
  })
  .meta({ id: "UsersListQuery" });
/** Body for updating a user's profile fields or role level. */
export const userUpdateBody = z
  .object({
    name: z.string().optional(),
    email: z.string().email().optional(),
    role: roleLevel.optional(),
  })
  .meta({ id: "UserUpdateBody" });
/** Body for allowing a sign-up email domain with a default role. */
export const allowedDomainCreateBody = z
  .object({
    domain: z.string().min(1),
    defaultRole: roleLevel,
  })
  .meta({ id: "AllowedDomainCreateBody" });
/** Body for updating an allowed sign-up domain. */
export const allowedDomainUpdateBody = z
  .object({
    enabled: z.boolean().optional(),
    defaultRole: roleLevel.optional(),
  })
  .meta({ id: "AllowedDomainUpdateBody" });
// ---------------------------------------------------------------------------
// Admin / Users: Response schemas
// ---------------------------------------------------------------------------
/** User record as returned by list endpoints. Timestamps are ISO strings. */
export const userSchema = z
  .object({
    id: z.string(),
    email: z.string(),
    name: z.string().nullable(),
    avatarUrl: z.string().nullable(),
    // Numeric role level (see roleLevel in common.js).
    role: z.number().int(),
    emailVerified: z.boolean(),
    disabled: z.boolean(),
    createdAt: z.string(),
    updatedAt: z.string(),
    lastLogin: z.string().nullable(),
    // Summary counts, only present on some responses.
    credentialCount: z.number().int().optional(),
    oauthProviders: z.array(z.string()).optional(),
  })
  .meta({ id: "User" });
/** Cursor-paginated list of users. */
export const userListResponseSchema = z
  .object({
    items: z.array(userSchema),
    nextCursor: z.string().optional(),
  })
  .meta({ id: "UserListResponse" });
/**
 * Detailed user record: the base user fields plus full credential and OAuth
 * account listings. Derived from userSchema (replacing the summary fields)
 * so the shared fields cannot drift out of sync.
 */
export const userDetailSchema = userSchema
  .omit({ credentialCount: true, oauthProviders: true })
  .extend({
    // Registered credentials — presumably WebAuthn passkeys; confirm.
    credentials: z.array(
      z.object({
        id: z.string(),
        name: z.string().nullable(),
        deviceType: z.string().nullable(),
        createdAt: z.string(),
        lastUsedAt: z.string(),
      }),
    ),
    // Linked OAuth provider accounts.
    oauthAccounts: z.array(
      z.object({
        provider: z.string(),
        createdAt: z.string(),
      }),
    ),
  })
  .meta({ id: "UserDetail" });

View File

@@ -0,0 +1,80 @@
import { z } from "zod";
// ---------------------------------------------------------------------------
// Widgets: Input schemas
// ---------------------------------------------------------------------------
/** The three widget flavors an area can contain. */
const widgetType = z.enum(["content", "menu", "component"]);
/** Content payload for content-type widgets: an array of loose block objects. */
const widgetContent = z.array(z.record(z.string(), z.unknown()));
/** Free-form props forwarded to component-type widgets. */
const widgetComponentProps = z.record(z.string(), z.unknown());
/** Body for creating a widget area. */
export const createWidgetAreaBody = z
  .object({
    name: z.string().min(1),
    label: z.string().min(1),
    description: z.string().optional(),
  })
  .meta({ id: "CreateWidgetAreaBody" });
/** Body for creating a widget. Which payload fields apply depends on `type`. */
export const createWidgetBody = z
  .object({
    type: widgetType,
    title: z.string().optional(),
    content: widgetContent.optional(),
    menuName: z.string().optional(),
    componentId: z.string().optional(),
    componentProps: widgetComponentProps.optional(),
  })
  .meta({ id: "CreateWidgetBody" });
/** Body for updating a widget; identical shape to create but fully optional. */
export const updateWidgetBody = z
  .object({
    type: widgetType.optional(),
    title: z.string().optional(),
    content: widgetContent.optional(),
    menuName: z.string().optional(),
    componentId: z.string().optional(),
    componentProps: widgetComponentProps.optional(),
  })
  .meta({ id: "UpdateWidgetBody" });
/** Body for reordering the widgets within an area. */
export const reorderWidgetsBody = z
  .object({
    widgetIds: z.array(z.string().min(1)),
  })
  .meta({ id: "ReorderWidgetsBody" });
// ---------------------------------------------------------------------------
// Widgets: Response schemas
// ---------------------------------------------------------------------------
/**
 * A widget area row.
 * NOTE(review): fields are snake_case, unlike the camelCase used by the other
 * response schemas — these appear to mirror raw database rows; confirm before
 * normalizing.
 */
export const widgetAreaSchema = z
  .object({
    id: z.string(),
    name: z.string(),
    label: z.string(),
    description: z.string().nullable(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "WidgetArea" });
/** A widget row (snake_case; content/props are serialized strings, not objects). */
export const widgetSchema = z
  .object({
    id: z.string(),
    area_id: z.string(),
    type: z.string(),
    title: z.string().nullable(),
    content: z.string().nullable(),
    menu_name: z.string().nullable(),
    component_id: z.string().nullable(),
    component_props: z.string().nullable(),
    sort_order: z.number().int(),
    created_at: z.string(),
    updated_at: z.string(),
  })
  .meta({ id: "Widget" });
/** A widget area together with its ordered widgets. */
export const widgetAreaWithWidgetsSchema = widgetAreaSchema
  .extend({
    widgets: z.array(widgetSchema),
  })
  .meta({ id: "WidgetAreaWithWidgets" });

View File

@@ -0,0 +1,25 @@
/**
* Resolve the canonical site base URL for use in outbound links (emails, etc.).
*
* Uses the stored `emdash:site_url` (set during setup on the real domain)
* so that Host header spoofing in later requests cannot redirect users to
* attacker-controlled domains.
*
* Falls back to the request URL only if no stored value exists (pre-setup).
*/
import type { Kysely } from "kysely";
import { OptionsRepository } from "../database/repositories/options.js";
import type { Database } from "../database/types.js";
/**
 * Return the canonical EmDash base URL ("<site>/_emdash") for outbound links.
 *
 * Prefers the stored `emdash:site_url` option over the request's Host header
 * so header spoofing cannot redirect generated links.
 *
 * @param db - Database handle used to read the options table.
 * @param request - Incoming request; used only as a pre-setup fallback.
 */
export async function getSiteBaseUrl(db: Kysely<Database>, request: Request): Promise<string> {
  const options = new OptionsRepository(db);
  const storedUrl = await options.get<string>("emdash:site_url");
  if (storedUrl) {
    // Strip trailing slashes so a stored "https://example.com/" does not
    // produce "https://example.com//_emdash".
    return `${storedUrl.replace(/\/+$/, "")}/_emdash`;
  }
  // Fallback: derive from request (only reached before setup completes)
  const url = new URL(request.url);
  return `${url.protocol}//${url.host}/_emdash`;
}

View File

@@ -0,0 +1,82 @@
/**
* API types for EmDash REST endpoints
*/
import type { ContentItem } from "../database/repositories/types.js";
/**
 * List response with cursor pagination
 */
export interface ListResponse<T> {
  items: T[];
  /** Present when more results exist; pass back as the cursor to continue. */
  nextCursor?: string;
}
/**
 * Content API responses
 */
export interface ContentListResponse extends ListResponse<ContentItem> {}
export interface ContentResponse {
  item: ContentItem;
  /** Opaque revision token for optimistic concurrency */
  _rev?: string;
}
/**
 * Manifest API response
 */
export interface ManifestResponse {
  version: string;
  /** Content hash of the manifest — presumably for cache invalidation; confirm. */
  hash: string;
  /** Collection descriptors keyed by collection slug. */
  collections: Record<
    string,
    {
      label: string;
      labelSingular: string;
      supports: string[];
      fields: Record<string, FieldDescriptor>;
    }
  >;
  /** Plugin descriptors keyed by plugin id. */
  plugins: Record<
    string,
    {
      adminPages?: Array<{ path: string; component: string }>;
      widgets?: string[];
    }
  >;
}
/** Lightweight field description exposed through the manifest. */
export interface FieldDescriptor {
  kind: string;
  label?: string;
  required?: boolean;
  options?: Array<{ value: string; label: string }>;
}
/**
 * Discriminated union for handler results.
 *
 * Handlers return `ApiResult<T>` -- either `{ success: true, data: T }` or
 * `{ success: false, error: { code, message } }`. The `success` literal
 * enables TypeScript narrowing on `.data`.
 *
 * The generic `E` parameter defaults to `string` (any error-code value); it
 * can be narrowed, e.g. to `OAuthErrorCode` for OAuth token-endpoint handlers.
 *
 * Use `unwrapResult()` from `error.ts` to convert to an HTTP Response.
 */
export type ApiResult<T, E extends string = string> =
  | { success: true; data: T }
  | {
      success: false;
      error: { code: E; message: string; details?: Record<string, unknown> };
    };
/**
 * API request context
 */
export interface ApiContext {
  /** Authenticated user id, when a session is present. */
  userId?: string;
  /** Role of the authenticated user, when known. */
  userRole?: string;
}

View File

@@ -0,0 +1,27 @@
/**
* emdash/astro
*
* Astro integration for EmDash CMS (build-time only)
*
* For runtime APIs (loader, query functions, dialects), import from "emdash" directly.
* For Cloudflare-specific adapters (d1, r2, access), import from "@emdashcms/cloudflare".
*/
// Locals types (for typing Astro.locals in API routes)
export type {
EmDashHandlers,
EmDashManifest,
MediaItem,
ContentItem,
ManifestCollection,
} from "./types.js";
// Storage adapters (for integration config)
// Note: For R2 bindings, use `r2()` from `@emdashcms/cloudflare`
export { local, s3 } from "./storage/index.js";
export type { StorageDescriptor, LocalStorageConfig, S3StorageConfig } from "./storage/index.js";
// Integration (build-time only - the emdash() function uses Node.js APIs)
export { default } from "./integration/index.js";
export { getStoredConfig } from "./integration/runtime.js";
export type { EmDashConfig, ResolvedPlugin } from "./integration/runtime.js";

View File

@@ -0,0 +1,303 @@
/**
* EmDash Astro Integration
*
* This integration:
* - Injects the admin shell route at /_emdash/admin/[...path].astro
* - Sets up REST API endpoints under /_emdash/api/*
* - Configures middleware to provide database and manifest
*
* NOTE: This file is for build-time only. Runtime utilities are in runtime.ts
* to avoid bundling Node.js-only code into the production build.
*/
import type { AstroIntegration, AstroIntegrationLogger } from "astro";
import type { ResolvedPlugin } from "../../plugins/types.js";
import { local } from "../storage/adapters.js";
import { injectCoreRoutes, injectBuiltinAuthRoutes, injectMcpRoute } from "./routes.js";
import type { EmDashConfig, PluginDescriptor } from "./runtime.js";
import { createViteConfig } from "./vite-config.js";
// Re-export runtime types and functions
export type {
EmDashConfig,
PluginDescriptor,
SandboxedPluginDescriptor,
ResolvedPlugin,
} from "./runtime.js";
export { getStoredConfig } from "./runtime.js";
/** Default storage: Local filesystem in .emdash directory */
const DEFAULT_STORAGE = local({
  directory: "./.emdash/uploads",
  baseUrl: "/_emdash/api/media/file",
});
// Terminal formatting (ANSI escapes; each helper closes its own attribute)
const dim = (s: string) => `\x1b[2m${s}\x1b[22m`;
const bold = (s: string) => `\x1b[1m${s}\x1b[22m`;
const cyan = (s: string) => `\x1b[36m${s}\x1b[39m`;
/** Print the EmDash startup banner */
function printBanner(_logger: AstroIntegrationLogger): void {
  // Banner matches the package name "emdash" (the previous "E C L I P T I C"
  // text was stale branding from before the rename).
  const banner = `
  ${bold(cyan("E M D A S H"))}
`;
  console.log(banner);
}
/** Print route injection summary */
function printRoutesSummary(_logger: AstroIntegrationLogger): void {
  // NOTE(review): dim("") renders an empty dim span — presumably a bullet
  // glyph was lost here; confirm the intended prefix character.
  console.log(`\n  ${dim("")} Admin UI ${cyan("/_emdash/admin")}`);
  console.log(`  ${dim("")} API ${cyan("/_emdash/api/*")}`);
  console.log("");
}
/**
* Create the EmDash Astro integration
*/
export function emdash(config: EmDashConfig = {}): AstroIntegration {
// Apply defaults
const resolvedConfig: EmDashConfig = {
...config,
storage: config.storage ?? DEFAULT_STORAGE,
};
// Validate marketplace URL
if (resolvedConfig.marketplace) {
const url = resolvedConfig.marketplace;
try {
const parsed = new URL(url);
const isLocalhost = parsed.hostname === "localhost" || parsed.hostname === "127.0.0.1";
if (parsed.protocol !== "https:" && !isLocalhost) {
throw new Error(
`Marketplace URL must use HTTPS (got ${parsed.protocol}). ` +
`Only localhost URLs are allowed over HTTP.`,
);
}
} catch (e) {
if (e instanceof TypeError) {
throw new Error(`Invalid marketplace URL: "${url}"`, { cause: e });
}
throw e;
}
if (!resolvedConfig.sandboxRunner) {
throw new Error(
"Marketplace requires `sandboxRunner` to be configured. " +
"Marketplace plugins run in sandboxed V8 isolates.",
);
}
}
// Plugin descriptors from config
const pluginDescriptors = resolvedConfig.plugins ?? [];
const sandboxedDescriptors = resolvedConfig.sandboxed ?? [];
// Validate all plugin descriptors
for (const descriptor of [...pluginDescriptors, ...sandboxedDescriptors]) {
// Standard-format plugins can't use features that require trusted mode
if (descriptor.format === "standard") {
if (descriptor.adminEntry) {
throw new Error(
`Plugin "${descriptor.id}" is standard format but declares adminEntry. ` +
`Standard plugins use Block Kit for admin UI, not React components. ` +
`Remove adminEntry or change format to "native".`,
);
}
if (descriptor.componentsEntry) {
throw new Error(
`Plugin "${descriptor.id}" is standard format but declares componentsEntry. ` +
`Portable Text block components require native format. ` +
`Remove componentsEntry or change format to "native".`,
);
}
}
}
// Validate: non-standard plugins cannot be placed in sandboxed: []
for (const descriptor of sandboxedDescriptors) {
if (descriptor.format !== "standard") {
throw new Error(
`Plugin "${descriptor.id}" uses the native format and cannot be placed in ` +
`\`sandboxed: []\`. Native plugins can only run in \`plugins: []\`. ` +
`To sandbox this plugin, convert it to the standard format.`,
);
}
}
// Resolved plugins (populated at build time by importing entrypoints)
let _resolvedPlugins: ResolvedPlugin[] = [];
// Serialize config for virtual module (database/storage/auth - plugins handled separately)
// i18n is populated in astro:config:setup from astroConfig.i18n
const serializableConfig: Record<string, unknown> = {
database: resolvedConfig.database,
storage: resolvedConfig.storage,
auth: resolvedConfig.auth,
marketplace: resolvedConfig.marketplace,
};
// Determine auth mode for route injection
// Check if auth is an AuthDescriptor (has entrypoint) indicating external auth
const useExternalAuth = !!(resolvedConfig.auth && "entrypoint" in resolvedConfig.auth);
return {
name: "emdash",
hooks: {
"astro:config:setup": ({
injectRoute,
addMiddleware,
logger,
updateConfig,
config: astroConfig,
command,
}) => {
printBanner(logger);
// Extract i18n config from Astro config
// Astro locales can be strings OR { path, codes } objects — normalize to paths
if (astroConfig.i18n) {
const routing = astroConfig.i18n.routing;
serializableConfig.i18n = {
defaultLocale: astroConfig.i18n.defaultLocale,
locales: astroConfig.i18n.locales.map((l) => (typeof l === "string" ? l : l.path)),
fallback: astroConfig.i18n.fallback,
prefixDefaultLocale:
typeof routing === "object" ? (routing.prefixDefaultLocale ?? false) : false,
};
}
// Update Vite config with virtual modules and other settings
updateConfig({
vite: createViteConfig(
{
serializableConfig,
resolvedConfig,
pluginDescriptors,
astroConfig,
},
command,
),
});
// Inject all core routes
injectCoreRoutes(injectRoute);
// Only inject passkey/oauth/magic-link routes when NOT using external auth
if (!useExternalAuth) {
injectBuiltinAuthRoutes(injectRoute);
}
// Inject MCP endpoint when enabled
if (resolvedConfig.mcp) {
injectMcpRoute(injectRoute);
logger.info("MCP server enabled at /_emdash/api/mcp");
}
// In playground mode, inject the playground middleware FIRST.
// It sets up a per-session DO database in ALS before anything
// else runs, so the runtime init middleware sees a real DB.
if (resolvedConfig.playground) {
addMiddleware({
entrypoint: resolvedConfig.playground.middlewareEntrypoint,
order: "pre",
});
}
// Add middleware to provide database and manifest
addMiddleware({
entrypoint: "emdash/middleware",
order: "pre",
});
// Add redirect middleware (runs after runtime init, before setup/auth)
addMiddleware({
entrypoint: "emdash/middleware/redirect",
order: "pre",
});
// Skip setup and auth in playground mode -- the playground middleware
// handles session creation and injects an anonymous admin user.
if (!resolvedConfig.playground) {
addMiddleware({
entrypoint: "emdash/middleware/setup",
order: "pre",
});
addMiddleware({
entrypoint: "emdash/middleware/auth",
order: "pre",
});
}
// Add request context middleware (runs after auth, on ALL routes)
// Sets up ALS-based context for query functions (edit mode, preview)
addMiddleware({
entrypoint: "emdash/middleware/request-context",
order: "pre",
});
printRoutesSummary(logger);
},
// Dev-server hook: regenerates the collection type declarations
// (emdash-env.d.ts) once the HTTP server is accepting connections.
"astro:server:setup": ({ server, logger }) => {
  // Generate types once the server is listening.
  // The endpoint returns the types content; we write the file here
  // (in Node) because workerd has no real filesystem access.
  server.httpServer?.once("listening", async () => {
    // Imported lazily inside this Node-side callback so Node-only
    // modules are never loaded at module-evaluation time.
    const { writeFile, readFile } = await import("node:fs/promises");
    const { resolve } = await import("node:path");
    const address = server.httpServer?.address();
    // address() returns a string for pipe/IPC listeners; we need a TCP port.
    if (!address || typeof address === "string") return;
    const port = address.port;
    const typegenUrl = `http://localhost:${port}/_emdash/api/typegen`;
    const outputPath = resolve(process.cwd(), "emdash-env.d.ts");
    try {
      const response = await fetch(typegenUrl, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
      });
      if (!response.ok) {
        // Non-2xx: log a truncated body for diagnostics and bail --
        // typegen failure must never break the dev server.
        const body = await response.text().catch(() => "");
        logger.warn(`Typegen failed: ${response.status} ${body.slice(0, 200)}`);
        return;
      }
      const { data: result } = (await response.json()) as {
        data: {
          types: string;
          hash: string;
          collections: number;
        };
      };
      // Only write if content changed
      let needsWrite = true;
      try {
        const existing = await readFile(outputPath, "utf-8");
        if (existing === result.types) needsWrite = false;
      } catch {
        // File doesn't exist yet
      }
      if (needsWrite) {
        await writeFile(outputPath, result.types, "utf-8");
        logger.info(`Generated emdash-env.d.ts (${result.collections} collections)`);
      }
    } catch (error) {
      // fetch or filesystem error: warn and continue; never throw here.
      const msg = error instanceof Error ? error.message : String(error);
      logger.warn(`Typegen failed: ${msg}`);
    }
  });
},
// Build hook: currently only logs that the build finished.
"astro:build:done": ({ logger }) => {
  logger.info("Build complete");
},
},
};
}
export default emdash;

View File

@@ -0,0 +1,834 @@
/**
* Route Injection
*
* Defines and injects all EmDash routes into the Astro application.
*/
import { createRequire } from "node:module";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
/**
 * Resolve the absolute path of a route file shipped with the package.
 *
 * Prefers the published `emdash/routes/*` package export; when that
 * cannot be resolved (e.g. running from a source checkout), falls back
 * to a path relative to this module's directory.
 *
 * Uses Node.js APIs - only call at build time.
 */
function resolveRoute(route: string): string {
  // createRequire/fileURLToPath are invoked lazily, per call, so no
  // Node.js code runs at import time. This keeps the module safe to
  // bundle for Cloudflare Workers, where these APIs do not exist.
  const nodeRequire = createRequire(import.meta.url);
  const moduleDir = dirname(fileURLToPath(import.meta.url));
  try {
    // Preferred: resolve through the package's "routes" export map.
    return nodeRequire.resolve(`emdash/routes/${route}`);
  } catch {
    // Package export not resolvable -- use the on-disk layout instead.
    return resolve(moduleDir, "../routes", route);
  }
}
/**
 * Route injection function type.
 *
 * Matches the shape of Astro's `injectRoute` hook callback: registers
 * one route by URL pattern and entrypoint module path.
 */
type InjectRoute = (route: { pattern: string; entrypoint: string }) => void;
/**
 * Injects all core EmDash routes.
 *
 * The routes are declared as `[pattern, routeFile]` pairs so the table
 * reads as data and stays free of the repeated call boilerplate. Every
 * entry is injected in declaration order; `routeFile` is resolved
 * against the package's `routes/` directory via `resolveRoute`.
 *
 * @param injectRoute - Astro's route-registration callback.
 */
export function injectCoreRoutes(injectRoute: InjectRoute): void {
  const routes: ReadonlyArray<readonly [string, string]> = [
    // Admin shell route
    ["/_emdash/admin/[...path]", "admin.astro"],
    // Core API routes
    ["/_emdash/api/manifest", "api/manifest.ts"],
    ["/_emdash/api/dashboard", "api/dashboard.ts"],
    // Content routes
    ["/_emdash/api/content/[collection]", "api/content/[collection]/index.ts"],
    ["/_emdash/api/content/[collection]/[id]", "api/content/[collection]/[id].ts"],
    ["/_emdash/api/content/[collection]/[id]/revisions", "api/content/[collection]/[id]/revisions.ts"],
    ["/_emdash/api/content/[collection]/[id]/preview-url", "api/content/[collection]/[id]/preview-url.ts"],
    // Trash/restore routes
    ["/_emdash/api/content/[collection]/trash", "api/content/[collection]/trash.ts"],
    ["/_emdash/api/content/[collection]/[id]/restore", "api/content/[collection]/[id]/restore.ts"],
    ["/_emdash/api/content/[collection]/[id]/permanent", "api/content/[collection]/[id]/permanent.ts"],
    ["/_emdash/api/content/[collection]/[id]/duplicate", "api/content/[collection]/[id]/duplicate.ts"],
    // Publishing routes
    ["/_emdash/api/content/[collection]/[id]/publish", "api/content/[collection]/[id]/publish.ts"],
    ["/_emdash/api/content/[collection]/[id]/unpublish", "api/content/[collection]/[id]/unpublish.ts"],
    ["/_emdash/api/content/[collection]/[id]/discard-draft", "api/content/[collection]/[id]/discard-draft.ts"],
    ["/_emdash/api/content/[collection]/[id]/compare", "api/content/[collection]/[id]/compare.ts"],
    // i18n translation routes
    ["/_emdash/api/content/[collection]/[id]/translations", "api/content/[collection]/[id]/translations.ts"],
    // Scheduled publishing routes
    ["/_emdash/api/content/[collection]/[id]/schedule", "api/content/[collection]/[id]/schedule.ts"],
    // Revision management routes (for restore, etc.)
    ["/_emdash/api/revisions/[revisionId]", "api/revisions/[revisionId]/index.ts"],
    ["/_emdash/api/revisions/[revisionId]/restore", "api/revisions/[revisionId]/restore.ts"],
    // Media API routes
    ["/_emdash/api/media", "api/media.ts"],
    ["/_emdash/api/media/upload-url", "api/media/upload-url.ts"],
    ["/_emdash/api/media/file/[key]", "api/media/file/[key].ts"],
    ["/_emdash/api/media/[id]", "api/media/[id].ts"],
    ["/_emdash/api/media/[id]/confirm", "api/media/[id]/confirm.ts"],
    // Media provider routes
    ["/_emdash/api/media/providers", "api/media/providers/index.ts"],
    ["/_emdash/api/media/providers/[providerId]", "api/media/providers/[providerId]/index.ts"],
    ["/_emdash/api/media/providers/[providerId]/[itemId]", "api/media/providers/[providerId]/[itemId].ts"],
    // Import API routes
    ["/_emdash/api/import/probe", "api/import/probe.ts"],
    ["/_emdash/api/import/wordpress/analyze", "api/import/wordpress/analyze.ts"],
    ["/_emdash/api/import/wordpress/prepare", "api/import/wordpress/prepare.ts"],
    ["/_emdash/api/import/wordpress/execute", "api/import/wordpress/execute.ts"],
    ["/_emdash/api/import/wordpress/media", "api/import/wordpress/media.ts"],
    ["/_emdash/api/import/wordpress/rewrite-urls", "api/import/wordpress/rewrite-urls.ts"],
    // WordPress Plugin (EmDash Exporter) direct import routes
    ["/_emdash/api/import/wordpress-plugin/analyze", "api/import/wordpress-plugin/analyze.ts"],
    ["/_emdash/api/import/wordpress-plugin/execute", "api/import/wordpress-plugin/execute.ts"],
    ["/_emdash/api/import/wordpress-plugin/callback", "api/import/wordpress-plugin/callback.ts"],
    // Schema API routes
    ["/_emdash/api/schema", "api/schema/index.ts"],
    // Typegen endpoint (dev-only)
    ["/_emdash/api/typegen", "api/typegen.ts"],
    ["/_emdash/api/schema/collections", "api/schema/collections/index.ts"],
    ["/_emdash/api/schema/collections/[slug]", "api/schema/collections/[slug]/index.ts"],
    ["/_emdash/api/schema/collections/[slug]/fields", "api/schema/collections/[slug]/fields/index.ts"],
    ["/_emdash/api/schema/collections/[slug]/fields/reorder", "api/schema/collections/[slug]/fields/reorder.ts"],
    ["/_emdash/api/schema/collections/[slug]/fields/[fieldSlug]", "api/schema/collections/[slug]/fields/[fieldSlug].ts"],
    // Orphaned tables discovery
    ["/_emdash/api/schema/orphans", "api/schema/orphans/index.ts"],
    ["/_emdash/api/schema/orphans/[slug]", "api/schema/orphans/[slug].ts"],
    // Site settings route
    ["/_emdash/api/settings", "api/settings.ts"],
    // Snapshot route (for DO preview database population)
    ["/_emdash/api/snapshot", "api/snapshot.ts"],
    // Taxonomy API routes
    ["/_emdash/api/taxonomies", "api/taxonomies/index.ts"],
    ["/_emdash/api/taxonomies/[name]/terms", "api/taxonomies/[name]/terms/index.ts"],
    ["/_emdash/api/taxonomies/[name]/terms/[slug]", "api/taxonomies/[name]/terms/[slug].ts"],
    ["/_emdash/api/content/[collection]/[id]/terms/[taxonomy]", "api/content/[collection]/[id]/terms/[taxonomy].ts"],
    // Plugin management routes (under /admin to avoid conflict with plugin API routes)
    ["/_emdash/api/admin/plugins", "api/admin/plugins/index.ts"],
    ["/_emdash/api/admin/plugins/[id]", "api/admin/plugins/[id]/index.ts"],
    ["/_emdash/api/admin/plugins/[id]/enable", "api/admin/plugins/[id]/enable.ts"],
    ["/_emdash/api/admin/plugins/[id]/disable", "api/admin/plugins/[id]/disable.ts"],
    // Marketplace plugin routes
    ["/_emdash/api/admin/plugins/marketplace", "api/admin/plugins/marketplace/index.ts"],
    ["/_emdash/api/admin/plugins/marketplace/[id]", "api/admin/plugins/marketplace/[id]/index.ts"],
    ["/_emdash/api/admin/plugins/marketplace/[id]/icon", "api/admin/plugins/marketplace/[id]/icon.ts"],
    ["/_emdash/api/admin/plugins/marketplace/[id]/install", "api/admin/plugins/marketplace/[id]/install.ts"],
    ["/_emdash/api/admin/plugins/[id]/update", "api/admin/plugins/[id]/update.ts"],
    ["/_emdash/api/admin/plugins/[id]/uninstall", "api/admin/plugins/[id]/uninstall.ts"],
    ["/_emdash/api/admin/plugins/updates", "api/admin/plugins/updates.ts"],
    // Exclusive hooks admin routes
    ["/_emdash/api/admin/hooks/exclusive", "api/admin/hooks/exclusive/index.ts"],
    ["/_emdash/api/admin/hooks/exclusive/[hookName]", "api/admin/hooks/exclusive/[hookName].ts"],
    // Theme marketplace routes
    ["/_emdash/api/admin/themes/marketplace", "api/admin/themes/marketplace/index.ts"],
    ["/_emdash/api/admin/themes/marketplace/[id]", "api/admin/themes/marketplace/[id]/index.ts"],
    ["/_emdash/api/admin/themes/marketplace/[id]/thumbnail", "api/admin/themes/marketplace/[id]/thumbnail.ts"],
    // Theme preview signing (local, not proxied)
    ["/_emdash/api/themes/preview", "api/themes/preview.ts"],
    // User management routes
    ["/_emdash/api/admin/users", "api/admin/users/index.ts"],
    // Bylines routes
    ["/_emdash/api/admin/bylines", "api/admin/bylines/index.ts"],
    ["/_emdash/api/admin/bylines/[id]", "api/admin/bylines/[id]/index.ts"],
    ["/_emdash/api/admin/users/[id]", "api/admin/users/[id]/index.ts"],
    ["/_emdash/api/admin/users/[id]/disable", "api/admin/users/[id]/disable.ts"],
    ["/_emdash/api/admin/users/[id]/enable", "api/admin/users/[id]/enable.ts"],
    ["/_emdash/api/admin/users/[id]/send-recovery", "api/admin/users/[id]/send-recovery.ts"],
    // API token admin routes
    ["/_emdash/api/admin/api-tokens", "api/admin/api-tokens/index.ts"],
    ["/_emdash/api/admin/api-tokens/[id]", "api/admin/api-tokens/[id].ts"],
    // OAuth client admin routes
    ["/_emdash/api/admin/oauth-clients", "api/admin/oauth-clients/index.ts"],
    ["/_emdash/api/admin/oauth-clients/[id]", "api/admin/oauth-clients/[id].ts"],
    // OAuth Device Flow routes
    ["/_emdash/api/oauth/device/code", "api/oauth/device/code.ts"],
    ["/_emdash/api/oauth/device/token", "api/oauth/device/token.ts"],
    ["/_emdash/api/oauth/device/authorize", "api/oauth/device/authorize.ts"],
    ["/_emdash/api/oauth/token/refresh", "api/oauth/token/refresh.ts"],
    ["/_emdash/api/oauth/token/revoke", "api/oauth/token/revoke.ts"],
    // Auth discovery endpoint
    ["/_emdash/.well-known/auth", "api/well-known/auth.ts"],
    // OAuth 2.1 Authorization Code flow routes
    ["/_emdash/api/oauth/token", "api/oauth/token.ts"],
    ["/_emdash/oauth/authorize", "api/oauth/authorize.ts"],
    // OAuth discovery endpoints (RFC 9728, RFC 8414)
    ["/.well-known/oauth-protected-resource", "api/well-known/oauth-protected-resource.ts"],
    ["/_emdash/.well-known/oauth-authorization-server", "api/well-known/oauth-authorization-server.ts"],
    // Plugin-defined API routes -- all handled by one catch-all handler
    ["/_emdash/api/plugins/[pluginId]/[...path]", "api/plugins/[pluginId]/[...path].ts"],
    // Menu API routes
    ["/_emdash/api/menus", "api/menus/index.ts"],
    ["/_emdash/api/menus/[name]", "api/menus/[name].ts"],
    ["/_emdash/api/menus/[name]/items", "api/menus/[name]/items.ts"],
    ["/_emdash/api/menus/[name]/reorder", "api/menus/[name]/reorder.ts"],
    // Widget area routes
    ["/_emdash/api/widget-areas", "api/widget-areas/index.ts"],
    ["/_emdash/api/widget-components", "api/widget-components.ts"],
    ["/_emdash/api/widget-areas/[name]", "api/widget-areas/[name].ts"],
    ["/_emdash/api/widget-areas/[name]/widgets", "api/widget-areas/[name]/widgets.ts"],
    ["/_emdash/api/widget-areas/[name]/widgets/[id]", "api/widget-areas/[name]/widgets/[id].ts"],
    ["/_emdash/api/widget-areas/[name]/reorder", "api/widget-areas/[name]/reorder.ts"],
    // Section routes
    ["/_emdash/api/sections", "api/sections/index.ts"],
    ["/_emdash/api/sections/[slug]", "api/sections/[slug].ts"],
    // Redirect routes
    ["/_emdash/api/redirects", "api/redirects/index.ts"],
    ["/_emdash/api/redirects/404s/summary", "api/redirects/404s/summary.ts"],
    ["/_emdash/api/redirects/404s", "api/redirects/404s/index.ts"],
    ["/_emdash/api/redirects/[id]", "api/redirects/[id].ts"],
    // Search routes
    ["/_emdash/api/search", "api/search/index.ts"],
    ["/_emdash/api/search/suggest", "api/search/suggest.ts"],
    ["/_emdash/api/search/stats", "api/search/stats.ts"],
    ["/_emdash/api/search/rebuild", "api/search/rebuild.ts"],
    ["/_emdash/api/search/enable", "api/search/enable.ts"],
    // Comment routes (public)
    ["/_emdash/api/comments/[collection]/[contentId]", "api/comments/[collection]/[contentId]/index.ts"],
    // Comment routes (admin)
    ["/_emdash/api/admin/comments", "api/admin/comments/index.ts"],
    ["/_emdash/api/admin/comments/counts", "api/admin/comments/counts.ts"],
    ["/_emdash/api/admin/comments/bulk", "api/admin/comments/bulk.ts"],
    ["/_emdash/api/admin/comments/[id]/status", "api/admin/comments/[id]/status.ts"],
    ["/_emdash/api/admin/comments/[id]", "api/admin/comments/[id].ts"],
    // SEO routes (public, at site root)
    ["/sitemap.xml", "sitemap.xml.ts"],
    ["/robots.txt", "robots.txt.ts"],
    // Setup wizard API routes
    ["/_emdash/api/setup/status", "api/setup/status.ts"],
    ["/_emdash/api/setup", "api/setup/index.ts"],
    ["/_emdash/api/setup/admin", "api/setup/admin.ts"],
    ["/_emdash/api/setup/admin/verify", "api/setup/admin-verify.ts"],
    ["/_emdash/api/setup/dev-bypass", "api/setup/dev-bypass.ts"],
    ["/_emdash/api/setup/dev-reset", "api/setup/dev-reset.ts"],
    // Current user endpoint (always available)
    ["/_emdash/api/auth/me", "api/auth/me.ts"],
    // Logout is always available (though behavior differs by auth mode)
    ["/_emdash/api/auth/logout", "api/auth/logout.ts"],
  ];
  for (const [pattern, route] of routes) {
    injectRoute({ pattern, entrypoint: resolveRoute(route) });
  }
}
/**
 * Injects the MCP (Model Context Protocol) server route.
 * Only injected when `mcp: true` is set in the EmDash config.
 *
 * @param injectRoute - Astro's route-registration callback.
 */
export function injectMcpRoute(injectRoute: InjectRoute): void {
  const pattern = "/_emdash/api/mcp";
  const entrypoint = resolveRoute("api/mcp.ts");
  injectRoute({ pattern, entrypoint });
}
/**
 * Injects passkey/oauth/magic-link auth routes.
 * Only used when NOT using external auth.
 *
 * Declared as `[pattern, routeFile]` pairs so the table reads as data;
 * entries are injected in declaration order via `resolveRoute`.
 *
 * @param injectRoute - Astro's route-registration callback.
 */
export function injectBuiltinAuthRoutes(injectRoute: InjectRoute): void {
  const routes: ReadonlyArray<readonly [string, string]> = [
    // Passkey authentication routes
    ["/_emdash/api/auth/passkey/options", "api/auth/passkey/options.ts"],
    ["/_emdash/api/auth/passkey/verify", "api/auth/passkey/verify.ts"],
    // Passkey management routes (authenticated users)
    ["/_emdash/api/auth/passkey", "api/auth/passkey/index.ts"],
    ["/_emdash/api/auth/passkey/register/options", "api/auth/passkey/register/options.ts"],
    ["/_emdash/api/auth/passkey/register/verify", "api/auth/passkey/register/verify.ts"],
    ["/_emdash/api/auth/passkey/[id]", "api/auth/passkey/[id].ts"],
    ["/_emdash/api/auth/dev-bypass", "api/auth/dev-bypass.ts"],
    // Invite routes
    ["/_emdash/api/auth/invite", "api/auth/invite/index.ts"],
    ["/_emdash/api/auth/invite/accept", "api/auth/invite/accept.ts"],
    ["/_emdash/api/auth/invite/complete", "api/auth/invite/complete.ts"],
    // Magic link routes
    ["/_emdash/api/auth/magic-link/send", "api/auth/magic-link/send.ts"],
    ["/_emdash/api/auth/magic-link/verify", "api/auth/magic-link/verify.ts"],
    // OAuth routes
    ["/_emdash/api/auth/oauth/[provider]", "api/auth/oauth/[provider].ts"],
    ["/_emdash/api/auth/oauth/[provider]/callback", "api/auth/oauth/[provider]/callback.ts"],
    // Self-signup routes
    ["/_emdash/api/auth/signup/request", "api/auth/signup/request.ts"],
    ["/_emdash/api/auth/signup/verify", "api/auth/signup/verify.ts"],
    ["/_emdash/api/auth/signup/complete", "api/auth/signup/complete.ts"],
    // Allowed domains admin routes (only relevant for passkey mode)
    ["/_emdash/api/admin/allowed-domains", "api/admin/allowed-domains/index.ts"],
    ["/_emdash/api/admin/allowed-domains/[domain]", "api/admin/allowed-domains/[domain].ts"],
  ];
  for (const [pattern, route] of routes) {
    injectRoute({ pattern, entrypoint: resolveRoute(route) });
  }
}

View File

@@ -0,0 +1,338 @@
/**
* Runtime utilities for EmDash
*
* This file contains functions that are used at runtime (in middleware, routes, etc.)
* and must work in all environments including Cloudflare Workers.
*
* DO NOT import Node.js-only modules here (fs, path, module, etc.)
*/
import type { AuthDescriptor } from "../../auth/types.js";
import type { DatabaseDescriptor } from "../../db/adapters.js";
import type { MediaProviderDescriptor } from "../../media/types.js";
import type { ResolvedPlugin } from "../../plugins/types.js";
import type { StorageDescriptor } from "../storage/types.js";
export type { ResolvedPlugin };
export type { MediaProviderDescriptor };
/**
 * Admin page definition (copied from plugins/types to avoid circular deps)
 */
export interface PluginAdminPage {
  /** Route path of the page (e.g. "/settings", as in the PluginDescriptor docs). */
  path: string;
  /** Display label for the page in admin navigation. */
  label: string;
  /** Optional icon for the nav entry. NOTE(review): exact format (icon name vs URL) not visible here -- confirm against the admin UI. */
  icon?: string;
}
/**
 * Dashboard widget definition (copied from plugins/types to avoid circular deps)
 */
export interface PluginDashboardWidget {
  /** Unique widget identifier. */
  id: string;
  /** Widget width on the dashboard: a full row, half, or a third. */
  size?: "full" | "half" | "third";
  /** Optional widget title. */
  title?: string;
}
/**
 * Storage collection declaration for sandboxed plugins
 */
export interface StorageCollectionDeclaration {
  indexes?: string[];
  uniqueIndexes?: string[];
}
/**
 * Plugin descriptor - returned by plugin factory functions
 *
 * Contains all static metadata needed for manifest and admin UI,
 * plus the entrypoint for runtime instantiation.
 *
 * (This doc block sits directly above the interface it documents so
 * editors and doc tooling attach it to `PluginDescriptor`.)
 *
 * @example
 * ```ts
 * export function myPlugin(options?: MyPluginOptions): PluginDescriptor {
 *   return {
 *     id: "my-plugin",
 *     version: "1.0.0",
 *     entrypoint: "@my-org/emdash-plugin-foo",
 *     options: options ?? {},
 *     adminEntry: "@my-org/emdash-plugin-foo/admin",
 *     adminPages: [{ path: "/settings", label: "Settings" }],
 *   };
 * }
 * ```
 */
export interface PluginDescriptor<TOptions = Record<string, unknown>> {
  /** Unique plugin identifier */
  id: string;
  /** Plugin version (semver) */
  version: string;
  /** Module specifier to import (e.g., "@emdashcms/plugin-api-test") */
  entrypoint: string;
  /**
   * Options to pass to createPlugin(). Native format only.
   * Standard-format plugins configure themselves via KV settings
   * and Block Kit admin pages -- not constructor options.
   */
  options?: TOptions;
  /**
   * Plugin format. Determines how the entrypoint is loaded:
   * - `"standard"` -- exports `definePlugin({ hooks, routes })` as default.
   *   Wrapped with `adaptSandboxEntry` for in-process execution. Can run in both
   *   `plugins: []` (in-process) and `sandboxed: []` (isolate).
   * - `"native"` -- exports `createPlugin(options)` returning a `ResolvedPlugin`.
   *   Can only run in `plugins: []`. Cannot be sandboxed or published to marketplace.
   *
   * Defaults to `"native"` when unset.
   *
   */
  format?: "standard" | "native";
  /** Admin UI module specifier (e.g., "@emdashcms/plugin-audit-log/admin") */
  adminEntry?: string;
  /** Module specifier for site-side Astro rendering components (must export `blockComponents`) */
  componentsEntry?: string;
  /** Admin pages for navigation */
  adminPages?: PluginAdminPage[];
  /** Dashboard widgets */
  adminWidgets?: PluginDashboardWidget[];
  // === Sandbox-specific fields (for sandboxed plugins) ===
  /**
   * Capabilities the plugin requests.
   * For standard-format plugins, capabilities are enforced in both trusted and
   * sandboxed modes via the PluginContextFactory.
   */
  capabilities?: string[];
  /**
   * Allowed hosts for network:fetch capability
   * Supports wildcards like "*.example.com"
   */
  allowedHosts?: string[];
  /**
   * Storage collections the plugin declares
   * Sandboxed plugins can only access declared collections.
   */
  storage?: Record<string, StorageCollectionDeclaration>;
}
/**
 * Sandboxed plugin descriptor - same format as PluginDescriptor
 *
 * These run in isolated V8 isolates via Worker Loader on Cloudflare.
 * The `entrypoint` is resolved to a file and bundled at build time.
 *
 * The alias is kept distinct so the `plugins` vs `sandboxed` config
 * fields remain self-documenting even though the shapes are identical.
 */
export type SandboxedPluginDescriptor<TOptions = Record<string, unknown>> =
  PluginDescriptor<TOptions>;
/**
 * Configuration accepted by the `emdash()` Astro integration.
 *
 * Every field is optional. The adapter-style fields (`database`,
 * `storage`, `auth`, `mediaProviders`) take descriptor objects produced
 * by factory functions -- see the per-field examples below.
 */
export interface EmDashConfig {
  /**
   * Database configuration
   *
   * Use one of the adapter functions:
   * - `sqlite({ url: "file:./data.db" })` - Local SQLite
   * - `libsql({ url: "...", authToken: "..." })` - Turso/libSQL
   * - `d1({ binding: "DB" })` - Cloudflare D1
   *
   * @example
   * ```ts
   * import { sqlite } from "emdash/db";
   *
   * emdash({
   *   database: sqlite({ url: "file:./data.db" }),
   * })
   * ```
   */
  database?: DatabaseDescriptor;
  /**
   * Storage configuration (for media)
   */
  storage?: StorageDescriptor;
  /**
   * Trusted plugins to load (run in main isolate)
   *
   * @example
   * ```ts
   * import { auditLogPlugin } from "@emdashcms/plugin-audit-log";
   * import { webhookNotifierPlugin } from "@emdashcms/plugin-webhook-notifier";
   *
   * emdash({
   *   plugins: [
   *     auditLogPlugin(),
   *     webhookNotifierPlugin({ url: "https://example.com/webhook" }),
   *   ],
   * })
   * ```
   */
  plugins?: PluginDescriptor[];
  /**
   * Sandboxed plugins to load (run in isolated V8 isolates)
   *
   * Only works on Cloudflare with Worker Loader enabled.
   * Uses the same format as `plugins` - the difference is where they run.
   *
   * @example
   * ```ts
   * import { untrustedPlugin } from "some-third-party-plugin";
   *
   * emdash({
   *   plugins: [trustedPlugin()], // runs in host
   *   sandboxed: [untrustedPlugin()], // runs in isolate
   *   sandboxRunner: "@emdashcms/sandbox-cloudflare",
   * })
   * ```
   */
  sandboxed?: SandboxedPluginDescriptor[];
  /**
   * Module that exports the sandbox runner factory.
   * Required if using sandboxed plugins.
   *
   * @example
   * ```ts
   * emdash({
   *   sandboxRunner: "@emdashcms/sandbox-cloudflare",
   * })
   * ```
   */
  sandboxRunner?: string;
  /**
   * Authentication configuration
   *
   * Use an auth adapter function from a platform package:
   * - `access({ teamDomain: "..." })` from `@emdashcms/cloudflare`
   *
   * When an external auth provider is configured, passkey auth is disabled.
   *
   * @example
   * ```ts
   * import { access } from "@emdashcms/cloudflare";
   *
   * emdash({
   *   auth: access({
   *     teamDomain: "myteam.cloudflareaccess.com",
   *     audience: "abc123...",
   *     roleMapping: {
   *       "Admins": 50,
   *       "Editors": 30,
   *     },
   *   }),
   * })
   * ```
   */
  auth?: AuthDescriptor;
  /**
   * Enable the MCP (Model Context Protocol) server endpoint.
   *
   * When enabled, exposes an MCP Streamable HTTP server at
   * `/_emdash/api/mcp` that allows AI agents and tools to interact
   * with the CMS using the standardized MCP protocol.
   *
   * Authentication is handled by the existing EmDash auth middleware —
   * agents must authenticate with an API token or session cookie.
   *
   * @default false
   *
   * @example
   * ```ts
   * emdash({
   *   mcp: true,
   * })
   * ```
   */
  mcp?: boolean;
  /**
   * Plugin marketplace URL
   *
   * When set, enables the marketplace features: browse, install, update,
   * and uninstall plugins from a remote marketplace.
   *
   * Must be an HTTPS URL in production, or localhost/127.0.0.1 in dev.
   * Requires `sandboxRunner` to be configured (marketplace plugins run sandboxed).
   *
   * @example
   * ```ts
   * emdash({
   *   marketplace: "https://marketplace.emdashcms.com",
   *   sandboxRunner: "@emdashcms/sandbox-cloudflare",
   * })
   * ```
   */
  marketplace?: string;
  /**
   * Enable playground mode for ephemeral "try EmDash" sites.
   *
   * When set, the integration injects a playground middleware (order: "pre")
   * that runs BEFORE the normal EmDash middleware chain. It creates an
   * isolated Durable Object database per session, runs migrations, applies
   * the seed, creates an anonymous admin user, and sets the DB in ALS.
   * By the time the runtime middleware runs, the database is fully ready.
   *
   * Setup and auth middleware are skipped (the playground handles both).
   *
   * Requires `@emdashcms/cloudflare` as a dependency and a DO binding
   * in wrangler.jsonc.
   *
   * @example
   * ```ts
   * emdash({
   *   database: playgroundDatabase({ binding: "PLAYGROUND_DB" }),
   *   playground: {
   *     middlewareEntrypoint: "@emdashcms/cloudflare/db/playground-middleware",
   *   },
   * })
   * ```
   */
  playground?: {
    /** Module path for the playground middleware. */
    middlewareEntrypoint: string;
  };
  /**
   * Media providers for browsing and uploading media
   *
   * The local media provider (using storage adapter) is available by default.
   * Additional providers can be added for external services like Unsplash,
   * Cloudinary, Mux, Cloudflare Images, etc.
   *
   * @example
   * ```ts
   * import { cloudflareImages, cloudflareStream } from "@emdashcms/cloudflare";
   * import { unsplash } from "@emdashcms/provider-unsplash";
   *
   * emdash({
   *   mediaProviders: [
   *     cloudflareImages({ accountId: "..." }),
   *     cloudflareStream({ accountId: "..." }),
   *     unsplash({ accessKey: "..." }),
   *   ],
   * })
   * ```
   */
  mediaProviders?: MediaProviderDescriptor[];
}
/**
 * Reads the EmDash config previously stashed on `globalThis`.
 *
 * The virtual config module assigns it at build time; until that
 * happens this returns null.
 */
export function getStoredConfig(): EmDashConfig | null {
  return globalThis.__emdashConfig ?? null;
}
/**
 * Stashes the EmDash config on `globalThis` for later retrieval.
 * Called by the integration at config time.
 */
export function setStoredConfig(config: EmDashConfig): void {
  globalThis.__emdashConfig = config;
}
// Global slot used by the two accessors above.
declare global {
  // eslint-disable-next-line no-var
  var __emdashConfig: EmDashConfig | undefined;
}

View File

@@ -0,0 +1,469 @@
/**
* Virtual Module Generators
*
* Functions that generate virtual module content for Vite.
* These modules statically import configured dependencies
* so Vite can properly resolve and bundle them.
*/
import { readFileSync } from "node:fs";
import { createRequire } from "node:module";
import { resolve } from "node:path";
import type { MediaProviderDescriptor } from "../../media/types.js";
import { defaultSeed } from "../../seed/default.js";
import type { PluginDescriptor } from "./runtime.js";
/** Pattern to remove scoped package prefix from plugin ID (e.g. "@org/plugin-foo" -> "foo") */
const SCOPED_PREFIX_PATTERN = /^@[^/]+\/plugin-/;
/** Pattern to remove emdash-plugin- prefix from plugin ID */
const EMDASH_PREFIX_PATTERN = /^emdash-plugin-/;
// Virtual module IDs.
// Each module has a public id ("virtual:emdash/...") and a resolved id
// prefixed with "\0" -- the Vite convention marking an id as handled by
// this plugin so other resolvers leave it alone.
export const VIRTUAL_CONFIG_ID = "virtual:emdash/config";
export const RESOLVED_VIRTUAL_CONFIG_ID = "\0" + VIRTUAL_CONFIG_ID;
export const VIRTUAL_DIALECT_ID = "virtual:emdash/dialect";
export const RESOLVED_VIRTUAL_DIALECT_ID = "\0" + VIRTUAL_DIALECT_ID;
export const VIRTUAL_STORAGE_ID = "virtual:emdash/storage";
export const RESOLVED_VIRTUAL_STORAGE_ID = "\0" + VIRTUAL_STORAGE_ID;
export const VIRTUAL_ADMIN_REGISTRY_ID = "virtual:emdash/admin-registry";
export const RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID = "\0" + VIRTUAL_ADMIN_REGISTRY_ID;
export const VIRTUAL_PLUGINS_ID = "virtual:emdash/plugins";
export const RESOLVED_VIRTUAL_PLUGINS_ID = "\0" + VIRTUAL_PLUGINS_ID;
export const VIRTUAL_SANDBOX_RUNNER_ID = "virtual:emdash/sandbox-runner";
export const RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID = "\0" + VIRTUAL_SANDBOX_RUNNER_ID;
export const VIRTUAL_SANDBOXED_PLUGINS_ID = "virtual:emdash/sandboxed-plugins";
export const RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID = "\0" + VIRTUAL_SANDBOXED_PLUGINS_ID;
export const VIRTUAL_AUTH_ID = "virtual:emdash/auth";
export const RESOLVED_VIRTUAL_AUTH_ID = "\0" + VIRTUAL_AUTH_ID;
export const VIRTUAL_MEDIA_PROVIDERS_ID = "virtual:emdash/media-providers";
export const RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID = "\0" + VIRTUAL_MEDIA_PROVIDERS_ID;
export const VIRTUAL_BLOCK_COMPONENTS_ID = "virtual:emdash/block-components";
export const RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID = "\0" + VIRTUAL_BLOCK_COMPONENTS_ID;
export const VIRTUAL_SEED_ID = "virtual:emdash/seed";
export const RESOLVED_VIRTUAL_SEED_ID = "\0" + VIRTUAL_SEED_ID;
/**
 * Generates the config virtual module.
 * Serializes the already-serializable config object to JSON and exports it
 * as the module's default export.
 */
export function generateConfigModule(serializableConfig: Record<string, unknown>): string {
  const configJson = JSON.stringify(serializableConfig);
  return "export default " + configJson + ";";
}
/**
 * Generates the dialect virtual module.
 * Statically imports the configured database dialect and exports the dialect type.
 *
 * For D1 adapters, also re-exports session helpers (isSessionEnabled, getD1Binding,
 * getDefaultConstraint, getBookmarkCookieName, createSessionDialect) used by
 * middleware for per-request read replica sessions.
 *
 * For non-D1 adapters, session exports are no-ops.
 */
export function generateDialectModule(
  dbEntrypoint?: string,
  dbType?: string,
  dbConfig?: unknown,
): string {
  // No adapter configured: export inert stand-ins so importers still link.
  if (!dbEntrypoint) {
    return [
      `export const createDialect = undefined;`,
      `export const dialectType = "sqlite";`,
      `export const isSessionEnabled = () => false;`,
      `export const getD1Binding = () => null;`,
      `export const getDefaultConstraint = () => "first-unconstrained";`,
      `export const getBookmarkCookieName = () => "";`,
      `export const createSessionDialect = undefined;`,
    ].join("\n");
  }
  const resolvedType = dbType ?? "sqlite";
  // D1 adapters (recognized by package path) are the only ones with session helpers.
  const isD1 = dbEntrypoint.includes("cloudflare") && dbEntrypoint.includes("d1");
  // Extract the session mode only when the config is an object carrying it.
  let sessionMode: string | undefined;
  if (isD1 && dbConfig && typeof dbConfig === "object" && "session" in dbConfig) {
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- runtime-checked above
    sessionMode = (dbConfig as { session?: string }).session;
  }
  const sessionEnabled = !!sessionMode && sessionMode !== "disabled";
  if (isD1 && sessionEnabled) {
    // Re-export the adapter's real session helpers alongside the dialect.
    return `
import { createDialect as _createDialect } from "${dbEntrypoint}";
export { isSessionEnabled, getD1Binding, getDefaultConstraint, getBookmarkCookieName, createSessionDialect } from "${dbEntrypoint}";
export const createDialect = _createDialect;
export const dialectType = ${JSON.stringify(resolvedType)};
`;
  }
  // Non-D1 or sessions disabled: export no-ops
  return `
import { createDialect as _createDialect } from "${dbEntrypoint}";
export const createDialect = _createDialect;
export const dialectType = ${JSON.stringify(resolvedType)};
export const isSessionEnabled = () => false;
export const getD1Binding = () => null;
export const getDefaultConstraint = () => "first-unconstrained";
export const getBookmarkCookieName = () => "";
export const createSessionDialect = undefined;
`;
}
/**
 * Generates the storage virtual module.
 * Statically imports the configured storage adapter; when no adapter is
 * configured, exports `undefined` so importers can feature-detect it.
 */
export function generateStorageModule(storageEntrypoint?: string): string {
  if (!storageEntrypoint) {
    return `export const createStorage = undefined;`;
  }
  // Leading/trailing "" entries reproduce the surrounding newlines.
  return [
    "",
    `import { createStorage as _createStorage } from "${storageEntrypoint}";`,
    "export const createStorage = _createStorage;",
    "",
  ].join("\n");
}
/**
 * Generates the auth virtual module.
 * Statically imports the configured auth provider; when no provider is
 * configured, exports `undefined` so importers can feature-detect it.
 */
export function generateAuthModule(authEntrypoint?: string): string {
  if (!authEntrypoint) {
    return `export const authenticate = undefined;`;
  }
  // Leading/trailing "" entries reproduce the surrounding newlines.
  return [
    "",
    `import { authenticate as _authenticate } from "${authEntrypoint}";`,
    "export const authenticate = _authenticate;",
    "",
  ].join("\n");
}
/**
 * Generates the plugins module.
 * Imports and instantiates all plugins at runtime.
 *
 * Handles two plugin formats:
 * - **Native**: imports `createPlugin` and calls it with options
 * - **Standard**: imports the default export and wraps it with `adaptSandboxEntry`
 *
 * The format is determined by `descriptor.format`:
 * - `"standard"` -- uses adaptSandboxEntry
 * - `"native"` or undefined -- uses createPlugin
 *
 * This is critical for Cloudflare Workers where globals don't persist
 * between build time and runtime.
 */
export function generatePluginsModule(descriptors: PluginDescriptor[]): string {
  if (descriptors.length === 0) {
    return `export const plugins = [];`;
  }
  const importLines: string[] = [];
  const factoryCalls: string[] = [];
  // Only pull in the adapter helper when a standard-format plugin needs it.
  let usesAdapter = false;
  for (const [index, descriptor] of descriptors.entries()) {
    if (descriptor.format === "standard") {
      // Standard format: import default export, wrap with adaptSandboxEntry
      usesAdapter = true;
      const localName = `pluginDef${index}`;
      importLines.push(`import ${localName} from "${descriptor.entrypoint}";`);
      const metaJson = JSON.stringify({
        id: descriptor.id,
        version: descriptor.version,
        capabilities: descriptor.capabilities,
        allowedHosts: descriptor.allowedHosts,
        storage: descriptor.storage,
        adminPages: descriptor.adminPages,
        adminWidgets: descriptor.adminWidgets,
      });
      factoryCalls.push(`adaptSandboxEntry(${localName}, ${metaJson})`);
    } else {
      // Native format: import createPlugin and call with options
      const localName = `createPlugin${index}`;
      importLines.push(`import { createPlugin as ${localName} } from "${descriptor.entrypoint}";`);
      factoryCalls.push(`${localName}(${JSON.stringify(descriptor.options ?? {})})`);
    }
  }
  const adapterImport = usesAdapter
    ? `import { adaptSandboxEntry } from "emdash/plugins/adapt-sandbox-entry";\n`
    : "";
  return `
// Auto-generated plugins module
// Imports and instantiates all configured plugins at runtime
${adapterImport}${importLines.join("\n")}
/** Resolved plugins array */
export const plugins = [
  ${factoryCalls.join(",\n  ")}
];
`;
}
/**
 * Generates the admin registry module.
 * Uses adminEntry from plugin descriptors to statically import admin modules.
 * Plugins without an adminEntry are skipped entirely.
 */
export function generateAdminRegistryModule(descriptors: PluginDescriptor[]): string {
  // Only descriptors that ship an admin entry participate in the registry.
  const withAdmin = descriptors.filter((d) => d.adminEntry);
  if (withAdmin.length === 0) {
    return `export const pluginAdmins = {};`;
  }
  const importLines: string[] = [];
  const registryEntries: string[] = [];
  withAdmin.forEach((descriptor, index) => {
    const localName = `admin${index}`;
    // Prefer the explicit ID; otherwise derive one by stripping the
    // conventional package-name prefixes from the entrypoint.
    const pluginId =
      descriptor.id ??
      descriptor.entrypoint.replace(SCOPED_PREFIX_PATTERN, "").replace(EMDASH_PREFIX_PATTERN, "");
    importLines.push(`import * as ${localName} from "${descriptor.adminEntry}";`);
    registryEntries.push(` "${pluginId}": ${localName},`);
  });
  return `
// Auto-generated plugin admin registry
${importLines.join("\n")}
export const pluginAdmins = {
${registryEntries.join("\n")}
};
`;
}
/**
 * Generates the sandbox runner module.
 * Imports the configured sandbox runner factory, or provides a noop default
 * (with `sandboxEnabled = false`) when none is configured.
 */
export function generateSandboxRunnerModule(sandboxRunner?: string): string {
  if (sandboxRunner) {
    return `
// Auto-generated sandbox runner module
import { createSandboxRunner as _createSandboxRunner } from "${sandboxRunner}";
export const createSandboxRunner = _createSandboxRunner;
export const sandboxEnabled = true;
`;
  }
  // No sandbox runner configured - use noop
  return `
// No sandbox runner configured - sandboxed plugins disabled
import { createNoopSandboxRunner } from "emdash";
export const createSandboxRunner = createNoopSandboxRunner;
export const sandboxEnabled = false;
`;
}
/**
 * Generates the media providers module.
 * Imports and instantiates configured media providers at runtime.
 *
 * The built-in "local" provider (backed by the storage adapter) is always
 * emitted first unless a descriptor with id "local" sets
 * `config.enabled === false`. Descriptors with id "local" never become
 * custom providers — they only toggle the built-in one.
 */
export function generateMediaProvidersModule(descriptors: MediaProviderDescriptor[]): string {
  // Always include local provider by default unless explicitly disabled
  const localDisabled = descriptors.some((d) => d.id === "local" && d.config.enabled === false);
  const imports: string[] = [];
  const entries: string[] = [];
  // Add local provider first if not disabled
  if (!localDisabled) {
    imports.push(
      `import { createMediaProvider as createLocalProvider } from "emdash/media/local-runtime";`,
    );
    entries.push(`{
      id: "local",
      name: "Library",
      icon: "folder",
      capabilities: { browse: true, search: false, upload: true, delete: true },
      createProvider: (ctx) => createLocalProvider({ ...ctx, enabled: true }),
    }`);
  }
  // Add custom providers. Skip "local" descriptors entirely: they only
  // control the built-in provider above. (Bug fix: this was previously two
  // chained filters, the first of which was dead code because the second
  // dropped every "local" descriptor regardless of its enabled flag.)
  descriptors
    .filter((d) => d.id !== "local")
    .forEach((descriptor, index) => {
      const varName = `createProvider${index}`;
      imports.push(`import { createMediaProvider as ${varName} } from "${descriptor.entrypoint}";`);
      // The build-time config is spread first so per-request ctx wins.
      entries.push(`{
        id: ${JSON.stringify(descriptor.id)},
        name: ${JSON.stringify(descriptor.name)},
        icon: ${JSON.stringify(descriptor.icon)},
        capabilities: ${JSON.stringify(descriptor.capabilities)},
        createProvider: (ctx) => ${varName}({ ...${JSON.stringify(descriptor.config)}, ...ctx }),
      }`);
    });
  return `
// Auto-generated media providers module
${imports.join("\n")}
/** Media provider descriptors with factory functions */
export const mediaProviders = [
  ${entries.join(",\n  ")}
];
`;
}
/**
 * Generates the block components module.
 * Collects and merges `blockComponents` exports from plugin component entries.
 * Later plugins win on key collisions (object spread order).
 */
export function generateBlockComponentsModule(descriptors: PluginDescriptor[]): string {
  const withComponents = descriptors.filter((d) => d.componentsEntry);
  if (withComponents.length === 0) {
    return `export const pluginBlockComponents = {};`;
  }
  const importLines = withComponents.map(
    (descriptor, index) =>
      `import { blockComponents as bc${index} } from "${descriptor.componentsEntry}";`,
  );
  const spreadList = withComponents.map((_d, index) => `...bc${index}`).join(", ");
  return `${importLines.join("\n")}\nexport const pluginBlockComponents = { ${spreadList} };`;
}
/**
 * Generates the seed virtual module.
 * Reads the user's seed file at build time (in Node context) and embeds it,
 * so the runtime doesn't need filesystem access (required for workerd).
 *
 * Exports `userSeed` (user's seed or null) and `seed` (user's seed or default).
 */
export function generateSeedModule(projectRoot: string): string {
  // Read a file and return its raw text when it parses as JSON, else null.
  // The raw text (not the re-serialized value) is embedded verbatim.
  const readJsonFile = (path: string): string | null => {
    try {
      const raw = readFileSync(path, "utf-8");
      JSON.parse(raw); // validate only
      return raw;
    } catch {
      return null;
    }
  };
  // 1) Conventional location: .emdash/seed.json
  let userSeedJson = readJsonFile(resolve(projectRoot, ".emdash", "seed.json"));
  // 2) package.json → emdash.seed reference
  if (!userSeedJson) {
    try {
      const pkg: { emdash?: { seed?: string } } = JSON.parse(
        readFileSync(resolve(projectRoot, "package.json"), "utf-8"),
      );
      if (pkg.emdash?.seed) {
        userSeedJson = readJsonFile(resolve(projectRoot, pkg.emdash.seed));
      }
    } catch {
      // No usable package.json — fall through to the default seed
    }
  }
  if (userSeedJson) {
    return [`export const userSeed = ${userSeedJson};`, `export const seed = userSeed;`].join("\n");
  }
  // No user seed — inline the default
  return [
    `export const userSeed = null;`,
    `export const seed = ${JSON.stringify(defaultSeed)};`,
  ].join("\n");
}
/**
 * Resolve a module specifier from the project's context.
 * Uses Node.js require.resolve with the project root as base.
 *
 * @param specifier - Bare or relative module specifier to resolve.
 * @param projectRoot - Absolute path of the consuming project.
 * @returns The resolved absolute file path (or builtin name for core modules).
 */
function resolveModulePathFromProject(specifier: string, projectRoot: string): string {
  // Anchor resolution at the project's package.json so the project's own
  // node_modules tree is consulted, not this package's.
  const projectRequire = createRequire(resolve(projectRoot, "package.json"));
  return projectRequire.resolve(specifier);
}
/**
 * Generates the sandboxed plugins module.
 * Resolves plugin entrypoints to files, reads them, and embeds the code.
 *
 * At runtime, middleware uses SandboxRunner to load these into isolates.
 */
export function generateSandboxedPluginsModule(
  sandboxed: PluginDescriptor[],
  projectRoot: string,
): string {
  if (sandboxed.length === 0) {
    return `
// No sandboxed plugins configured
export const sandboxedPlugins = [];
`;
  }
  const pluginEntries = sandboxed.map((descriptor) => {
    // Resolve the bundle through the project's require context, then embed
    // its source so the runtime needs no filesystem access.
    const filePath = resolveModulePathFromProject(descriptor.entrypoint, projectRoot);
    const code = readFileSync(filePath, "utf-8");
    return `{
      id: ${JSON.stringify(descriptor.id)},
      version: ${JSON.stringify(descriptor.version)},
      options: ${JSON.stringify(descriptor.options ?? {})},
      capabilities: ${JSON.stringify(descriptor.capabilities ?? [])},
      allowedHosts: ${JSON.stringify(descriptor.allowedHosts ?? [])},
      storage: ${JSON.stringify(descriptor.storage ?? {})},
      adminPages: ${JSON.stringify(descriptor.adminPages ?? [])},
      adminWidgets: ${JSON.stringify(descriptor.adminWidgets ?? [])},
      adminEntry: ${JSON.stringify(descriptor.adminEntry)},
      // Code read from: ${filePath}
      code: ${JSON.stringify(code)},
    }`;
  });
  return `
// Auto-generated sandboxed plugins module
// Plugin code is embedded at build time
/**
 * Sandboxed plugin entries with embedded code.
 * Loaded at runtime via SandboxRunner.
 */
export const sandboxedPlugins = [
  ${pluginEntries.join(",\n  ")}
];
`;
}

View File

@@ -0,0 +1,328 @@
/**
* Vite Plugin Configuration
*
* Defines the Vite plugin that handles virtual modules and other
* Vite-specific configuration for EmDash.
*/
import { createRequire } from "node:module";
import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url";
import type { AstroConfig } from "astro";
import type { Plugin } from "vite";
import type { EmDashConfig, PluginDescriptor } from "./runtime.js";
import {
VIRTUAL_CONFIG_ID,
RESOLVED_VIRTUAL_CONFIG_ID,
VIRTUAL_DIALECT_ID,
RESOLVED_VIRTUAL_DIALECT_ID,
VIRTUAL_STORAGE_ID,
RESOLVED_VIRTUAL_STORAGE_ID,
VIRTUAL_ADMIN_REGISTRY_ID,
RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID,
VIRTUAL_PLUGINS_ID,
RESOLVED_VIRTUAL_PLUGINS_ID,
VIRTUAL_SANDBOX_RUNNER_ID,
RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID,
VIRTUAL_SANDBOXED_PLUGINS_ID,
RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID,
VIRTUAL_AUTH_ID,
RESOLVED_VIRTUAL_AUTH_ID,
VIRTUAL_MEDIA_PROVIDERS_ID,
RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID,
VIRTUAL_BLOCK_COMPONENTS_ID,
RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID,
VIRTUAL_SEED_ID,
RESOLVED_VIRTUAL_SEED_ID,
generateSeedModule,
generateConfigModule,
generateDialectModule,
generateStorageModule,
generateAuthModule,
generatePluginsModule,
generateAdminRegistryModule,
generateSandboxRunnerModule,
generateSandboxedPluginsModule,
generateMediaProvidersModule,
generateBlockComponentsModule,
} from "./virtual-modules.js";
/**
 * Resolve path to the admin package dist directory.
 * Used for Vite alias to ensure the package is found in pnpm's isolated node_modules.
 *
 * @returns Directory containing the built admin package entry (dist/).
 */
function resolveAdminDist(): string {
  // require.resolve follows Node's algorithm from this file's location,
  // which works under pnpm's isolated node_modules layout.
  const localRequire = createRequire(import.meta.url);
  const adminEntry = localRequire.resolve("@emdashcms/admin");
  return dirname(adminEntry);
}
/**
 * Resolve path to the admin package source directory.
 * In dev mode, we alias @emdashcms/admin to the source so Vite processes it
 * directly — giving instant HMR instead of requiring a rebuild + restart.
 *
 * @returns The src/ directory when running inside the monorepo, else undefined.
 */
function resolveAdminSource(): string | undefined {
  const localRequire = createRequire(import.meta.url);
  const distEntry = localRequire.resolve("@emdashcms/admin");
  // dist/index.js -> go up to package root, then into src/
  const packageRoot = resolve(dirname(distEntry), "..");
  const srcEntry = resolve(packageRoot, "src", "index.ts");
  // Only use the source alias when the source tree actually exists
  // (it won't in published packages, only in the monorepo).
  try {
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- CJS require returns any
    const fs = localRequire("node:fs") as typeof import("node:fs");
    if (fs.existsSync(srcEntry)) {
      return resolve(packageRoot, "src");
    }
  } catch {
    // Not in monorepo — fall back to dist
  }
  return undefined;
}
/** Options shared by the virtual-modules plugin and the Vite config factory. */
export interface VitePluginOptions {
  /** Serializable config (database, storage, auth descriptors) */
  serializableConfig: Record<string, unknown>;
  /** Resolved EmDash config */
  resolvedConfig: EmDashConfig;
  /** Plugin descriptors (trusted plugins; sandboxed ones live on resolvedConfig) */
  pluginDescriptors: PluginDescriptor[];
  /** Astro config (used for the project root and adapter detection) */
  astroConfig: AstroConfig;
}
/**
 * Creates the EmDash virtual modules Vite plugin.
 *
 * `resolveId` maps each public "virtual:emdash/*" id to its "\0"-prefixed
 * resolved id; `load` generates the module source for each resolved id.
 */
export function createVirtualModulesPlugin(options: VitePluginOptions): Plugin {
  const { serializableConfig, resolvedConfig, pluginDescriptors, astroConfig } = options;
  // Public virtual id -> "\0"-prefixed resolved id. Unknown ids fall through
  // (lookup yields undefined), letting other resolvers handle them.
  const resolvedIds: Record<string, string> = {
    [VIRTUAL_CONFIG_ID]: RESOLVED_VIRTUAL_CONFIG_ID,
    [VIRTUAL_DIALECT_ID]: RESOLVED_VIRTUAL_DIALECT_ID,
    [VIRTUAL_STORAGE_ID]: RESOLVED_VIRTUAL_STORAGE_ID,
    [VIRTUAL_ADMIN_REGISTRY_ID]: RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID,
    [VIRTUAL_PLUGINS_ID]: RESOLVED_VIRTUAL_PLUGINS_ID,
    [VIRTUAL_SANDBOX_RUNNER_ID]: RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID,
    [VIRTUAL_SANDBOXED_PLUGINS_ID]: RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID,
    [VIRTUAL_AUTH_ID]: RESOLVED_VIRTUAL_AUTH_ID,
    [VIRTUAL_MEDIA_PROVIDERS_ID]: RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID,
    [VIRTUAL_BLOCK_COMPONENTS_ID]: RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID,
    [VIRTUAL_SEED_ID]: RESOLVED_VIRTUAL_SEED_ID,
  };
  return {
    name: "emdash-virtual-modules",
    resolveId(id: string) {
      return resolvedIds[id];
    },
    load(id: string) {
      switch (id) {
        case RESOLVED_VIRTUAL_CONFIG_ID:
          return generateConfigModule(serializableConfig);
        case RESOLVED_VIRTUAL_DIALECT_ID:
          // Statically imports the configured dialect so Vite can bundle it.
          return generateDialectModule(
            resolvedConfig.database?.entrypoint,
            resolvedConfig.database?.type,
            resolvedConfig.database?.config,
          );
        case RESOLVED_VIRTUAL_STORAGE_ID:
          return generateStorageModule(resolvedConfig.storage?.entrypoint);
        case RESOLVED_VIRTUAL_PLUGINS_ID:
          return generatePluginsModule(pluginDescriptors);
        case RESOLVED_VIRTUAL_ADMIN_REGISTRY_ID:
          // The admin registry covers both trusted and sandboxed plugins.
          return generateAdminRegistryModule([
            ...pluginDescriptors,
            ...(resolvedConfig.sandboxed ?? []),
          ]);
        case RESOLVED_VIRTUAL_SANDBOX_RUNNER_ID:
          return generateSandboxRunnerModule(resolvedConfig.sandboxRunner);
        case RESOLVED_VIRTUAL_SANDBOXED_PLUGINS_ID:
          // Project root is required for resolving plugin bundles to files.
          return generateSandboxedPluginsModule(
            resolvedConfig.sandboxed ?? [],
            fileURLToPath(astroConfig.root),
          );
        case RESOLVED_VIRTUAL_AUTH_ID: {
          // Statically imports the configured auth provider, if any.
          const authDescriptor = resolvedConfig.auth;
          const entrypoint =
            authDescriptor && "entrypoint" in authDescriptor
              ? authDescriptor.entrypoint
              : undefined;
          return generateAuthModule(entrypoint);
        }
        case RESOLVED_VIRTUAL_MEDIA_PROVIDERS_ID:
          return generateMediaProvidersModule(resolvedConfig.mediaProviders ?? []);
        case RESOLVED_VIRTUAL_BLOCK_COMPONENTS_ID:
          // Plugin rendering components for PortableText.
          return generateBlockComponentsModule(pluginDescriptors);
        case RESOLVED_VIRTUAL_SEED_ID:
          // Embeds the user seed (or default) at build time.
          return generateSeedModule(fileURLToPath(astroConfig.root));
      }
    },
  };
}
/**
 * Modules that contain native Node.js addons or Node-only code.
 * These must be external in SSR to avoid bundling failures on Node.
 * On Cloudflare, the adapter handles its own externalization — setting
 * ssr.external there conflicts with @cloudflare/vite-plugin's validation.
 *
 * Also used as `optimizeDeps.exclude` for non-Cloudflare builds in
 * createViteConfig — keep the two usages in sync.
 */
const NODE_NATIVE_EXTERNALS = [
  "better-sqlite3",
  "bindings",
  "file-uri-to-path",
  "@libsql/kysely-libsql",
  "pg",
];
/**
 * Detect whether the Cloudflare adapter is being used.
 *
 * @returns true when the configured Astro adapter is "@astrojs/cloudflare".
 */
function isCloudflareAdapter(astroConfig: AstroConfig): boolean {
  const adapterName = astroConfig.adapter?.name;
  return adapterName === "@astrojs/cloudflare";
}
/**
 * Creates the Vite config update for EmDash.
 *
 * @param options - Resolved integration options (serializable config, EmDash
 *   config, plugin descriptors, and the Astro config).
 * @param command - Astro command being run; "dev" switches the admin alias to
 *   source (for HMR) when running inside the monorepo.
 * @returns Partial Vite config: admin aliases, the virtual-modules plugin,
 *   and SSR external/noExternal handling per adapter.
 */
export function createViteConfig(
  options: VitePluginOptions,
  command: "dev" | "build" | "preview" | "sync",
): NonNullable<AstroConfig["vite"]> {
  const adminDistPath = resolveAdminDist();
  const cloudflare = isCloudflareAdapter(options.astroConfig);
  const isDev = command === "dev";
  // In dev mode within the monorepo, alias JS imports to source for instant HMR.
  // CSS always comes from dist/ (pre-compiled by @tailwindcss/cli) since Tailwind's
  // Vite plugin has native deps that don't bundle well. Run `pnpm dev` in packages/admin
  // alongside the demo server to get CSS watch-rebuilds too.
  const adminSourcePath = isDev ? resolveAdminSource() : undefined;
  const useSource = adminSourcePath !== undefined;
  return {
    resolve: {
      dedupe: ["@emdashcms/admin", "react", "react-dom"],
      // Array form so more-specific entries are checked first.
      // The styles.css alias must come before the package alias, otherwise
      // Vite's prefix matching on "@emdashcms/admin" would resolve
      // "@emdashcms/admin/styles.css" through the source directory.
      alias: [
        // CSS: always dist (pre-compiled by @tailwindcss/cli)
        { find: "@emdashcms/admin/styles.css", replacement: resolve(adminDistPath, "styles.css") },
        // JS: source in dev (HMR), dist in build
        { find: "@emdashcms/admin", replacement: useSource ? adminSourcePath : adminDistPath },
      ],
    },
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- Monorepo has both vite 6 (docs) and vite 7 (core). tsgo resolves correctly.
    plugins: [createVirtualModulesPlugin(options)] as NonNullable<AstroConfig["vite"]>["plugins"],
    // Handle native modules for SSR.
    // On Node: external keeps native addons out of the SSR bundle.
    // On Cloudflare: skip — the adapter handles externalization, and setting
    // ssr.external conflicts with @cloudflare/vite-plugin's resolve.external validation.
    ssr: cloudflare
      ? {
          noExternal: ["emdash", "@emdashcms/admin"],
          // Pre-bundle EmDash's runtime deps for workerd. Without this,
          // Vite discovers them one-by-one on first request, causing workerd
          // to enter "worker cancelled" state on cold cache.
          // NOTE(review): this list mirrors emdash's dependency tree and must
          // be updated when runtime deps change — confirm on dep bumps.
          optimizeDeps: {
            include: [
              // EmDash direct deps
              "emdash > @portabletext/toolkit",
              "emdash > @unpic/placeholder",
              "emdash > blurhash",
              "emdash > croner",
              "emdash > image-size",
              "emdash > jose",
              "emdash > jpeg-js",
              "emdash > kysely",
              "emdash > mime/lite",
              "emdash > modern-tar",
              "emdash > sanitize-html",
              "emdash > ulidx",
              "emdash > upng-js",
              "emdash > astro-portabletext",
              "emdash > sax",
              // Deeper transitive deps
              "emdash > sanitize-html > parse5",
              "emdash > @emdashcms/gutenberg-to-portable-text > @wordpress/block-serialization-default-parser",
              "emdash > @emdashcms/auth > @oslojs/crypto/ecdsa",
              "emdash > @emdashcms/auth > @oslojs/crypto/sha2",
              "emdash > @emdashcms/auth > @oslojs/webauthn",
              // React (commonly used, may be hoisted)
              "react",
              "react/jsx-dev-runtime",
              "react/jsx-runtime",
              "react-dom",
              "react-dom/server",
              // Top-level deps (use astro > path for pnpm compat)
              "astro > zod/v4",
              "astro > zod/v4/core",
              "@emdashcms/cloudflare > kysely-d1",
              // Astro internal deps not covered by @astrojs/cloudflare adapter
              "astro/virtual-modules/middleware.js",
              "astro/virtual-modules/live-config",
              "astro/content/runtime",
              "astro/assets/utils/inferRemoteSize.js",
              "astro/assets/fonts/runtime.js",
              "@astrojs/cloudflare/image-service",
            ],
          },
        }
      : {
          external: NODE_NATIVE_EXTERNALS,
          noExternal: ["emdash", "@emdashcms/admin"],
        },
    optimizeDeps: {
      // When using source, don't pre-bundle JS — let Vite transform on the fly for HMR.
      // When using dist, pre-bundle to avoid re-optimization on first hydration.
      include: useSource
        ? ["@astrojs/react/client.js"]
        : ["@emdashcms/admin", "@astrojs/react/client.js"],
      exclude: cloudflare ? [] : NODE_NATIVE_EXTERNALS,
    },
  };
}

View File

@@ -0,0 +1,398 @@
/**
* EmDash middleware
*
* Thin wrapper that initializes EmDashRuntime and attaches it to locals.
* All heavy lifting happens in EmDashRuntime.
*/
import { defineMiddleware } from "astro:middleware";
import { Kysely } from "kysely";
// Import from virtual modules (populated by integration at build time)
// @ts-ignore - virtual module
import virtualConfig from "virtual:emdash/config";
// @ts-ignore - virtual module
import {
createDialect as virtualCreateDialect,
isSessionEnabled as virtualIsSessionEnabled,
getD1Binding as virtualGetD1Binding,
getDefaultConstraint as virtualGetDefaultConstraint,
getBookmarkCookieName as virtualGetBookmarkCookieName,
createSessionDialect as virtualCreateSessionDialect,
} from "virtual:emdash/dialect";
// @ts-ignore - virtual module
import { mediaProviders as virtualMediaProviders } from "virtual:emdash/media-providers";
// @ts-ignore - virtual module
import { plugins as virtualPlugins } from "virtual:emdash/plugins";
import {
createSandboxRunner as virtualCreateSandboxRunner,
sandboxEnabled as virtualSandboxEnabled,
// @ts-ignore - virtual module
} from "virtual:emdash/sandbox-runner";
// @ts-ignore - virtual module
import { sandboxedPlugins as virtualSandboxedPlugins } from "virtual:emdash/sandboxed-plugins";
// @ts-ignore - virtual module
import { createStorage as virtualCreateStorage } from "virtual:emdash/storage";
import {
EmDashRuntime,
type RuntimeDependencies,
type SandboxedPluginEntry,
type MediaProviderEntry,
} from "../emdash-runtime.js";
import { setI18nConfig } from "../i18n/config.js";
import type { Database, Storage } from "../index.js";
import type { SandboxRunner } from "../plugins/sandbox/types.js";
import type { ResolvedPlugin } from "../plugins/types.js";
import { runWithContext } from "../request-context.js";
import type { EmDashConfig } from "./integration/runtime.js";
// Cached runtime instance (persists across requests within worker)
let runtimeInstance: EmDashRuntime | null = null;
// Whether initialization is in progress (prevents concurrent init attempts)
// — getRuntime() polls this flag rather than sharing a promise across requests.
let runtimeInitializing = false;
/** Whether i18n config has been initialized from the virtual module (see getConfig) */
let i18nInitialized = false;
/**
 * Get EmDash configuration from virtual module
 *
 * On first access per worker lifetime, also pushes the i18n section of the
 * config into the i18n module (or clears it when absent).
 *
 * @returns The config object, or null when the virtual module is empty.
 */
function getConfig(): EmDashConfig | null {
  if (!virtualConfig || typeof virtualConfig !== "object") {
    return null;
  }
  // Initialize i18n config on first access (once per worker lifetime)
  if (!i18nInitialized) {
    i18nInitialized = true;
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module checked as object above
    const config = virtualConfig as Record<string, unknown>;
    const i18nSection = config.i18n;
    if (i18nSection && typeof i18nSection === "object") {
      setI18nConfig(
        // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- runtime-checked above
        i18nSection as {
          defaultLocale: string;
          locales: string[];
          fallback?: Record<string, string>;
        },
      );
    } else {
      setI18nConfig(null);
    }
  }
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
  return virtualConfig as EmDashConfig;
}
/**
 * Get plugins from virtual module
 *
 * @returns The build-time plugin instances, or an empty array when the
 *   virtual module provided none.
 */
function getPlugins(): ResolvedPlugin[] {
  // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
  const resolved = virtualPlugins as ResolvedPlugin[];
  return resolved || [];
}
/**
 * Build runtime dependencies from virtual modules
 *
 * This is the single point where the untyped virtual-module exports
 * (all imported with @ts-ignore above) are given their runtime types
 * before being handed to EmDashRuntime.create().
 *
 * @param config - The EmDash config from virtual:emdash/config.
 * @returns Fully-typed dependency bundle for EmDashRuntime.
 */
function buildDependencies(config: EmDashConfig): RuntimeDependencies {
  return {
    config,
    plugins: getPlugins(),
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createDialect: virtualCreateDialect as (config: Record<string, unknown>) => unknown,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createStorage: virtualCreateStorage as ((config: Record<string, unknown>) => Storage) | null,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    sandboxEnabled: virtualSandboxEnabled as boolean,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    sandboxedPluginEntries: (virtualSandboxedPlugins as SandboxedPluginEntry[]) || [],
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    createSandboxRunner: virtualCreateSandboxRunner as
      | ((opts: { db: Kysely<Database> }) => SandboxRunner)
      | null,
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module import is untyped (@ts-ignore above)
    mediaProviderEntries: (virtualMediaProviders as MediaProviderEntry[]) || [],
  };
}
/**
 * Get or create the runtime instance
 *
 * Only one request initializes the runtime; concurrent requests poll the
 * in-progress flag instead of sharing a promise, because workerd flags
 * cross-request promise resolution (causes warnings + potential hangs).
 *
 * @param config - The EmDash config used for first-time initialization.
 * @returns The (worker-lifetime cached) runtime instance.
 */
async function getRuntime(config: EmDashConfig): Promise<EmDashRuntime> {
  while (!runtimeInstance) {
    if (runtimeInitializing) {
      // Another request is initializing — wait briefly and re-check.
      await new Promise((resolve) => setTimeout(resolve, 50));
      continue;
    }
    runtimeInitializing = true;
    try {
      const deps = buildDependencies(config);
      runtimeInstance = await EmDashRuntime.create(deps);
    } finally {
      // Reset even on failure so the next request can retry initialization.
      runtimeInitializing = false;
    }
  }
  return runtimeInstance;
}
/**
 * Baseline security headers applied to all responses.
 * Admin routes get additional headers (strict CSP) from auth middleware.
 *
 * @param response - Response to mutate in place.
 */
function setBaselineSecurityHeaders(response: Response): void {
  const { headers } = response;
  // Prevent MIME type sniffing
  headers.set("X-Content-Type-Options", "nosniff");
  // Control referrer information
  headers.set("Referrer-Policy", "strict-origin-when-cross-origin");
  // Restrict access to sensitive browser APIs
  headers.set("Permissions-Policy", "camera=(), microphone=(), geolocation=(), payment=()");
  // Prevent clickjacking (non-admin routes; admin CSP uses frame-ancestors)
  if (!headers.has("Content-Security-Policy")) {
    headers.set("X-Frame-Options", "SAMEORIGIN");
  }
}
/** Public routes that require the runtime (sitemap, robots.txt, etc.) */
// Served to anonymous visitors but still need runtime access, so onRequest
// initializes the runtime for them even without a session or edit cookie.
const PUBLIC_RUNTIME_ROUTES = new Set(["/sitemap.xml", "/robots.txt"]);
/**
 * Runtime middleware.
 *
 * Decides whether a request needs the EmDash runtime, lazily initializes it,
 * and attaches the runtime's handler surface to `locals.emdash` /
 * `locals.emdashManifest` for downstream route handlers. Also manages two
 * special execution contexts:
 *  - Playground mode: a per-session Durable Object database stashed on
 *    `locals.__playgroundDb` is made available via `runWithContext()`.
 *  - D1 read-replica sessions: a per-request D1 session + Kysely instance,
 *    with a bookmark cookie for read-your-writes consistency.
 */
export const onRequest = defineMiddleware(async (context, next) => {
  const { request, locals, cookies } = context;
  const url = context.url;
  // Process /_emdash routes and public routes with an active session
  // (logged-in editors need the runtime for toolbar/visual editing on public pages)
  const isEmDashRoute = url.pathname.startsWith("/_emdash");
  const isPublicRuntimeRoute = PUBLIC_RUNTIME_ROUTES.has(url.pathname);
  // Check for edit mode cookie - editors viewing public pages need the runtime
  // so auth middleware can verify their session for visual editing
  const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
  const hasPreviewToken = url.searchParams.has("_preview");
  // Playground mode: the playground middleware stashes the per-session DO database
  // on locals.__playgroundDb. When present, use runWithContext() to make it
  // available to getDb() and the runtime's db getter via the correct ALS instance.
  const playgroundDb = locals.__playgroundDb;
  // Fast path: a plain public page for an anonymous visitor with no playground
  // database skips runtime initialization entirely.
  if (!isEmDashRoute && !isPublicRuntimeRoute && !hasEditCookie && !hasPreviewToken) {
    const sessionUser = await context.session?.get("user");
    if (!sessionUser && !playgroundDb) {
      const response = await next();
      setBaselineSecurityHeaders(response);
      return response;
    }
  }
  const config = getConfig();
  if (!config) {
    console.error("EmDash: No configuration found");
    return next();
  }
  // In playground mode, wrap the entire runtime init + request handling in
  // runWithContext so that getDatabase() and all init queries use the real
  // DO database via the same AsyncLocalStorage instance as the loader.
  const doInit = async () => {
    try {
      // Get or create runtime
      const runtime = await getRuntime(config);
      // Get manifest (cached after first call)
      const manifest = await runtime.getManifest();
      // Attach to locals for route handlers
      locals.emdashManifest = manifest;
      locals.emdash = {
        // Content handlers
        handleContentList: runtime.handleContentList.bind(runtime),
        handleContentGet: runtime.handleContentGet.bind(runtime),
        handleContentCreate: runtime.handleContentCreate.bind(runtime),
        handleContentUpdate: runtime.handleContentUpdate.bind(runtime),
        handleContentDelete: runtime.handleContentDelete.bind(runtime),
        // Trash handlers
        handleContentListTrashed: runtime.handleContentListTrashed.bind(runtime),
        handleContentRestore: runtime.handleContentRestore.bind(runtime),
        handleContentPermanentDelete: runtime.handleContentPermanentDelete.bind(runtime),
        handleContentCountTrashed: runtime.handleContentCountTrashed.bind(runtime),
        handleContentGetIncludingTrashed: runtime.handleContentGetIncludingTrashed.bind(runtime),
        // Duplicate handler
        handleContentDuplicate: runtime.handleContentDuplicate.bind(runtime),
        // Publishing & Scheduling handlers
        handleContentPublish: runtime.handleContentPublish.bind(runtime),
        handleContentUnpublish: runtime.handleContentUnpublish.bind(runtime),
        handleContentSchedule: runtime.handleContentSchedule.bind(runtime),
        handleContentUnschedule: runtime.handleContentUnschedule.bind(runtime),
        handleContentCountScheduled: runtime.handleContentCountScheduled.bind(runtime),
        handleContentDiscardDraft: runtime.handleContentDiscardDraft.bind(runtime),
        handleContentCompare: runtime.handleContentCompare.bind(runtime),
        handleContentTranslations: runtime.handleContentTranslations.bind(runtime),
        // Media handlers
        handleMediaList: runtime.handleMediaList.bind(runtime),
        handleMediaGet: runtime.handleMediaGet.bind(runtime),
        handleMediaCreate: runtime.handleMediaCreate.bind(runtime),
        handleMediaUpdate: runtime.handleMediaUpdate.bind(runtime),
        handleMediaDelete: runtime.handleMediaDelete.bind(runtime),
        // Revision handlers
        handleRevisionList: runtime.handleRevisionList.bind(runtime),
        handleRevisionGet: runtime.handleRevisionGet.bind(runtime),
        handleRevisionRestore: runtime.handleRevisionRestore.bind(runtime),
        // Plugin routes
        handlePluginApiRoute: runtime.handlePluginApiRoute.bind(runtime),
        getPluginRouteMeta: runtime.getPluginRouteMeta.bind(runtime),
        // Media provider methods
        getMediaProvider: runtime.getMediaProvider.bind(runtime),
        getMediaProviderList: runtime.getMediaProviderList.bind(runtime),
        // Direct access (for advanced use cases)
        storage: runtime.storage,
        db: runtime.db,
        hooks: runtime.hooks,
        email: runtime.email,
        configuredPlugins: runtime.configuredPlugins,
        // Configuration (for checking database type, auth mode, etc.)
        config,
        // Manifest invalidation (call after schema changes)
        invalidateManifest: runtime.invalidateManifest.bind(runtime),
        // Sandbox runner (for marketplace plugin install/update)
        getSandboxRunner: runtime.getSandboxRunner.bind(runtime),
        // Sync marketplace plugin states (after install/update/uninstall)
        syncMarketplacePlugins: runtime.syncMarketplacePlugins.bind(runtime),
        // Update plugin enabled/disabled status and rebuild hook pipeline
        setPluginStatus: runtime.setPluginStatus.bind(runtime),
      };
    } catch (error) {
      // NOTE(review): init failures are swallowed so the page still renders;
      // locals.emdash stays undefined and downstream handlers must null-check.
      console.error("EmDash middleware error:", error);
    }
    // =========================================================================
    // D1 Read Replica Session Management
    //
    // When D1 sessions are enabled, we create a per-request D1 session and
    // Kysely instance. The session is wrapped in ALS so `runtime.db` (a getter)
    // picks up the per-request instance instead of the singleton.
    //
    // After the response, we extract the bookmark from the session and set
    // it as a cookie for authenticated users (read-your-writes consistency).
    // =========================================================================
    const dbConfig = config?.database?.config;
    const sessionEnabled =
      dbConfig &&
      typeof virtualIsSessionEnabled === "function" &&
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module functions are untyped
      (virtualIsSessionEnabled as (config: unknown) => boolean)(dbConfig);
    if (
      sessionEnabled &&
      typeof virtualGetD1Binding === "function" &&
      virtualCreateSessionDialect
    ) {
      // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module functions are untyped
      const d1Binding = (virtualGetD1Binding as (config: unknown) => unknown)(dbConfig);
      if (d1Binding && typeof d1Binding === "object" && "withSession" in d1Binding) {
        const isAuthenticated = !!(await context.session?.get("user"));
        const isWrite = request.method !== "GET" && request.method !== "HEAD";
        // Determine session constraint:
        // - Config says "primary-first" → always "first-primary"
        // - Authenticated writes → "first-primary" (need to hit primary)
        // - Authenticated reads with bookmark → resume from bookmark
        // - Otherwise → "first-unconstrained" (nearest replica)
        // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module functions are untyped
        const configConstraint = (virtualGetDefaultConstraint as (config: unknown) => string)(
          dbConfig,
        );
        // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module functions are untyped
        const cookieName = (virtualGetBookmarkCookieName as (config: unknown) => string)(dbConfig);
        let constraint: string = configConstraint;
        if (isAuthenticated && isWrite) {
          constraint = "first-primary";
        } else if (isAuthenticated) {
          const bookmarkCookie = context.cookies.get(cookieName);
          if (bookmarkCookie?.value) {
            // NOTE(review): the cookie value is passed verbatim as a bookmark
            // constraint — assumes D1 tolerates malformed bookmarks; verify.
            constraint = bookmarkCookie.value;
          }
        }
        // Create the D1 session and per-request Kysely instance.
        // D1DatabaseSession has the same prepare()/batch() interface as D1Database,
        // so createSessionDialect passes it straight to D1Dialect.
        // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- D1 binding with Sessions API, checked via "withSession" in d1Binding above
        const withSession = (d1Binding as { withSession: (c: string) => unknown }).withSession;
        const session = withSession.call(d1Binding, constraint);
        const sessionDialect =
          // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- virtual module functions are untyped
          (virtualCreateSessionDialect as (db: unknown) => import("kysely").Dialect)(session);
        const sessionDb = new Kysely<Database>({ dialect: sessionDialect });
        // Wrap the request in ALS with the per-request db
        return runWithContext({ editMode: false, db: sessionDb }, async () => {
          const response = await next();
          setBaselineSecurityHeaders(response);
          // Set bookmark cookie for authenticated users only — they need
          // read-your-writes consistency across requests. Anonymous visitors
          // don't write, so they get "first-unconstrained" every time.
          if (
            isAuthenticated &&
            session &&
            typeof session === "object" &&
            "getBookmark" in session
          ) {
            // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- D1DatabaseSession with getBookmark()
            const getBookmark = (session as { getBookmark: () => string | null }).getBookmark;
            const newBookmark = getBookmark.call(session);
            if (newBookmark) {
              response.headers.append(
                "Set-Cookie",
                `${cookieName}=${newBookmark}; Path=/; HttpOnly; SameSite=Lax; Secure`,
              );
            }
          }
          return response;
        });
      }
    }
    // Default path: no D1 session — run the request with baseline headers only.
    const response = await next();
    setBaselineSecurityHeaders(response);
    return response;
  }; // end doInit
  if (playgroundDb) {
    // Read the edit-mode cookie to determine if visual editing is active.
    // Default to false -- editing is opt-in via the playground toolbar toggle.
    const editMode = context.cookies.get("emdash-edit-mode")?.value === "true";
    return runWithContext({ editMode, db: playgroundDb }, doInit);
  }
  return doInit();
});
export default onRequest;

View File

@@ -0,0 +1,743 @@
/**
* Auth middleware for admin routes
*
* Checks if the user is authenticated and has appropriate permissions.
* Supports two auth modes:
* - Passkey (default): Session-based auth with passkey login
* - External providers: JWT-based auth (Cloudflare Access, etc.)
*
* This middleware runs AFTER the setup middleware - so if we get here,
* we know setup is complete and users exist.
*/
import type { User, RoleLevel } from "@emdashcms/auth";
import { createKyselyAdapter } from "@emdashcms/auth/adapters/kysely";
import { defineMiddleware } from "astro:middleware";
import { ulid } from "ulidx";
// Import auth provider via virtual module (statically bundled)
// This avoids dynamic import issues in Cloudflare Workers
import { authenticate as virtualAuthenticate } from "virtual:emdash/auth";
import { checkPublicCsrf } from "../../api/csrf.js";
import { apiError } from "../../api/error.js";
import { resolveApiToken, resolveOAuthToken } from "../../api/handlers/api-tokens.js";
import { hasScope } from "../../auth/api-tokens.js";
import { getAuthMode, type ExternalAuthMode } from "../../auth/mode.js";
import type { ExternalAuthConfig } from "../../auth/types.js";
import type { EmDashHandlers, EmDashManifest } from "../types.js";
/** Cache headers for middleware error responses (matches API_CACHE_HEADERS in api/error.ts) */
const MW_CACHE_HEADERS = {
  "Cache-Control": "private, no-store",
} as const;
// Augment Astro's App namespace so route handlers get typed access to the
// per-request auth/runtime state this middleware attaches.
declare global {
  namespace App {
    interface Locals {
      /** Authenticated user, set by session, external-provider, or Bearer-token auth. */
      user?: User;
      /** Token scopes when authenticated via API token or OAuth token. Undefined for session auth. */
      tokenScopes?: string[];
      /** Runtime handler surface attached by the runtime middleware. */
      emdash?: EmDashHandlers;
      /** Cached schema manifest attached by the runtime middleware. */
      emdashManifest?: EmDashManifest;
    }
    interface SessionData {
      // Minimal payload — the full user record is re-fetched from the DB per request.
      user: { id: string };
      hasSeenWelcome: boolean;
    }
  }
}
// Role level constants (matching @emdashcms/auth)
const ROLE_ADMIN = 50;
/**
 * Strict Content-Security-Policy for /_emdash routes (admin + API).
 *
 * Applied via middleware header rather than Astro's built-in CSP because
 * Astro's auto-hashing defeats 'unsafe-inline' (CSP3 ignores 'unsafe-inline'
 * when hashes are present), which would break user-facing pages.
 */
function buildEmDashCsp(marketplaceUrl?: string): string {
  // img-src allows self, data URIs, blobs, and — when a valid marketplace URL
  // is configured — the marketplace origin.
  let marketplaceOrigin: string | undefined;
  if (marketplaceUrl) {
    try {
      marketplaceOrigin = new URL(marketplaceUrl).origin;
    } catch {
      // Invalid marketplace URL — leave it out of the policy.
    }
  }
  const imgSrc = ["'self'", "data:", "blob:"]
    .concat(marketplaceOrigin ? [marketplaceOrigin] : [])
    .join(" ");
  const directives = [
    "default-src 'self'",
    "script-src 'self' 'unsafe-inline'",
    "style-src 'self' 'unsafe-inline'",
    "connect-src 'self'",
    "form-action 'self'",
    "frame-ancestors 'none'",
    `img-src ${imgSrc}`,
    "object-src 'none'",
    "base-uri 'self'",
  ];
  return directives.join("; ");
}
/**
 * API routes that skip auth — each handles its own access control.
 *
 * Prefix entries match any path starting with that prefix.
 * Exact entries (no trailing slash or wildcard) match that path only.
 *
 * NOTE(review): bare prefixes (no trailing slash) also match longer sibling
 * paths — e.g. "/_emdash/api/setup" matches "/_emdash/api/setup-other".
 * Confirm no such sibling routes exist.
 */
const PUBLIC_API_PREFIXES = [
  "/_emdash/api/setup",
  "/_emdash/api/auth/login",
  "/_emdash/api/auth/register",
  "/_emdash/api/auth/dev-bypass",
  "/_emdash/api/auth/signup/",
  "/_emdash/api/auth/magic-link/",
  "/_emdash/api/auth/invite/accept",
  "/_emdash/api/auth/invite/complete",
  "/_emdash/api/auth/oauth/",
  "/_emdash/api/oauth/device/token",
  "/_emdash/api/oauth/device/code",
  "/_emdash/api/oauth/token",
  "/_emdash/api/comments/",
  "/_emdash/api/media/file/",
  "/_emdash/.well-known/",
];
const PUBLIC_API_EXACT = new Set([
  "/_emdash/api/auth/passkey/options",
  "/_emdash/api/auth/passkey/verify",
  // NOTE(review): redundant — already covered by the identical entry in
  // PUBLIC_API_PREFIXES above.
  "/_emdash/api/oauth/token",
  "/_emdash/api/snapshot",
]);
/** True when the path is a public /_emdash route that manages its own access control. */
function isPublicEmDashRoute(pathname: string): boolean {
  // Dev-only: the typegen endpoint is reachable without auth.
  if (import.meta.env.DEV && pathname === "/_emdash/api/typegen") return true;
  return (
    PUBLIC_API_EXACT.has(pathname) ||
    PUBLIC_API_PREFIXES.some((prefix) => pathname.startsWith(prefix))
  );
}
/**
 * Auth gate for all requests. Classifies the route (public API, plugin API,
 * setup, public page, admin/API) and applies the matching auth + CSRF policy.
 * Bearer-token auth is attempted before session/external auth; token-
 * authenticated API requests additionally pass through scope enforcement.
 */
export const onRequest = defineMiddleware(async (context, next) => {
  const { url } = context;
  // Only check auth on admin routes and API routes
  const isAdminRoute = url.pathname.startsWith("/_emdash/admin");
  const isSetupRoute = url.pathname.startsWith("/_emdash/admin/setup");
  const isApiRoute = url.pathname.startsWith("/_emdash/api");
  const isPublicApiRoute = isPublicEmDashRoute(url.pathname);
  const isPublicRoute = !isAdminRoute && !isApiRoute;
  // Public API routes skip auth but still need CSRF protection on state-changing methods.
  // We check Origin header against the request host (same approach as Astro's checkOrigin).
  // This prevents cross-origin form submissions and fetch requests from malicious sites.
  if (isPublicApiRoute) {
    const method = context.request.method.toUpperCase();
    if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
      const csrfError = checkPublicCsrf(context.request, url);
      if (csrfError) return csrfError;
    }
    return next();
  }
  // Plugin routes: soft auth (resolve user if credentials present, but never block).
  // The catch-all handler decides per-route whether auth is required (public vs private).
  // Public plugin routes that accept POST are vulnerable to cross-origin form submissions,
  // so we apply the same Origin-based CSRF check as other public routes.
  const isPluginRoute = url.pathname.startsWith("/_emdash/api/plugins/");
  if (isPluginRoute) {
    const method = context.request.method.toUpperCase();
    if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
      const csrfError = checkPublicCsrf(context.request, url);
      if (csrfError) return csrfError;
    }
    return handlePluginRouteAuth(context, next);
  }
  // Setup routes: skip auth but still enforce CSRF on state-changing methods
  if (isSetupRoute) {
    const method = context.request.method.toUpperCase();
    if (method !== "GET" && method !== "HEAD" && method !== "OPTIONS") {
      const csrfHeader = context.request.headers.get("X-EmDash-Request");
      if (csrfHeader !== "1") {
        return new Response(
          JSON.stringify({
            error: { code: "CSRF_REJECTED", message: "Missing required header" },
          }),
          {
            status: 403,
            headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
          },
        );
      }
    }
    return next();
  }
  // For public routes: soft auth check (set locals.user if session exists, but never block)
  if (isPublicRoute) {
    return handlePublicRouteAuth(context, next);
  }
  // --- Everything below is /_emdash (admin + API) ---
  // Try Bearer token auth first (API tokens and OAuth tokens).
  // If successful, skip CSRF (tokens aren't ambient credentials like cookies).
  const bearerResult = await handleBearerAuth(context);
  if (bearerResult === "invalid") {
    const headers: Record<string, string> = {
      "Content-Type": "application/json",
      ...MW_CACHE_HEADERS,
    };
    // Add WWW-Authenticate header on MCP endpoint 401s to trigger OAuth discovery
    if (url.pathname === "/_emdash/api/mcp") {
      headers["WWW-Authenticate"] =
        `Bearer resource_metadata="${url.origin}/.well-known/oauth-protected-resource"`;
    }
    return new Response(
      JSON.stringify({ error: { code: "INVALID_TOKEN", message: "Invalid or expired token" } }),
      { status: 401, headers },
    );
  }
  const isTokenAuth = bearerResult === "authenticated";
  // CSRF protection: require X-EmDash-Request header on state-changing requests.
  // Skip for token-authenticated requests (tokens aren't ambient credentials).
  // Browsers block cross-origin custom headers, so this prevents CSRF without tokens.
  // OAuth authorize consent is exempt: it's a standard HTML form POST that can't
  // include custom headers. The consent flow is protected by session + single-use codes.
  const method = context.request.method.toUpperCase();
  const isOAuthConsent = url.pathname.startsWith("/_emdash/oauth/authorize");
  if (
    isApiRoute &&
    !isTokenAuth &&
    !isOAuthConsent &&
    method !== "GET" &&
    method !== "HEAD" &&
    method !== "OPTIONS" &&
    // NOTE(review): public API routes already returned near the top of this
    // middleware, so this term is always true here; kept as defense-in-depth.
    !isPublicApiRoute
  ) {
    const csrfHeader = context.request.headers.get("X-EmDash-Request");
    if (csrfHeader !== "1") {
      return new Response(
        JSON.stringify({ error: { code: "CSRF_REJECTED", message: "Missing required header" } }),
        {
          status: 403,
          headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
        },
      );
    }
  }
  // If already authenticated via Bearer token, enforce scope then skip session/external auth
  if (isTokenAuth) {
    // Enforce API token scopes based on URL pattern + HTTP method
    const scopeError = enforceTokenScope(url.pathname, method, context.locals.tokenScopes);
    if (scopeError) return scopeError;
    const response = await next();
    if (!import.meta.env.DEV) {
      const marketplaceUrl = context.locals.emdash?.config.marketplace;
      response.headers.set("Content-Security-Policy", buildEmDashCsp(marketplaceUrl));
    }
    return response;
  }
  const response = await handleEmDashAuth(context, next);
  // Set strict CSP on all /_emdash responses (prod only)
  if (!import.meta.env.DEV) {
    const marketplaceUrl = context.locals.emdash?.config.marketplace;
    response.headers.set("Content-Security-Policy", buildEmDashCsp(marketplaceUrl));
  }
  return response;
});
/**
 * Auth handling for /_emdash routes. Returns a Response from either
 * an auth error/redirect or the downstream route handler.
 */
async function handleEmDashAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
): Promise<Response> {
  const { url, locals } = context;
  const { emdash } = locals;
  // No database — let the admin surface the error state itself.
  if (!emdash?.db) {
    return next();
  }
  const isApiRoute = url.pathname.startsWith("/_emdash/api");
  const isLoginRoute = url.pathname.startsWith("/_emdash/admin/login");
  // Determine auth mode from config
  const authMode = getAuthMode(emdash.config);
  // External providers (Cloudflare Access, etc.) verify a JWT per request —
  // but only in production. In dev the external JWT won't be present, so we
  // fall through to the passkey/session flow below.
  if (authMode.type === "external" && !import.meta.env.DEV) {
    return handleExternalAuth(context, next, authMode, isApiRoute);
  }
  // Passkey (session) authentication — the login page itself stays reachable.
  return isLoginRoute ? next() : handlePasskeyAuth(context, next, isApiRoute);
}
/**
 * Soft auth for plugin routes: resolve user from Bearer token or session if present,
 * but never block unauthenticated requests. The catch-all handler checks route
 * metadata to decide whether auth is required (public vs private routes).
 */
async function handlePluginRouteAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
): Promise<Response> {
  const { locals } = context;
  const { emdash } = locals;
  // Step 1: Bearer tokens (API tokens and OAuth tokens) take precedence.
  try {
    const bearerResult = await handleBearerAuth(context);
    if (bearerResult === "authenticated") {
      // handleBearerAuth already populated locals.user / locals.tokenScopes.
      return next();
    }
    if (bearerResult === "invalid") {
      // A presented-but-bad token gets an explicit 401 so the caller knows
      // their token is bad (never silently downgrade to anonymous access).
      return new Response(
        JSON.stringify({ error: { code: "INVALID_TOKEN", message: "Invalid or expired token" } }),
        {
          status: 401,
          headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS },
        },
      );
    }
    // bearerResult === "none": no token presented, fall through to session auth.
  } catch (error) {
    console.error("Plugin route bearer auth error:", error);
  }
  // Step 2: cookie session — best-effort; failures never block plugin routes.
  try {
    const sessionUser = await context.session?.get("user");
    if (sessionUser?.id && emdash?.db) {
      const user = await createKyselyAdapter(emdash.db).getUserById(sessionUser.id);
      if (user && !user.disabled) {
        locals.user = user;
      }
    }
  } catch (error) {
    console.error("Plugin route session auth error:", error);
  }
  return next();
}
/**
 * Soft auth check for public routes with edit mode cookie.
 * Checks the session and sets locals.user if valid, but never blocks the request.
 */
async function handlePublicRouteAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
): Promise<Response> {
  const { locals, session } = context;
  const db = locals.emdash?.db;
  try {
    const sessionUser = await session?.get("user");
    if (db && sessionUser?.id) {
      const user = await createKyselyAdapter(db).getUserById(sessionUser.id);
      if (user && !user.disabled) {
        locals.user = user;
      }
    }
  } catch {
    // Best-effort only — the public page renders normally either way.
  }
  return next();
}
/**
 * Handle external auth provider authentication (Cloudflare Access, etc.)
 *
 * Verifies the provider credential via the virtual auth module, then finds or
 * provisions the matching local user:
 *  - Unknown email with autoProvision !== false → create the user; the very
 *    first user becomes admin regardless of the provider-supplied role.
 *  - Known email → optionally sync name, and role when syncRoles is enabled.
 * The resolved user is stored on locals and persisted to the session so
 * public pages can identify the editor.
 */
async function handleExternalAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
  authMode: ExternalAuthMode,
  _isApiRoute: boolean,
): Promise<Response> {
  const { locals, request } = context;
  const { emdash } = locals;
  try {
    // Use the authenticate function from the virtual module
    // (statically imported at build time to work with Cloudflare Workers)
    if (typeof virtualAuthenticate !== "function") {
      throw new Error(
        `Auth provider ${authMode.entrypoint} does not export an authenticate function`,
      );
    }
    // Authenticate via the provider
    const authResult = await virtualAuthenticate(request, authMode.config);
    // Get external auth config for auto-provision settings
    // eslint-disable-next-line typescript-eslint(no-unsafe-type-assertion) -- narrowing AuthModeConfig to ExternalAuthConfig after provider check
    const externalConfig = authMode.config as ExternalAuthConfig;
    // Find or create user
    const adapter = createKyselyAdapter(emdash!.db);
    let user = await adapter.getUserByEmail(authResult.email);
    if (!user) {
      // User doesn't exist
      if (externalConfig.autoProvision === false) {
        return new Response("User not authorized", {
          status: 403,
          headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
        });
      }
      // Check if this is the first user (they become admin)
      const userCount = await emdash!.db
        .selectFrom("users")
        .select(emdash!.db.fn.count("id").as("count"))
        .executeTakeFirst();
      const isFirstUser = Number(userCount?.count ?? 0) === 0;
      const role = isFirstUser ? ROLE_ADMIN : authResult.role;
      // Create user
      const now = new Date().toISOString();
      const newUser = {
        id: ulid(),
        email: authResult.email,
        name: authResult.name,
        role,
        email_verified: 1,
        created_at: now,
        updated_at: now,
      };
      await emdash!.db.insertInto("users").values(newUser).execute();
      user = await adapter.getUserByEmail(authResult.email);
      console.log(
        `[external-auth] Provisioned user: ${authResult.email} (role: ${role}, first: ${isFirstUser})`,
      );
    } else {
      // User exists - check if we need to sync anything
      const updates: Record<string, unknown> = {};
      let newName: string | undefined;
      let newRole: RoleLevel | undefined;
      // Sync name from provider if provider provides one and local differs
      if (authResult.name && user.name !== authResult.name) {
        newName = authResult.name;
        updates.name = newName;
      }
      // Sync role if enabled
      if (externalConfig.syncRoles && user.role !== authResult.role) {
        newRole = authResult.role;
        updates.role = newRole;
      }
      if (Object.keys(updates).length > 0) {
        updates.updated_at = new Date().toISOString();
        await emdash!.db.updateTable("users").set(updates).where("id", "=", user.id).execute();
        // Bug fix: use explicit !== undefined checks rather than truthiness —
        // a numeric role level of 0 is falsy but was already written to the DB
        // above, so it must also be reflected on the in-memory copy.
        user = {
          ...user,
          ...(newName !== undefined ? { name: newName } : {}),
          ...(newRole !== undefined ? { role: newRole } : {}),
        };
        console.log(
          `[external-auth] Updated user ${authResult.email}:`,
          Object.keys(updates).filter((k) => k !== "updated_at"),
        );
      }
    }
    if (!user) {
      // This shouldn't happen, but handle it gracefully
      return new Response("Failed to provision user", {
        status: 500,
        headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
      });
    }
    // Check if user is disabled locally
    if (user.disabled) {
      return new Response("Account disabled", {
        status: 403,
        headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
      });
    }
    // Set user in locals
    locals.user = user;
    // Persist to session so public pages can identify the user
    // (external auth headers are only verified on /_emdash routes)
    const { session } = context;
    session?.set("user", { id: user.id });
    return next();
  } catch (error) {
    console.error("[external-auth] Auth error:", error);
    return new Response("Authentication failed", {
      status: 401,
      headers: { "Content-Type": "text/plain", ...MW_CACHE_HEADERS },
    });
  }
}
/**
 * Try to authenticate via Bearer token (API token or OAuth token).
 *
 * Returns:
 * - "authenticated" if token is valid and user is resolved
 * - "invalid" if a token was provided but is invalid/expired
 * - "none" if no Bearer token was provided
 */
async function handleBearerAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
): Promise<"authenticated" | "invalid" | "none"> {
  const header = context.request.headers.get("Authorization");
  if (!header?.startsWith("Bearer ")) return "none";
  const token = header.slice("Bearer ".length);
  if (!token) return "none";
  const db = context.locals.emdash?.db;
  if (!db) return "none";
  // Dispatch on the token prefix: ec_pat_ = personal API token, ec_oat_ = OAuth token.
  let resolved: { userId: string; scopes: string[] } | null;
  if (token.startsWith("ec_pat_")) {
    resolved = await resolveApiToken(db, token);
  } else if (token.startsWith("ec_oat_")) {
    resolved = await resolveOAuthToken(db, token);
  } else {
    // Unrecognized token format
    return "invalid";
  }
  if (!resolved) return "invalid";
  // Resolve the owning user; disabled accounts are treated like bad tokens.
  const user = await createKyselyAdapter(db).getUserById(resolved.userId);
  if (!user || user.disabled) return "invalid";
  context.locals.user = user;
  context.locals.tokenScopes = resolved.scopes;
  return "authenticated";
}
/**
 * Handle passkey (session-based) authentication.
 *
 * Resolves the session's user id to a full DB user. Unauthenticated or stale
 * sessions get a JSON 401 on API routes, or a redirect to the login page
 * (with a `redirect` back-param) on admin pages. Disabled accounts have their
 * session destroyed and receive 403 / an error redirect.
 */
async function handlePasskeyAuth(
  context: Parameters<Parameters<typeof defineMiddleware>[0]>[0],
  next: Parameters<Parameters<typeof defineMiddleware>[0]>[1],
  isApiRoute: boolean,
): Promise<Response> {
  const { url, locals, session } = context;
  const { emdash } = locals;
  try {
    // Check session for user (session.get returns a Promise)
    const sessionUser = await session?.get("user");
    if (!sessionUser?.id) {
      // Not authenticated
      if (isApiRoute) {
        const headers: Record<string, string> = { ...MW_CACHE_HEADERS };
        // Add WWW-Authenticate on MCP endpoint 401s to trigger OAuth discovery
        if (url.pathname === "/_emdash/api/mcp") {
          headers["WWW-Authenticate"] =
            `Bearer resource_metadata="${url.origin}/.well-known/oauth-protected-resource"`;
        }
        return Response.json(
          { error: { code: "NOT_AUTHENTICATED", message: "Not authenticated" } },
          { status: 401, headers },
        );
      }
      // Admin page: redirect to login, preserving the originally requested path.
      const loginUrl = new URL("/_emdash/admin/login", url.origin);
      loginUrl.searchParams.set("redirect", url.pathname);
      return context.redirect(loginUrl.toString());
    }
    // Get full user from database
    const adapter = createKyselyAdapter(emdash!.db);
    const user = await adapter.getUserById(sessionUser.id);
    if (!user) {
      // User no longer exists - clear session
      session?.destroy();
      if (isApiRoute) {
        return Response.json(
          { error: { code: "NOT_FOUND", message: "User not found" } },
          { status: 401, headers: MW_CACHE_HEADERS },
        );
      }
      return context.redirect("/_emdash/admin/login");
    }
    // Check if user is disabled
    if (user.disabled) {
      session?.destroy();
      if (isApiRoute) {
        return apiError("ACCOUNT_DISABLED", "Account disabled", 403);
      }
      const loginUrl = new URL("/_emdash/admin/login", url.origin);
      loginUrl.searchParams.set("error", "account_disabled");
      return context.redirect(loginUrl.toString());
    }
    // Set user in locals for use by routes
    locals.user = user;
  } catch (error) {
    console.error("Auth middleware error:", error);
    // On error, redirect to login
    // NOTE(review): this redirects even API-route failures (e.g. DB errors)
    // to the login page instead of returning a JSON error — confirm API
    // clients tolerate a 302 here.
    return context.redirect("/_emdash/admin/login");
  }
  return next();
}
// =============================================================================
// Token scope enforcement
// =============================================================================
/**
 * Scope rules: ordered list of (pathPrefix, method, requiredScope) tuples.
 * First matching rule wins. Methods: "*" = any, "WRITE" = POST/PUT/PATCH/DELETE.
 *
 * Routes not matched by any rule default to "admin" scope (fail-closed).
 *
 * Ordering matters: more specific prefixes (e.g. /media/file) must appear
 * before their parent prefix (/media) so the first match is the intended one.
 */
const SCOPE_RULES: Array<[prefix: string, method: string, scope: string]> = [
  // Content routes
  ["/_emdash/api/content", "GET", "content:read"],
  ["/_emdash/api/content", "WRITE", "content:write"],
  // Media routes (excluding /file/ which is public)
  ["/_emdash/api/media/file", "*", "media:read"], // public anyway, but scope if token-authed
  ["/_emdash/api/media", "GET", "media:read"],
  ["/_emdash/api/media", "WRITE", "media:write"],
  // Schema routes
  ["/_emdash/api/schema", "GET", "schema:read"],
  ["/_emdash/api/schema", "WRITE", "schema:write"],
  // Taxonomy, menu, section, widget, revision — all content domain
  ["/_emdash/api/taxonomies", "GET", "content:read"],
  ["/_emdash/api/taxonomies", "WRITE", "content:write"],
  ["/_emdash/api/menus", "GET", "content:read"],
  ["/_emdash/api/menus", "WRITE", "content:write"],
  ["/_emdash/api/sections", "GET", "content:read"],
  ["/_emdash/api/sections", "WRITE", "content:write"],
  ["/_emdash/api/widget-areas", "GET", "content:read"],
  ["/_emdash/api/widget-areas", "WRITE", "content:write"],
  ["/_emdash/api/revisions", "GET", "content:read"],
  ["/_emdash/api/revisions", "WRITE", "content:write"],
  // Search
  ["/_emdash/api/search", "GET", "content:read"],
  ["/_emdash/api/search", "WRITE", "admin"],
  // Import, admin, settings, plugins — all require admin scope
  ["/_emdash/api/import", "*", "admin"],
  ["/_emdash/api/admin", "*", "admin"],
  ["/_emdash/api/settings", "*", "admin"],
  // NOTE(review): /_emdash/api/plugins/ requests return early via
  // handlePluginRouteAuth, so this rule presumably only fires for the bare
  // "/_emdash/api/plugins" path — confirm that is intended.
  ["/_emdash/api/plugins", "*", "admin"],
  // MCP endpoint — scopes enforced per-tool inside mcp/server.ts
  ["/_emdash/api/mcp", "*", "content:read"],
];
/** HTTP methods treated as writes by the "WRITE" rule method. */
const WRITE_METHODS = new Set(["POST", "PUT", "PATCH", "DELETE"]);
/**
 * Enforce API token scopes based on the request URL and HTTP method.
 * Returns a 403 Response if the scope is insufficient, or null if allowed.
 *
 * Session-authenticated requests (tokenScopes === undefined) are never checked.
 */
function enforceTokenScope(
  pathname: string,
  method: string,
  tokenScopes: string[] | undefined,
): Response | null {
  // Session auth — implicit full access, no scope restrictions
  if (!tokenScopes) return null;
  // Shared 403 builder — the same response shape is needed both for a matched
  // rule's scope and for the fail-closed default (previously duplicated).
  const insufficient = (scope: string): Response =>
    new Response(
      JSON.stringify({
        error: {
          code: "INSUFFICIENT_SCOPE",
          message: `Token lacks required scope: ${scope}`,
        },
      }),
      { status: 403, headers: { "Content-Type": "application/json", ...MW_CACHE_HEADERS } },
    );
  const isWrite = WRITE_METHODS.has(method);
  for (const [prefix, ruleMethod, scope] of SCOPE_RULES) {
    // Match exact prefix or prefix followed by /
    if (pathname !== prefix && !pathname.startsWith(prefix + "/")) continue;
    // Check method match — first rule that matches both path and method wins.
    if (ruleMethod === "*" || (ruleMethod === "WRITE" && isWrite) || ruleMethod === method) {
      return hasScope(tokenScopes, scope) ? null : insufficient(scope);
    }
  }
  // No rule matched — default to admin scope (fail-closed)
  return hasScope(tokenScopes, "admin") ? null : insufficient("admin");
}

View File

@@ -0,0 +1,89 @@
/**
* Redirect middleware
*
* Intercepts incoming requests and checks for matching redirect rules.
* Runs after runtime init (needs db) but before setup/auth (should handle
* ALL routes, including public ones, and should be fast).
*
* Skip paths:
* - /_emdash/* (admin UI, API routes, auth endpoints)
* - /_image (Astro image optimization)
* - Static assets (files with extensions)
*
* 404 logging happens post-response: if next() returns 404 and the path
* wasn't already matched by a redirect, log it.
*/
import { defineMiddleware } from "astro:middleware";
import { RedirectRepository } from "../../database/repositories/redirect.js";
/** Paths that should never be intercepted by redirects */
const SKIP_PREFIXES = ["/_emdash", "/_image"];
/** Static asset extensions -- don't redirect file requests */
const ASSET_EXTENSION = /\.\w{1,10}$/;
type RedirectCode = 301 | 302 | 303 | 307 | 308;

/** Narrow an arbitrary status code to one of the valid HTTP redirect codes. */
function isRedirectCode(code: number): code is RedirectCode {
  switch (code) {
    case 301:
    case 302:
    case 303:
    case 307:
    case 308:
      return true;
    default:
      return false;
  }
}
export const onRequest = defineMiddleware(async (context, next) => {
  const path = context.url.pathname;

  // Internal routes and static assets never go through redirect matching.
  const isInternal = SKIP_PREFIXES.some((prefix) => path.startsWith(prefix));
  if (isInternal || ASSET_EXTENSION.test(path)) {
    return next();
  }

  const db = context.locals.emdash?.db;
  if (!db) {
    return next();
  }

  try {
    const redirects = new RedirectRepository(db);
    const match = await redirects.matchPath(path);

    if (match) {
      const dest = match.resolvedDestination;
      // Reject protocol-relative URLs (e.g. //evil.com or /\evil.com) from interpolation.
      // Browsers normalize backslashes to forward slashes, so /\ is equivalent to //.
      if (dest.startsWith("//") || dest.startsWith("/\\")) {
        return next();
      }
      // Fire-and-forget hit recording (don't block the redirect)
      redirects.recordHit(match.redirect.id).catch(() => {});
      const status = isRedirectCode(match.redirect.type) ? match.redirect.type : 301;
      return context.redirect(dest, status);
    }

    // No redirect matched -- render normally, then log 404s (fire-and-forget)
    const response = await next();
    if (response.status === 404) {
      redirects
        .log404({
          path,
          referrer: context.request.headers.get("referer") ?? null,
          userAgent: context.request.headers.get("user-agent") ?? null,
        })
        .catch(() => {});
    }
    return response;
  } catch {
    // If the redirects table doesn't exist yet (pre-migration), skip silently
    return next();
  }
});

View File

@@ -0,0 +1,129 @@
/**
* EmDash Request Context Middleware
*
* Sets up AsyncLocalStorage-based request context for query functions.
* Skips ALS entirely for logged-out users with no CMS signals (fast path).
*
* Handles:
* - Preview tokens: _preview query param with signed HMAC token
* - Edit mode: emdash-edit-mode cookie (for visual editing)
* - Toolbar injection: floating pill for authenticated editors
*/
import { defineMiddleware } from "astro:middleware";
import { verifyPreviewToken, parseContentId } from "../../preview/tokens.js";
import { runWithContext } from "../../request-context.js";
import { renderToolbar } from "../../visual-editing/toolbar.js";
/**
 * Inject toolbar HTML into a response if it's an HTML page.
 *
 * The toolbar markup is spliced in immediately before the first closing
 * </body> tag. Non-HTML responses are returned untouched; HTML without a
 * </body> tag is re-wrapped with its body unchanged (reading the body
 * consumes the original stream, so a new Response must be built).
 */
async function injectToolbar(response: Response, toolbarHtml: string): Promise<Response> {
  const contentType = response.headers.get("content-type");
  if (!contentType?.includes("text/html")) return response;
  const html = await response.text();
  if (!html.includes("</body>")) return new Response(html, response);
  const injected = html.replace("</body>", `${toolbarHtml}</body>`);
  // Pass the original response as init so status, statusText and headers
  // are all preserved — the previous version rebuilt the init by hand and
  // silently dropped statusText, unlike the no-</body> branch above.
  return new Response(injected, response);
}
export const onRequest = defineMiddleware(async (context, next) => {
  const { cookies, url } = context;
  // Skip /_emdash routes (admin has its own UI, no rendering context needed)
  if (url.pathname.startsWith("/_emdash")) {
    return next();
  }
  // Check for authenticated editor (role >= 30)
  // NOTE(review): 30 presumably corresponds to the editor role level in
  // @emdashcms/auth — confirm and consider using the named constant.
  const { user } = context.locals;
  const isEditor = !!user && user.role >= 30;
  // Playground mode: the playground middleware (from @emdashcms/cloudflare) stashes
  // the per-session DO database on locals.__playgroundDb. We set it via ALS here
  // (same module instance as the loader) so getDb() picks it up correctly.
  const playgroundDb = context.locals.__playgroundDb;
  if (playgroundDb) {
    // Check if playground user has toggled edit mode on
    const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
    return runWithContext({ editMode: hasEditCookie, db: playgroundDb }, () => next());
  }
  // Fast path: check for CMS signals before doing any work
  const hasEditCookie = cookies.get("emdash-edit-mode")?.value === "true";
  const hasPreviewToken = url.searchParams.has("_preview");
  // No CMS signals and not an editor → skip everything (zero overhead)
  if (!hasEditCookie && !hasPreviewToken && !isEditor) {
    return next();
  }
  // Determine edit mode: cookie AND authenticated editor — a logged-out
  // visitor with a stale edit cookie must never get edit mode.
  const editMode = hasEditCookie && isEditor;
  // Read locale from Astro's i18n routing
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion -- Astro context includes currentLocale when i18n is configured
  const locale = (context as { currentLocale?: string }).currentLocale;
  // Verify preview token if present. An invalid/expired token, or a missing
  // secret, leaves `preview` undefined and the page renders published content.
  let preview: { collection: string; id: string } | undefined;
  if (hasPreviewToken) {
    const secret = import.meta.env.EMDASH_PREVIEW_SECRET || import.meta.env.PREVIEW_SECRET || "";
    if (secret) {
      const result = await verifyPreviewToken({ url, secret });
      if (result.valid) {
        const { collection, id } = parseContentId(result.payload.cid);
        preview = { collection, id };
      }
    }
  }
  // If we have CMS signals, wrap in ALS context
  const needsContext = hasEditCookie || hasPreviewToken;
  if (needsContext) {
    return runWithContext({ editMode, preview, locale }, async () => {
      let response = await next();
      // Preview responses must not be cached -- draft content could leak past token expiry.
      // Clone the response before modifying headers — the original may be immutable.
      if (preview) {
        response = new Response(response.body, response);
        response.headers.set("Cache-Control", "private, no-store");
      }
      // Inject toolbar for authenticated editors
      if (isEditor) {
        const toolbarHtml = renderToolbar({
          editMode,
          isPreview: !!preview,
        });
        return injectToolbar(response, toolbarHtml);
      }
      return response;
    });
  }
  // Editor without CMS signals — no ALS needed, but inject toolbar
  // (rendered with edit mode off so the pill offers the toggle).
  if (isEditor) {
    const response = await next();
    const toolbarHtml = renderToolbar({
      editMode: false,
      isPreview: false,
    });
    return injectToolbar(response, toolbarHtml);
  }
  return next();
});
export default onRequest;

View File

@@ -0,0 +1,89 @@
/**
* Setup detection middleware
*
* Redirects to setup wizard if the site hasn't been set up yet.
* Checks both "emdash:setup_complete" option AND user existence.
*
* Detection logic (in order):
* 1. Does options table exist? No → setup needed
* 2. Is setup_complete true? No → setup needed
* 3. In passkey mode: Are there any users? No → setup needed
* In Access mode: Skip user check (first user created on first login)
* 4. Proceed to admin
*/
import { defineMiddleware } from "astro:middleware";
import { getAuthMode } from "../../auth/mode.js";
export const onRequest = defineMiddleware(async (context, next) => {
  const path = context.url.pathname;

  // Only guard admin routes, and never the setup wizard itself.
  const needsSetupCheck =
    path.startsWith("/_emdash/admin") && !path.startsWith("/_emdash/admin/setup");
  if (!needsSetupCheck) {
    return next();
  }

  const { emdash } = context.locals;
  if (!emdash?.db) {
    // No database configured - let the admin handle this error
    return next();
  }

  try {
    // Read the JSON-encoded "emdash:setup_complete" flag.
    const row = await emdash.db
      .selectFrom("options")
      .select("value")
      .where("name", "=", "emdash:setup_complete")
      .executeTakeFirst();

    // Accept both boolean true and string "true"; a missing row or
    // malformed JSON value counts as "setup not complete".
    let setupDone = false;
    if (row) {
      try {
        const parsed = JSON.parse(row.value);
        setupDone = parsed === true || parsed === "true";
      } catch {
        setupDone = false;
      }
    }
    if (!setupDone) {
      return context.redirect("/_emdash/admin/setup");
    }

    // User verification differs by auth mode: passkey mode needs at least
    // one user; Access mode creates the first user on first login, so the
    // check is skipped there.
    const authMode = getAuthMode(emdash.config);
    if (authMode.type === "passkey") {
      // Catches the edge case where setup_complete is true but no users exist.
      const userCount = await emdash.db
        .selectFrom("users")
        .select((eb) => eb.fn.countAll<number>().as("count"))
        .executeTakeFirstOrThrow();
      if (userCount.count === 0) {
        return context.redirect("/_emdash/admin/setup");
      }
    }
  } catch (error) {
    // Fresh installation where migrations haven't run yet → send to setup.
    if (error instanceof Error && error.message.includes("no such table")) {
      return context.redirect("/_emdash/admin/setup");
    }
    // Other errors - let the admin handle them
    console.error("Setup middleware error:", error);
  }
  return next();
});

View File

@@ -0,0 +1,15 @@
/**
* Admin Wrapper
*
* Imports plugin admin modules from the virtual module and passes them
* to AdminApp via props. This ensures plugin components are bundled
* together with the admin app and available via React context.
*/
import { AdminApp } from "@emdashcms/admin";
// @ts-ignore - virtual module generated by integration
import { pluginAdmins } from "virtual:emdash/admin-registry";
/**
 * Root component for the admin SPA shell.
 *
 * Renders AdminApp with the plugin admin modules collected by the
 * integration's virtual module, so plugin UIs are bundled alongside the
 * core admin app and reachable through React context.
 */
export default function AdminWrapper() {
  return <AdminApp pluginAdmins={pluginAdmins} />;
}

View File

@@ -0,0 +1,81 @@
---
/**
 * Admin shell route - injected by EmDash integration
 *
 * This page serves the EmDash admin React SPA.
 * AdminWrapper imports plugin admin modules and passes them to AdminApp.
 */
import "@emdashcms/admin/styles.css";
// Use package-qualified import so Astro generates a proper module URL
// (relative imports resolve to absolute paths which break client hydration)
import AdminWrapper from "emdash/routes/PluginRegistry";

// Always render on demand — the admin depends on runtime auth and db state.
export const prerender = false;
---
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link
rel="icon"
href="data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'%3E%3Ctext y='.9em' font-size='90'%3E%F0%9F%92%AB%3C/text%3E%3C/svg%3E"
/>
<title>EmDash Admin</title>
</head>
<body>
<div id="admin-root" class="min-h-screen">
<div id="emdash-boot-loader">
<style>
#emdash-boot-loader {
display: flex;
align-items: center;
justify-content: center;
min-height: 100vh;
color-scheme: light dark;
background: light-dark(hsl(0 0% 100%), hsl(222.2 84% 4.9%));
}
#emdash-boot-loader .loader-inner {
text-align: center;
}
#emdash-boot-loader .spinner {
width: 24px;
height: 24px;
margin: 0 auto;
border: 2.5px solid
light-dark(
hsl(215.4 16.3% 46.9% / 0.3),
hsl(215 20.2% 65.1% / 0.3)
);
border-top-color: light-dark(
hsl(215.4 16.3% 46.9%),
hsl(215 20.2% 65.1%)
);
border-radius: 50%;
animation: emdash-spin 0.8s linear infinite;
}
#emdash-boot-loader p {
margin-top: 1rem;
font-family:
system-ui,
-apple-system,
sans-serif;
font-size: 0.875rem;
color: light-dark(hsl(215.4 16.3% 46.9%), hsl(215 20.2% 65.1%));
}
@keyframes emdash-spin {
to {
transform: rotate(360deg);
}
}
</style>
<div class="loader-inner">
<div class="spinner"></div>
<p>Loading EmDash...</p>
</div>
</div>
<AdminWrapper client:only="react" />
</div>
</body>
</html>

View File

@@ -0,0 +1,112 @@
/**
* PATCH/DELETE /_emdash/api/admin/allowed-domains/[domain]
*
* Admin endpoints for managing a specific allowed domain.
* PATCH - Update domain settings (enabled, defaultRole)
* DELETE - Remove an allowed domain
*/
import type { APIRoute } from "astro";
export const prerender = false;
import { Role, roleFromLevel } from "@emdashcms/auth";
import { createKyselyAdapter } from "@emdashcms/auth/adapters/kysely";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { isParseError, parseBody } from "#api/parse.js";
import { allowedDomainUpdateBody } from "#api/schemas.js";
/**
 * PATCH - Update settings (enabled flag, default role) for one allowed domain.
 */
export const PATCH: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  const { domain } = params;

  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!domain) {
    return apiError("VALIDATION_ERROR", "Domain is required", 400);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }

  const adapter = createKyselyAdapter(emdash.db);
  try {
    const body = await parseBody(request, allowedDomainUpdateBody);
    if (isParseError(body)) return body;

    const existing = await adapter.getAllowedDomain(domain);
    if (!existing) {
      return apiError("NOT_FOUND", "Domain not found", 404);
    }

    // defaultRole arrives pre-validated as a RoleLevel by the Zod schema;
    // enabled falls back to the stored value when omitted from the body.
    await adapter.updateAllowedDomain(domain, body.enabled ?? existing.enabled, body.defaultRole);

    // Re-read so the response reflects exactly what was persisted.
    const updated = await adapter.getAllowedDomain(domain);
    return apiSuccess({
      success: true,
      domain: updated
        ? {
            domain: updated.domain,
            defaultRole: updated.defaultRole,
            roleName: roleFromLevel(updated.defaultRole),
            enabled: updated.enabled,
            createdAt: updated.createdAt.toISOString(),
          }
        : null,
    });
  } catch (error) {
    return handleError(error, "Failed to update allowed domain", "DOMAIN_UPDATE_ERROR");
  }
};
/**
 * DELETE - Remove an allowed domain. Responds 404 when the domain is unknown.
 */
export const DELETE: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  const { domain } = params;

  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!domain) {
    return apiError("VALIDATION_ERROR", "Domain is required", 400);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }

  const adapter = createKyselyAdapter(emdash.db);
  try {
    // Report 404 for unknown domains rather than silently succeeding.
    if (!(await adapter.getAllowedDomain(domain))) {
      return apiError("NOT_FOUND", "Domain not found", 404);
    }
    await adapter.deleteAllowedDomain(domain);
    return apiSuccess({ success: true });
  } catch (error) {
    return handleError(error, "Failed to delete allowed domain", "DOMAIN_DELETE_ERROR");
  }
};

View File

@@ -0,0 +1,108 @@
/**
* GET/POST /_emdash/api/admin/allowed-domains
*
* Admin endpoints for managing allowed signup domains.
* GET - List all allowed domains
* POST - Add a new allowed domain
*/
import type { APIRoute } from "astro";
export const prerender = false;
import { Role, roleFromLevel } from "@emdashcms/auth";
import { createKyselyAdapter } from "@emdashcms/auth/adapters/kysely";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { isParseError, parseBody } from "#api/parse.js";
import { allowedDomainCreateBody } from "#api/schemas.js";
// Bare domain with at least one dot — no protocol, no path, no port.
// NOTE(review): this accepts hyphen-leading/trailing labels after the first
// (e.g. "a.-b"); tighten if strict RFC 1035 label validation is ever needed.
const DOMAIN_REGEX = /^[a-zA-Z0-9][a-zA-Z0-9-]*(\.[a-zA-Z0-9-]+)+$/;
/**
 * GET - List every allowed signup domain with its resolved role name.
 */
export const GET: APIRoute = async ({ locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }

  const adapter = createKyselyAdapter(emdash.db);
  try {
    const rows = await adapter.getAllowedDomains();
    // Serialize dates and attach the human-readable role name.
    const domains = rows.map((row) => ({
      domain: row.domain,
      defaultRole: row.defaultRole,
      roleName: roleFromLevel(row.defaultRole),
      enabled: row.enabled,
      createdAt: row.createdAt.toISOString(),
    }));
    return apiSuccess({ domains });
  } catch (error) {
    return handleError(error, "Failed to list allowed domains", "DOMAIN_LIST_ERROR");
  }
};
/**
 * POST - Register a new allowed signup domain.
 *
 * The domain is lowercased and trimmed, validated against DOMAIN_REGEX
 * (bare domain, no protocol), and rejected with 409 when it already exists.
 */
export const POST: APIRoute = async ({ request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }

  const adapter = createKyselyAdapter(emdash.db);
  try {
    const body = await parseBody(request, allowedDomainCreateBody);
    if (isParseError(body)) return body;

    // Normalize before validating — admins often paste "Example.COM " etc.
    const cleanDomain = body.domain.toLowerCase().trim();
    if (!DOMAIN_REGEX.test(cleanDomain)) {
      return apiError("VALIDATION_ERROR", "Invalid domain format", 400);
    }
    if (await adapter.getAllowedDomain(cleanDomain)) {
      return apiError("CONFLICT", "Domain already exists", 409);
    }

    // defaultRole is already validated as a RoleLevel by the Zod schema.
    const created = await adapter.createAllowedDomain(cleanDomain, body.defaultRole);
    return apiSuccess(
      {
        success: true,
        domain: {
          domain: created.domain,
          defaultRole: created.defaultRole,
          roleName: roleFromLevel(created.defaultRole),
          enabled: created.enabled,
          createdAt: created.createdAt.toISOString(),
        },
      },
      201,
    );
  } catch (error) {
    return handleError(error, "Failed to create allowed domain", "DOMAIN_CREATE_ERROR");
  }
};

View File

@@ -0,0 +1,40 @@
/**
* Single API token endpoint
*
* DELETE /_emdash/api/admin/api-tokens/:id — Revoke a token
*/
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { apiError, handleError, unwrapResult } from "#api/error.js";
import { handleApiTokenRevoke } from "#api/handlers/api-tokens.js";
export const prerender = false;
/**
 * Revoke (delete) an API token belonging to the current user.
 */
export const DELETE: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }

  const tokenId = params.id;
  if (!tokenId) {
    return apiError("VALIDATION_ERROR", "Token ID is required", 400);
  }

  try {
    return unwrapResult(await handleApiTokenRevoke(emdash.db, tokenId, user.id));
  } catch (error) {
    return handleError(error, "Failed to revoke API token", "TOKEN_REVOKE_ERROR");
  }
};

View File

@@ -0,0 +1,68 @@
/**
* API token management endpoints
*
* GET /_emdash/api/admin/api-tokens — List tokens for current user
* POST /_emdash/api/admin/api-tokens — Create a new token
*/
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { z } from "zod";
import { apiError, handleError, unwrapResult } from "#api/error.js";
import { handleApiTokenCreate, handleApiTokenList } from "#api/handlers/api-tokens.js";
import { isParseError, parseBody } from "#api/parse.js";
import { VALID_SCOPES } from "#auth/api-tokens.js";
export const prerender = false;
// Request body for POST: human-readable token name, at least one valid
// scope, and an optional ISO-8601 expiry timestamp.
const createTokenSchema = z.object({
  name: z.string().min(1).max(100),
  scopes: z.array(z.enum(VALID_SCOPES)).min(1),
  expiresAt: z.string().datetime().optional(),
});
/**
* List API tokens for the current user.
* Admins can list all tokens (future: add ?userId= filter).
*/
export const GET: APIRoute = async ({ locals }) => {
const { emdash, user } = locals;
if (!emdash?.db) {
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
if (!user || user.role < Role.ADMIN) {
return apiError("FORBIDDEN", "Admin privileges required", 403);
}
const result = await handleApiTokenList(emdash.db, user.id);
return unwrapResult(result);
};
/**
* Create a new API token.
* Returns the raw token once — it cannot be retrieved again.
*/
export const POST: APIRoute = async ({ request, locals }) => {
const { emdash, user } = locals;
if (!emdash?.db) {
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
if (!user || user.role < Role.ADMIN) {
return apiError("FORBIDDEN", "Admin privileges required", 403);
}
try {
const body = await parseBody(request, createTokenSchema);
if (isParseError(body)) return body;
const result = await handleApiTokenCreate(emdash.db, user.id, body);
return unwrapResult(result, 201);
} catch (error) {
return handleError(error, "Failed to create API token", "TOKEN_CREATE_ERROR");
}
};

View File

@@ -0,0 +1,87 @@
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { isParseError, parseBody } from "#api/parse.js";
import { bylineUpdateBody } from "#api/schemas.js";
import { BylineRepository } from "#db/repositories/byline.js";
export const prerender = false;
/** Gate write operations: returns a 403 response unless the user is at least an editor. */
function requireEditor(user: { role: number } | undefined): Response | null {
  const isEditor = !!user && user.role >= Role.EDITOR;
  return isEditor ? null : apiError("FORBIDDEN", "Editor privileges required", 403);
}
/**
 * GET /_emdash/api/admin/bylines/:id — fetch a single byline.
 * Read access uses content:read so all authenticated roles can view byline data.
 */
export const GET: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  const denied = requirePerm(user, "content:read");
  if (denied) return denied;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  // Validate the route param instead of asserting it non-null (`params.id!`),
  // matching the other admin :id routes (e.g. comments).
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "Byline ID is required", 400);
  }
  try {
    const repo = new BylineRepository(emdash.db);
    const byline = await repo.findById(id);
    if (!byline) return apiError("NOT_FOUND", "Byline not found", 404);
    return apiSuccess(byline);
  } catch (error) {
    return handleError(error, "Failed to get byline", "BYLINE_GET_ERROR");
  }
};
/**
 * PUT /_emdash/api/admin/bylines/:id — update a byline (editor+).
 */
export const PUT: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  const denied = requireEditor(user);
  if (denied) return denied;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  // Validate the route param instead of asserting it non-null (`params.id!`),
  // matching the other admin :id routes (e.g. comments).
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "Byline ID is required", 400);
  }
  const body = await parseBody(request, bylineUpdateBody);
  if (isParseError(body)) return body;
  try {
    const repo = new BylineRepository(emdash.db);
    // Optional body fields are normalized to explicit nulls for the repository.
    const byline = await repo.update(id, {
      slug: body.slug,
      displayName: body.displayName,
      bio: body.bio ?? null,
      avatarMediaId: body.avatarMediaId ?? null,
      websiteUrl: body.websiteUrl ?? null,
      userId: body.userId ?? null,
      isGuest: body.isGuest,
    });
    if (!byline) return apiError("NOT_FOUND", "Byline not found", 404);
    return apiSuccess(byline);
  } catch (error) {
    return handleError(error, "Failed to update byline", "BYLINE_UPDATE_ERROR");
  }
};
/**
 * DELETE /_emdash/api/admin/bylines/:id — delete a byline (editor+).
 */
export const DELETE: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  const denied = requireEditor(user);
  if (denied) return denied;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  // Validate the route param instead of asserting it non-null (`params.id!`),
  // matching the other admin :id routes (e.g. comments).
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "Byline ID is required", 400);
  }
  try {
    const repo = new BylineRepository(emdash.db);
    const deleted = await repo.delete(id);
    if (!deleted) return apiError("NOT_FOUND", "Byline not found", 404);
    return apiSuccess({ deleted: true });
  } catch (error) {
    return handleError(error, "Failed to delete byline", "BYLINE_DELETE_ERROR");
  }
};

View File

@@ -0,0 +1,72 @@
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { isParseError, parseBody, parseQuery } from "#api/parse.js";
import { bylineCreateBody, bylinesListQuery } from "#api/schemas.js";
import { BylineRepository } from "#db/repositories/byline.js";
export const prerender = false;
/**
 * GET /_emdash/api/admin/bylines — list bylines with optional filters.
 * Read access uses content:read so all authenticated roles can view byline data.
 */
export const GET: APIRoute = async ({ url, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const denied = requirePerm(user, "content:read");
  if (denied) return denied;

  const query = parseQuery(url, bylinesListQuery);
  if (isParseError(query)) return query;

  try {
    const { search, isGuest, userId, cursor, limit } = query;
    const repo = new BylineRepository(emdash.db);
    const result = await repo.findMany({ search, isGuest, userId, cursor, limit });
    return apiSuccess(result);
  } catch (error) {
    return handleError(error, "Failed to list bylines", "BYLINE_LIST_ERROR");
  }
};
/**
 * POST /_emdash/api/admin/bylines — create a byline (editor+).
 */
export const POST: APIRoute = async ({ request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  if (!user || user.role < Role.EDITOR) {
    return apiError("FORBIDDEN", "Editor privileges required", 403);
  }

  const body = await parseBody(request, bylineCreateBody);
  if (isParseError(body)) return body;

  try {
    const repo = new BylineRepository(emdash.db);
    // Optional body fields are normalized to explicit nulls for the repository.
    const created = await repo.create({
      slug: body.slug,
      displayName: body.displayName,
      bio: body.bio ?? null,
      avatarMediaId: body.avatarMediaId ?? null,
      websiteUrl: body.websiteUrl ?? null,
      userId: body.userId ?? null,
      isGuest: body.isGuest,
    });
    return apiSuccess(created, 201);
  } catch (error) {
    return handleError(error, "Failed to create byline", "BYLINE_CREATE_ERROR");
  }
};

View File

@@ -0,0 +1,64 @@
/**
* Single comment admin endpoints
*
* GET /_emdash/api/admin/comments/:id - Get comment detail
* DELETE /_emdash/api/admin/comments/:id - Hard delete (ADMIN only)
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, handleError, requireDb, unwrapResult } from "#api/error.js";
import { handleCommentGet, handleCommentDelete } from "#api/handlers/comments.js";
export const prerender = false;
/**
 * Get single comment detail (includes moderation_metadata).
 */
export const GET: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  const commentId = params.id;
  if (!commentId) {
    return apiError("VALIDATION_ERROR", "Comment ID required", 400);
  }
  const noDb = requireDb(emdash?.db);
  if (noDb) return noDb;
  const forbidden = requirePerm(user, "comments:moderate");
  if (forbidden) return forbidden;
  try {
    return unwrapResult(await handleCommentGet(emdash.db, commentId));
  } catch (error) {
    return handleError(error, "Failed to get comment", "COMMENT_GET_ERROR");
  }
};
/**
 * Hard delete a comment (requires the comments:delete permission).
 */
export const DELETE: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  const commentId = params.id;
  if (!commentId) {
    return apiError("VALIDATION_ERROR", "Comment ID required", 400);
  }
  const noDb = requireDb(emdash?.db);
  if (noDb) return noDb;
  const forbidden = requirePerm(user, "comments:delete");
  if (forbidden) return forbidden;
  try {
    return unwrapResult(await handleCommentDelete(emdash.db, commentId));
  } catch (error) {
    return handleError(error, "Failed to delete comment", "COMMENT_DELETE_ERROR");
  }
};

View File

@@ -0,0 +1,120 @@
/**
* Comment status change
*
* PUT /_emdash/api/admin/comments/:id/status - Change comment status
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, apiSuccess, handleError, requireDb, unwrapResult } from "#api/error.js";
import { handleCommentGet } from "#api/handlers/comments.js";
import { isParseError, parseBody } from "#api/parse.js";
import { commentStatusBody } from "#api/schemas.js";
import { getSiteBaseUrl } from "#api/site-url.js";
import { lookupContentAuthor, sendCommentNotification } from "#comments/notifications.js";
import { moderateComment, type CommentHookRunner } from "#comments/service.js";
import type { CommentStatus } from "#db/repositories/comment.js";
import type { ModerationDecision } from "#plugins/types.js";
export const prerender = false;
export const PUT: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "Comment ID required", 400);
  }
  const dbErr = requireDb(emdash?.db);
  if (dbErr) return dbErr;
  const denied = requirePerm(user, "comments:moderate");
  if (denied) return denied;
  try {
    const body = await parseBody(request, commentStatusBody);
    if (isParseError(body)) return body;
    const newStatus = body.status as CommentStatus;
    // Build hook runner for the service. moderateComment takes the full
    // CommentHookRunner surface, so every hook is wired even though this
    // moderation path primarily fires the moderate/afterModerate hooks.
    const hookRunner: CommentHookRunner = {
      async runBeforeCreate(event) {
        return emdash.hooks.runCommentBeforeCreate(event);
      },
      async runModerate(event) {
        // Exclusive hook: at most one plugin provides moderation. Fail safe
        // to "pending" when no moderator is configured or the plugin errors.
        const result = await emdash.hooks.invokeExclusiveHook("comment:moderate", event);
        if (!result) return { status: "pending" as const, reason: "No moderator configured" };
        if (result.error) return { status: "pending" as const, reason: "Moderation error" };
        return result.result as ModerationDecision;
      },
      // after* hooks are fire-and-forget: failures are logged, never surfaced.
      fireAfterCreate(event) {
        emdash.hooks
          .runCommentAfterCreate(event)
          .catch((err) =>
            console.error(
              "[comments] afterCreate error:",
              err instanceof Error ? err.message : err,
            ),
          );
      },
      fireAfterModerate(event) {
        emdash.hooks
          .runCommentAfterModerate(event)
          .catch((err) =>
            console.error(
              "[comments] afterModerate error:",
              err instanceof Error ? err.message : err,
            ),
          );
      },
    };
    // Read the comment before updating so we know the previous status
    // (used below to detect the not-approved → approved transition).
    const existing = await handleCommentGet(emdash.db, id);
    if (!existing.success) {
      return unwrapResult(existing);
    }
    const previousStatus = existing.data.status;
    // user is non-null here — requirePerm above rejected unauthenticated requests.
    const updated = await moderateComment(
      emdash.db,
      id,
      newStatus,
      { id: user!.id, name: user!.name ?? null },
      hookRunner,
    );
    if (!updated) {
      return apiError("NOT_FOUND", "Comment not found", 404);
    }
    // Send notification when a comment is newly approved (skipped when no
    // email transport is configured).
    if (newStatus === "approved" && previousStatus !== "approved" && emdash.email) {
      try {
        const adminBaseUrl = await getSiteBaseUrl(emdash.db, request);
        const content = await lookupContentAuthor(
          emdash.db,
          updated.collection,
          updated.contentId,
        );
        if (content?.author) {
          await sendCommentNotification({
            email: emdash.email,
            comment: updated,
            contentAuthor: content.author,
            adminBaseUrl,
          });
        }
      } catch (err) {
        // Notification failure must not fail the moderation itself.
        console.error("[comments] notification error:", err instanceof Error ? err.message : err);
      }
    }
    return apiSuccess(updated);
  } catch (error) {
    return handleError(error, "Failed to update comment status", "COMMENT_STATUS_ERROR");
  }
};

View File

@@ -0,0 +1,42 @@
/**
* Bulk comment operations
*
* POST /_emdash/api/admin/comments/bulk - Bulk status change or delete
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { handleError, requireDb, unwrapResult } from "#api/error.js";
import { handleCommentBulk } from "#api/handlers/comments.js";
import { isParseError, parseBody } from "#api/parse.js";
import { commentBulkBody } from "#api/schemas.js";
export const prerender = false;
export const POST: APIRoute = async ({ request, locals }) => {
const { emdash, user } = locals;
const dbErr = requireDb(emdash?.db);
if (dbErr) return dbErr;
try {
const body = await parseBody(request, commentBulkBody);
if (isParseError(body)) return body;
// Bulk delete requires ADMIN, bulk status change requires EDITOR
if (body.action === "delete") {
const denied = requirePerm(user, "comments:delete");
if (denied) return denied;
} else {
const denied = requirePerm(user, "comments:moderate");
if (denied) return denied;
}
const result = await handleCommentBulk(emdash.db, body.ids, body.action);
return unwrapResult(result);
} catch (error) {
return handleError(error, "Failed to perform bulk operation", "COMMENT_BULK_ERROR");
}
};

View File

@@ -0,0 +1,30 @@
/**
* Comment status counts for inbox badges
*
* GET /_emdash/api/admin/comments/counts
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { handleError, requireDb, unwrapResult } from "#api/error.js";
import { handleCommentCounts } from "#api/handlers/comments.js";
export const prerender = false;
/**
 * Return per-status comment counts for the moderation inbox badges.
 *
 * Requires the `comments:moderate` permission.
 */
export const GET: APIRoute = async ({ locals }) => {
  const { emdash, user } = locals;
  // A configured database is required before anything else can run.
  const missingDb = requireDb(emdash?.db);
  if (missingDb) return missingDb;
  const forbidden = requirePerm(user, "comments:moderate");
  if (forbidden) return forbidden;
  try {
    return unwrapResult(await handleCommentCounts(emdash.db));
  } catch (error) {
    return handleError(error, "Failed to get comment counts", "COMMENT_COUNTS_ERROR");
  }
};

View File

@@ -0,0 +1,46 @@
/**
* Admin comment inbox
*
* GET /_emdash/api/admin/comments - List comments (filterable by status, collection, search)
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { handleError, requireDb, unwrapResult } from "#api/error.js";
import { handleCommentInbox } from "#api/handlers/comments.js";
import { isParseError, parseQuery } from "#api/parse.js";
import { commentListQuery } from "#api/schemas.js";
import type { CommentStatus } from "#db/repositories/comment.js";
export const prerender = false;
/**
 * List comments for the moderation inbox.
 *
 * Supports filtering by status, collection, and free-text search, with
 * cursor-based pagination. Requires the `comments:moderate` permission.
 */
export const GET: APIRoute = async ({ url, locals }) => {
  const { emdash, user } = locals;
  const missingDb = requireDb(emdash?.db);
  if (missingDb) return missingDb;
  const forbidden = requirePerm(user, "comments:moderate");
  if (forbidden) return forbidden;
  try {
    const parsed = parseQuery(url, commentListQuery);
    if (isParseError(parsed)) return parsed;
    const { status, collection, search, limit, cursor } = parsed;
    const outcome = await handleCommentInbox(emdash.db, {
      // Query schema validates the value; cast to the repository's status type.
      status: status as CommentStatus | undefined,
      collection,
      search,
      limit,
      cursor,
    });
    return unwrapResult(outcome);
  } catch (error) {
    return handleError(error, "Failed to list comments", "COMMENT_INBOX_ERROR");
  }
};

View File

@@ -0,0 +1,91 @@
/**
* Exclusive hook selection endpoint
*
* PUT /_emdash/api/admin/hooks/exclusive/:hookName
*
* Sets or clears the selected provider for an exclusive hook.
* Body: { pluginId: string | null }
* Requires settings:manage permission.
*/
import type { APIRoute } from "astro";
import { z } from "zod";
import { requirePerm } from "#api/authorize.js";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { isParseError, parseBody } from "#api/parse.js";
import { OptionsRepository } from "#db/repositories/options.js";
export const prerender = false;
/** Hook name format: namespace:action (e.g., "content:beforeSave") */
const HOOK_NAME_RE = /^[a-z]+:[a-zA-Z]+$/;
const setSelectionSchema = z.object({
  pluginId: z.string().min(1).nullable(),
});
/**
 * Set or clear the selected provider plugin for an exclusive hook.
 *
 * Body `{ pluginId: string }` selects a provider; `{ pluginId: null }`
 * clears the selection. Requires the `settings:manage` permission.
 */
export const PUT: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "settings:manage");
  if (forbidden) return forbidden;
  const { hookName } = params;
  if (!hookName) {
    return apiError("VALIDATION_ERROR", "Hook name is required", 400);
  }
  // Reject anything not shaped like "namespace:action".
  if (!HOOK_NAME_RE.test(hookName)) {
    return apiError("VALIDATION_ERROR", "Invalid hook name format", 400);
  }
  try {
    const hookPipeline = emdash.hooks;
    // Only hooks registered as exclusive can carry a selection.
    if (!hookPipeline.isExclusiveHook(hookName)) {
      return apiError("NOT_FOUND", `Hook '${hookName}' is not a registered exclusive hook`, 404);
    }
    const payload = await parseBody(request, setSelectionSchema);
    if (isParseError(payload)) return payload;
    const options = new OptionsRepository(emdash.db);
    const optionKey = `emdash:exclusive_hook:${hookName}`;
    if (payload.pluginId !== null) {
      // A selection may only name a plugin that actually provides this hook.
      const providers = hookPipeline.getExclusiveHookProviders(hookName);
      const known = providers.some((p: { pluginId: string }) => p.pluginId === payload.pluginId);
      if (!known) {
        return apiError(
          "VALIDATION_ERROR",
          `Plugin '${payload.pluginId}' is not a provider for hook '${hookName}'`,
          400,
        );
      }
      // Persist the selection, then mirror it into the live pipeline.
      await options.set(optionKey, payload.pluginId);
      hookPipeline.setExclusiveSelection(hookName, payload.pluginId);
    } else {
      // Clear both the persisted selection and the in-memory one.
      await options.delete(optionKey);
      hookPipeline.clearExclusiveSelection(hookName);
    }
    return apiSuccess({
      hookName,
      selectedPluginId: payload.pluginId,
    });
  } catch (error) {
    return handleError(error, "Failed to set exclusive hook selection", "EXCLUSIVE_HOOK_SET_ERROR");
  }
};

View File

@@ -0,0 +1,51 @@
/**
* Exclusive hooks list endpoint
*
* GET /_emdash/api/admin/hooks/exclusive
*
* Lists all exclusive hooks with their providers and current selections.
* Requires admin role.
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, apiSuccess, handleError } from "#api/error.js";
import { OptionsRepository } from "#db/repositories/options.js";
export const prerender = false;
/**
 * List all exclusive hooks with their providers and current selections.
 *
 * Requires the `settings:manage` permission.
 */
export const GET: APIRoute = async ({ locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const denied = requirePerm(user, "settings:manage");
  if (denied) return denied;
  try {
    const pipeline = emdash.hooks;
    const exclusiveHookNames = pipeline.getRegisteredExclusiveHooks();
    const optionsRepo = new OptionsRepository(emdash.db);
    // Each hook's persisted selection is an independent read; fetch them in
    // parallel instead of one sequential round-trip per hook.
    const hooks = await Promise.all(
      exclusiveHookNames.map(async (hookName: string) => {
        const providers = pipeline.getExclusiveHookProviders(hookName);
        const selection = await optionsRepo.get<string>(`emdash:exclusive_hook:${hookName}`);
        return {
          hookName,
          providers: providers.map((provider: { pluginId: string }) => ({
            pluginId: provider.pluginId,
          })),
          selectedPluginId: selection,
        };
      }),
    );
    return apiSuccess({ items: hooks });
  } catch (error) {
    return handleError(error, "Failed to list exclusive hooks", "EXCLUSIVE_HOOKS_LIST_ERROR");
  }
};

View File

@@ -0,0 +1,110 @@
/**
* Single OAuth client endpoints
*
* GET /_emdash/api/admin/oauth-clients/:id — Get a client
* PUT /_emdash/api/admin/oauth-clients/:id — Update a client
* DELETE /_emdash/api/admin/oauth-clients/:id — Delete a client
*/
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { z } from "zod";
import { apiError, handleError, unwrapResult } from "#api/error.js";
import {
handleOAuthClientDelete,
handleOAuthClientGet,
handleOAuthClientUpdate,
} from "#api/handlers/oauth-clients.js";
import { isParseError, parseBody } from "#api/parse.js";
export const prerender = false;
const updateClientSchema = z.object({
name: z.string().min(1).max(255).optional(),
redirectUris: z
.array(z.string().url("Each redirect URI must be a valid URL"))
.min(1, "At least one redirect URI is required")
.optional(),
scopes: z.array(z.string()).nullable().optional(),
});
/**
* Get a single OAuth client.
*/
export const GET: APIRoute = async ({ params, locals }) => {
const { emdash, user } = locals;
if (!emdash?.db) {
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
if (!user || user.role < Role.ADMIN) {
return apiError("FORBIDDEN", "Admin privileges required", 403);
}
const clientId = params.id;
if (!clientId) {
return apiError("VALIDATION_ERROR", "Client ID is required", 400);
}
const result = await handleOAuthClientGet(emdash.db, clientId);
return unwrapResult(result);
};
/**
 * Update an OAuth client.
 *
 * Accepts a partial body (name, redirect URIs, scopes); requires admin role.
 */
export const PUT: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }
  const clientId = params.id;
  if (!clientId) {
    return apiError("VALIDATION_ERROR", "Client ID is required", 400);
  }
  try {
    const payload = await parseBody(request, updateClientSchema);
    if (isParseError(payload)) return payload;
    return unwrapResult(await handleOAuthClientUpdate(emdash.db, clientId, payload));
  } catch (error) {
    return handleError(error, "Failed to update OAuth client", "CLIENT_UPDATE_ERROR");
  }
};
/**
 * Delete an OAuth client.
 *
 * Requires admin role.
 */
export const DELETE: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  // Guard clauses: initialized database, admin caller, and a client id.
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }
  const clientId = params.id;
  if (!clientId) {
    return apiError("VALIDATION_ERROR", "Client ID is required", 400);
  }
  try {
    return unwrapResult(await handleOAuthClientDelete(emdash.db, clientId));
  } catch (error) {
    return handleError(error, "Failed to delete OAuth client", "CLIENT_DELETE_ERROR");
  }
};

View File

@@ -0,0 +1,71 @@
/**
* OAuth client management endpoints
*
* GET /_emdash/api/admin/oauth-clients — List all registered OAuth clients
* POST /_emdash/api/admin/oauth-clients — Register a new OAuth client
*/
import { Role } from "@emdashcms/auth";
import type { APIRoute } from "astro";
import { z } from "zod";
import { apiError, handleError, unwrapResult } from "#api/error.js";
import { handleOAuthClientCreate, handleOAuthClientList } from "#api/handlers/oauth-clients.js";
import { isParseError, parseBody } from "#api/parse.js";
export const prerender = false;
const createClientSchema = z.object({
id: z
.string()
.min(1, "Client ID is required")
.max(255, "Client ID must be at most 255 characters"),
name: z.string().min(1, "Name is required").max(255, "Name must be at most 255 characters"),
redirectUris: z
.array(z.string().url("Each redirect URI must be a valid URL"))
.min(1, "At least one redirect URI is required"),
scopes: z.array(z.string()).optional(),
});
/**
* List all registered OAuth clients.
*/
export const GET: APIRoute = async ({ locals }) => {
const { emdash, user } = locals;
if (!emdash?.db) {
return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
}
if (!user || user.role < Role.ADMIN) {
return apiError("FORBIDDEN", "Admin privileges required", 403);
}
const result = await handleOAuthClientList(emdash.db);
return unwrapResult(result);
};
/**
 * Register a new OAuth client.
 *
 * Requires admin role; responds 201 on success.
 */
export const POST: APIRoute = async ({ request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }
  try {
    const payload = await parseBody(request, createClientSchema);
    if (isParseError(payload)) return payload;
    return unwrapResult(await handleOAuthClientCreate(emdash.db, payload), 201);
  } catch (error) {
    return handleError(error, "Failed to create OAuth client", "CLIENT_CREATE_ERROR");
  }
};

View File

@@ -0,0 +1,39 @@
/**
* Plugin disable endpoint
*
* POST /_emdash/api/admin/plugins/:id/disable - Disable a plugin
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handlePluginDisable } from "#api/index.js";
import { setCronTasksEnabled } from "#plugins/cron.js";
export const prerender = false;
/**
 * Disable a plugin: run the disable handler, then persist the inactive
 * status and switch off the plugin's cron tasks.
 *
 * Requires the `plugins:manage` permission.
 */
export const POST: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:manage");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  const result = await handlePluginDisable(emdash.db, emdash.configuredPlugins, pluginId);
  if (!result.success) return unwrapResult(result);
  // Only persist the new state once the disable handler itself succeeded.
  await emdash.setPluginStatus(pluginId, "inactive");
  await setCronTasksEnabled(emdash.db, pluginId, false);
  return unwrapResult(result);
};

View File

@@ -0,0 +1,39 @@
/**
* Plugin enable endpoint
*
* POST /_emdash/api/admin/plugins/:id/enable - Enable a plugin
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handlePluginEnable } from "#api/index.js";
import { setCronTasksEnabled } from "#plugins/cron.js";
export const prerender = false;
/**
 * Enable a plugin: run the enable handler, then persist the active status
 * and switch the plugin's cron tasks back on.
 *
 * Requires the `plugins:manage` permission.
 */
export const POST: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:manage");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  const result = await handlePluginEnable(emdash.db, emdash.configuredPlugins, pluginId);
  if (!result.success) return unwrapResult(result);
  // Only persist the new state once the enable handler itself succeeded.
  await emdash.setPluginStatus(pluginId, "active");
  await setCronTasksEnabled(emdash.db, pluginId, true);
  return unwrapResult(result);
};

View File

@@ -0,0 +1,38 @@
/**
* Plugin management single plugin endpoint
*
* GET /_emdash/api/admin/plugins/:id - Get plugin details
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handlePluginGet } from "#api/index.js";
export const prerender = false;
/**
 * Get details for a single configured plugin.
 *
 * Requires the `plugins:read` permission.
 */
export const GET: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  return unwrapResult(
    await handlePluginGet(emdash.db, emdash.configuredPlugins, pluginId, emdash.config.marketplace),
  );
};

View File

@@ -0,0 +1,48 @@
/**
* Marketplace plugin uninstall endpoint
*
* POST /_emdash/api/admin/plugins/:id/uninstall - Uninstall a marketplace plugin
*/
import type { APIRoute } from "astro";
import { z } from "zod";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleMarketplaceUninstall } from "#api/index.js";
import { isParseError, parseOptionalBody } from "#api/parse.js";
export const prerender = false;
const uninstallBodySchema = z.object({
  deleteData: z.boolean().optional(),
});
/**
 * Uninstall a marketplace plugin, optionally deleting its stored data.
 *
 * Requires the `plugins:manage` permission.
 */
export const POST: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:manage");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  // The body is optional; default is to keep the plugin's data.
  const payload = await parseOptionalBody(request, uninstallBodySchema, {});
  if (isParseError(payload)) return payload;
  const result = await handleMarketplaceUninstall(emdash.db, emdash.storage, pluginId, {
    deleteData: payload.deleteData ?? false,
  });
  if (!result.success) return unwrapResult(result);
  // Re-sync the marketplace plugin list only after a successful uninstall.
  await emdash.syncMarketplacePlugins();
  return unwrapResult(result);
};

View File

@@ -0,0 +1,59 @@
/**
* Marketplace plugin update endpoint
*
* POST /_emdash/api/admin/plugins/:id/update - Update a marketplace plugin
*/
import type { APIRoute } from "astro";
import { z } from "zod";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleMarketplaceUpdate } from "#api/index.js";
import { isParseError, parseOptionalBody } from "#api/parse.js";
export const prerender = false;
const updateBodySchema = z.object({
  version: z.string().min(1).optional(),
  confirmCapabilityChanges: z.boolean().optional(),
  confirmRouteVisibilityChanges: z.boolean().optional(),
});
/**
 * Update an installed marketplace plugin, optionally to a specific version.
 *
 * Requires the `plugins:manage` permission.
 */
export const POST: APIRoute = async ({ params, request, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:manage");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  // The body is optional; all fields default to undefined.
  const payload = await parseOptionalBody(request, updateBodySchema, {});
  if (isParseError(payload)) return payload;
  const result = await handleMarketplaceUpdate(
    emdash.db,
    emdash.storage,
    emdash.getSandboxRunner(),
    emdash.config.marketplace,
    pluginId,
    {
      version: payload.version,
      confirmCapabilityChanges: payload.confirmCapabilityChanges,
      confirmRouteVisibilityChanges: payload.confirmRouteVisibilityChanges,
    },
  );
  if (!result.success) return unwrapResult(result);
  // Re-sync the marketplace plugin list only after a successful update.
  await emdash.syncMarketplacePlugins();
  return unwrapResult(result);
};

View File

@@ -0,0 +1,32 @@
/**
* Plugin management list endpoint
*
* GET /_emdash/api/admin/plugins - List all configured plugins with state
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handlePluginList } from "#api/index.js";
export const prerender = false;
/**
 * List all configured plugins together with their persisted state.
 *
 * Requires the `plugins:read` permission.
 */
export const GET: APIRoute = async ({ locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  return unwrapResult(
    await handlePluginList(emdash.db, emdash.configuredPlugins, emdash.config.marketplace),
  );
};

View File

@@ -0,0 +1,61 @@
/**
* Marketplace plugin icon proxy
*
* GET /_emdash/api/admin/plugins/marketplace/:id/icon - Proxy icon from marketplace
*
* Avoids CORS/auth issues when the marketplace Worker is behind Cloudflare Access
* or on a different origin. The admin UI uses this instead of linking directly.
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError } from "#api/error.js";
export const prerender = false;
/**
 * Proxy a plugin icon from the marketplace so the admin UI never talks to
 * the marketplace origin directly (avoids CORS / Cloudflare Access issues).
 *
 * Requires the `plugins:read` permission. The optional `w` query parameter
 * is forwarded to the marketplace for resizing.
 */
export const GET: APIRoute = async ({ params, url, locals }) => {
  const { emdash, user } = locals;
  const { id } = params;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const marketplaceUrl = emdash.config.marketplace;
  if (!marketplaceUrl || !id) {
    return apiError("NOT_CONFIGURED", "Marketplace not configured", 400);
  }
  const width = url.searchParams.get("w");
  const target = new URL(`/api/v1/plugins/${encodeURIComponent(id)}/icon`, marketplaceUrl);
  if (width) target.searchParams.set("w", width);
  try {
    const upstream = await fetch(target.href);
    // Allowlist: only forward Content-Type from upstream.
    // Never copy all upstream headers (denylist approach leaks
    // headers we haven't anticipated).
    const init: ResponseInit = {
      headers: {
        "Content-Type":
          upstream.headers.get("Content-Type") ??
          (upstream.ok ? "image/png" : "application/octet-stream"),
        "Cache-Control": "private, no-store",
      },
    };
    // Successful proxies keep the default 200; only errors carry the
    // upstream status through.
    if (!upstream.ok) init.status = upstream.status;
    return new Response(upstream.body, init);
  } catch {
    return apiError("PROXY_ERROR", "Failed to fetch icon", 502);
  }
};

View File

@@ -0,0 +1,33 @@
/**
* Marketplace plugin detail proxy endpoint
*
* GET /_emdash/api/admin/plugins/marketplace/:id - Get plugin details
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleMarketplaceGetPlugin } from "#api/index.js";
export const prerender = false;
/**
 * Get marketplace details for a single plugin (proxied through the server).
 *
 * Requires the `plugins:read` permission.
 */
export const GET: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const pluginId = params.id;
  if (!pluginId) {
    return apiError("INVALID_REQUEST", "Plugin ID required", 400);
  }
  return unwrapResult(await handleMarketplaceGetPlugin(emdash.config.marketplace, pluginId));
};

View File

@@ -0,0 +1,62 @@
/**
* Marketplace plugin install endpoint
*
* POST /_emdash/api/admin/plugins/marketplace/:id/install - Install a marketplace plugin
*/
import type { APIRoute } from "astro";
import { z } from "zod";
import { requirePerm } from "#api/authorize.js";
import { apiError, handleError, unwrapResult } from "#api/error.js";
import { handleMarketplaceInstall } from "#api/index.js";
import { isParseError, parseOptionalBody } from "#api/parse.js";
export const prerender = false;
const installBodySchema = z.object({
  version: z.string().min(1).optional(),
});
/**
 * Install a plugin from the marketplace, then re-sync the plugin registry.
 *
 * Requires the `plugins:manage` permission; responds 201 on success.
 */
export const POST: APIRoute = async ({ params, request, locals }) => {
  try {
    const { emdash, user } = locals;
    if (!emdash?.db) {
      return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
    }
    const forbidden = requirePerm(user, "plugins:manage");
    if (forbidden) return forbidden;
    const pluginId = params.id;
    if (!pluginId) {
      return apiError("INVALID_REQUEST", "Plugin ID required", 400);
    }
    const payload = await parseOptionalBody(request, installBodySchema, {});
    if (isParseError(payload)) return payload;
    // IDs of code-configured plugins, passed through to the install handler
    // (presumably for conflict detection — behavior lives in the handler).
    const configuredPluginIds = new Set<string>(
      emdash.configuredPlugins.map((p: { id: string }) => p.id),
    );
    const result = await handleMarketplaceInstall(
      emdash.db,
      emdash.storage,
      emdash.getSandboxRunner(),
      emdash.config.marketplace,
      pluginId,
      { version: payload.version, configuredPluginIds },
    );
    if (!result.success) return unwrapResult(result);
    // Re-sync the marketplace plugin list only after a successful install.
    await emdash.syncMarketplacePlugins();
    return unwrapResult(result, 201);
  } catch (error) {
    console.error("[marketplace-install] Unhandled error:", error);
    return handleError(error, "Failed to install plugin from marketplace", "INSTALL_FAILED");
  }
};

View File

@@ -0,0 +1,38 @@
/**
* Marketplace search proxy endpoint
*
* GET /_emdash/api/admin/plugins/marketplace - Search marketplace plugins
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleMarketplaceSearch } from "#api/index.js";
export const prerender = false;
/**
 * Search marketplace plugins (proxied through the server).
 *
 * Query parameters: `q`, `category`, `cursor`, and `limit` (clamped to
 * 1..100; non-numeric input falls back to 50). Requires `plugins:read`.
 */
export const GET: APIRoute = async ({ url, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const qs = url.searchParams;
  const query = qs.get("q") ?? undefined;
  const category = qs.get("category") ?? undefined;
  const cursor = qs.get("cursor") ?? undefined;
  const rawLimit = qs.get("limit");
  // Clamp to [1, 100]; NaN falls back to 50 before clamping.
  let limit: number | undefined;
  if (rawLimit) {
    limit = Math.min(Math.max(1, parseInt(rawLimit, 10) || 50), 100);
  }
  const result = await handleMarketplaceSearch(emdash.config.marketplace, query, {
    category,
    cursor,
    limit,
  });
  return unwrapResult(result);
};

View File

@@ -0,0 +1,28 @@
/**
* Marketplace update check endpoint
*
* GET /_emdash/api/admin/plugins/updates - Check for marketplace plugin updates
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleMarketplaceUpdateCheck } from "#api/index.js";
export const prerender = false;
/**
 * Check the marketplace for available updates to installed plugins.
 *
 * Requires the `plugins:read` permission.
 */
export const GET: APIRoute = async ({ locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  return unwrapResult(await handleMarketplaceUpdateCheck(emdash.db, emdash.config.marketplace));
};

View File

@@ -0,0 +1,33 @@
/**
* Theme marketplace detail proxy endpoint
*
* GET /_emdash/api/admin/themes/marketplace/:id - Get theme details
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleThemeGetDetail } from "#api/index.js";
export const prerender = false;
/**
 * Get marketplace details for a single theme (proxied through the server).
 *
 * Requires the `plugins:read` permission.
 */
export const GET: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const themeId = params.id;
  if (!themeId) {
    return apiError("INVALID_REQUEST", "Theme ID required", 400);
  }
  return unwrapResult(await handleThemeGetDetail(emdash.config.marketplace, themeId));
};

View File

@@ -0,0 +1,61 @@
/**
* Theme marketplace thumbnail proxy
*
* GET /_emdash/api/admin/themes/marketplace/:id/thumbnail - Proxy thumbnail from marketplace
*
* Avoids CORS/auth issues when the marketplace Worker is behind Cloudflare Access
* or on a different origin. The admin UI uses this instead of linking directly.
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError } from "#api/error.js";
export const prerender = false;
/**
 * Proxy a theme thumbnail from the marketplace so the admin UI never talks
 * to the marketplace origin directly (avoids CORS / Cloudflare Access
 * issues).
 *
 * Requires the `plugins:read` permission. The optional `w` query parameter
 * is forwarded to the marketplace for resizing.
 */
export const GET: APIRoute = async ({ params, url, locals }) => {
  const { emdash, user } = locals;
  const { id } = params;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const forbidden = requirePerm(user, "plugins:read");
  if (forbidden) return forbidden;
  const marketplaceUrl = emdash.config.marketplace;
  if (!marketplaceUrl || !id) {
    return apiError("NOT_CONFIGURED", "Marketplace not configured", 400);
  }
  const width = url.searchParams.get("w");
  const target = new URL(`/api/v1/themes/${encodeURIComponent(id)}/thumbnail`, marketplaceUrl);
  if (width) target.searchParams.set("w", width);
  try {
    const upstream = await fetch(target.href);
    // Allowlist: only forward Content-Type from upstream.
    // Never copy all upstream headers (denylist approach leaks
    // headers we haven't anticipated).
    const init: ResponseInit = {
      headers: {
        "Content-Type":
          upstream.headers.get("Content-Type") ??
          (upstream.ok ? "image/png" : "application/octet-stream"),
        "Cache-Control": "private, no-store",
      },
    };
    // Successful proxies keep the default 200; only errors carry the
    // upstream status through.
    if (!upstream.ok) init.status = upstream.status;
    return new Response(upstream.body, init);
  } catch {
    return apiError("PROXY_ERROR", "Failed to fetch thumbnail", 502);
  }
};

View File

@@ -0,0 +1,45 @@
/**
* Theme marketplace search proxy endpoint
*
* GET /_emdash/api/admin/themes/marketplace - Search marketplace themes
*/
import type { APIRoute } from "astro";
import { requirePerm } from "#api/authorize.js";
import { apiError, unwrapResult } from "#api/error.js";
import { handleThemeSearch } from "#api/index.js";
export const prerender = false;
/**
 * Search marketplace themes (proxied through the server).
 *
 * Query parameters: `q`, `keyword`, `sort` (one of name/created/updated),
 * `cursor`, and `limit` (clamped to 1..100). Requires `plugins:read`.
 */
export const GET: APIRoute = async ({ url, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "EmDash is not initialized", 500);
  }
  const denied = requirePerm(user, "plugins:read");
  if (denied) return denied;
  const query = url.searchParams.get("q") ?? undefined;
  const keyword = url.searchParams.get("keyword") ?? undefined;
  const sortParam = url.searchParams.get("sort");
  // Literal comparisons let TypeScript narrow sortParam to the union on its
  // own — no type assertion needed. (The previous `as` cast carried a
  // malformed eslint-disable directive that suppressed nothing.)
  let sort: "name" | "created" | "updated" | undefined;
  if (sortParam === "name" || sortParam === "created" || sortParam === "updated") {
    sort = sortParam;
  }
  const cursor = url.searchParams.get("cursor") ?? undefined;
  const limitParam = url.searchParams.get("limit");
  // Clamp to [1, 100]; NaN falls back to 50 before clamping.
  const limit = limitParam ? Math.min(Math.max(1, parseInt(limitParam, 10) || 50), 100) : undefined;
  const result = await handleThemeSearch(emdash.config.marketplace, query, {
    keyword,
    sort,
    cursor,
    limit,
  });
  return unwrapResult(result);
};

View File

@@ -0,0 +1,69 @@
/**
* User disable endpoint
*
* POST /_emdash/api/admin/users/:id/disable - Soft-disable a user
*/
import { Role } from "@emdashcms/auth";
import { createKyselyAdapter } from "@emdashcms/auth/adapters/kysely";
import type { APIRoute } from "astro";
import { apiError, apiSuccess, handleError } from "#api/error.js";
export const prerender = false;
/**
 * Soft-disable a user account.
 *
 * Requires admin role. Refuses to disable the caller's own account or the
 * last remaining admin, and revokes the target's OAuth tokens on success.
 */
export const POST: APIRoute = async ({ params, locals }) => {
  const { emdash, user: currentUser } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!currentUser || currentUser.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }
  const adapter = createKyselyAdapter(emdash.db);
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "User ID required", 400);
  }
  // An admin must not be able to lock themselves out.
  if (id === currentUser.id) {
    return apiError("VALIDATION_ERROR", "Cannot disable your own account", 400);
  }
  try {
    const target = await adapter.getUserById(id);
    if (!target) {
      return apiError("NOT_FOUND", "User not found", 404);
    }
    // Refuse to disable the final remaining admin account.
    if (target.role === Role.ADMIN && (await adapter.countAdmins()) <= 1) {
      return apiError(
        "VALIDATION_ERROR",
        "Cannot disable the last admin. Promote another user first.",
        400,
      );
    }
    await adapter.updateUser(id, { disabled: true });
    // SEC-43: Revoke all OAuth tokens for the disabled user.
    // Without this, existing refresh tokens remain valid for up to 90 days.
    await emdash.db.deleteFrom("_emdash_oauth_tokens").where("user_id", "=", id).execute();
    return apiSuccess({ success: true });
  } catch (error) {
    return handleError(error, "Failed to disable user", "USER_DISABLE_ERROR");
  }
};

View File

@@ -0,0 +1,48 @@
/**
* User enable endpoint
*
* POST /_emdash/api/admin/users/:id/enable - Re-enable a disabled user
*/
import { Role } from "@emdashcms/auth";
import { createKyselyAdapter } from "@emdashcms/auth/adapters/kysely";
import type { APIRoute } from "astro";
import { apiError, apiSuccess, handleError } from "#api/error.js";
export const prerender = false;
/**
 * Re-enable a previously disabled user account.
 *
 * Requires admin role; 404 when the target user does not exist.
 */
export const POST: APIRoute = async ({ params, locals }) => {
  const { emdash, user } = locals;
  if (!emdash?.db) {
    return apiError("NOT_CONFIGURED", "Database not configured", 500);
  }
  if (!user || user.role < Role.ADMIN) {
    return apiError("FORBIDDEN", "Admin privileges required", 403);
  }
  const adapter = createKyselyAdapter(emdash.db);
  const { id } = params;
  if (!id) {
    return apiError("VALIDATION_ERROR", "User ID required", 400);
  }
  try {
    const target = await adapter.getUserById(id);
    if (!target) {
      return apiError("NOT_FOUND", "User not found", 404);
    }
    // Clear the soft-disable flag.
    await adapter.updateUser(id, { disabled: false });
    return apiSuccess({ success: true });
  } catch (error) {
    return handleError(error, "Failed to enable user", "USER_ENABLE_ERROR");
  }
};

Some files were not shown because too many files have changed in this diff Show More